diff --git a/.coveragerc b/.coveragerc
index dd39c85..4646abb 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -21,15 +21,14 @@ branch = True
[report]
fail_under = 100
show_missing = True
+omit = google/cloud/redis/__init__.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
# Ignore debug-only repr
def __repr__
- # Ignore abstract methods
- raise NotImplementedError
-omit =
- */gapic/*.py
- */proto/*.py
- */core/*.py
- */site-packages/*.py
\ No newline at end of file
+ # Ignore pkg_resources exceptions.
+ # This is added at the module level as a safeguard for if someone
+ # generates the code and tries to run it without pip installing. This
+ # makes it virtually impossible to test properly.
+ except pkg_resources.DistributionNotFound
\ No newline at end of file
diff --git a/.flake8 b/.flake8
index 20fe9bd..ed93163 100644
--- a/.flake8
+++ b/.flake8
@@ -21,6 +21,8 @@ exclude =
# Exclude generated code.
**/proto/**
**/gapic/**
+ **/services/**
+ **/types/**
*_pb2.py
# Standard linting exemptions.
diff --git a/google/cloud/redis_v1/gapic/__init__.py b/.github/snippet-bot.yml
similarity index 100%
rename from google/cloud/redis_v1/gapic/__init__.py
rename to .github/snippet-bot.yml
diff --git a/.gitignore b/.gitignore
index 3fb06e0..b9daa52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
dist
build
eggs
+.eggs
parts
bin
var
@@ -45,14 +46,16 @@ pip-log.txt
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
coverage.xml
+sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
-pylintrc.test
\ No newline at end of file
+pylintrc.test
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index cb895b3..1c66306 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
python3.6 -m pip install --upgrade --quiet nox
python3.6 -m nox --version
-python3.6 -m nox
+# If NOX_SESSION is set, it only runs the specified session,
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+ python3.6 -m nox
+fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 0000000..412b0b5
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+ && export GNUPGHOME="$(mktemp -d)" \
+ && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+ && /tmp/fetch_gpg_keys.sh \
+ && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+ wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+ && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+ && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+ && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+ && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+ && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+ && rm python-${PYTHON_VERSION}.tar.xz \
+ && cd /usr/src/python-${PYTHON_VERSION} \
+ && ./configure \
+ --enable-shared \
+ # This works only on Python 2.7 and throws a warning on every other
+ # version, but seems otherwise harmless.
+ --enable-unicode=ucs4 \
+ --with-system-ffi \
+ --without-ensurepip \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ ; done \
+ && rm -rf "${GNUPGHOME}" \
+ && rm -rf /usr/src/python* \
+ && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.7 /tmp/get-pip.py \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 0000000..d653dd8
--- /dev/null
+++ b/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+ if [[ "${#}" -le 1 ]]; then
+ echo "Usage: ${0} retry_count commands.."
+ exit 1
+ fi
+ local retries=${1}
+ local command="${@:2}"
+ until [[ "${retries}" -le 0 ]]; do
+ $command && return 0
+ if [[ $? -ne 0 ]]; then
+ echo "command failed, retrying"
+ ((retries--))
+ fi
+ done
+ return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 907b4f4..513dec2 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-redis/.kokoro/trampoline.sh"
+build_file: "python-redis/.kokoro/trampoline_v2.sh"
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
value: "docs-staging"
}
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "docs-staging-v2-staging"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
# Fetch the token needed for reporting release status to GitHub
before_action {
fetch_keystore {
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 0000000..1118107
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 84a3747..8acb14e 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -13,33 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
-cd github/python-redis
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
# create metadata
python3 -m docuploader create-metadata \
@@ -54,4 +42,23 @@ python3 -m docuploader create-metadata \
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 4a71282..30ce074 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Start the releasetool reporter
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 0000000..def55e0
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-redis/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-redis/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 0000000..5f7862a
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-redis/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-redis/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 0000000..7218af1
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 0000000..c87d3ce
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-redis/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-redis/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 0000000..c5eb911
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-redis/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-redis/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 0000000..d2fa4a4
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command in the pipeline that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-redis
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the Build Cop Bot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 0000000..719bcd5
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from gcs to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after the
+# successful builds.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Potentially there are some repo specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container for telling which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # descriptive env var for indicating it's on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For Build Cop Bot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+    # On CI systems, we want to suppress docker build logs, only
+ # output the logs when it fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the docker script with the user id. Because the docker image gets to
+ # write in ${PWD} you typically want this to be your user id.
+ # To allow docker in docker, we need to use docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 0000000..995ee29
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,51 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f55540a..0c0230c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,27 @@
[1]: https://pypi.org/project/google-cloud-redis/#history
+## [2.0.0](https://www.github.com/googleapis/python-redis/compare/v1.0.0...v2.0.0) (2020-09-14)
+
+
+### ⚠ BREAKING CHANGES
+
+* migrate to microgen (#30)
+
+### Features
+
+* migrate to microgen ([#30](https://www.github.com/googleapis/python-redis/issues/30)) ([a17c1a8](https://www.github.com/googleapis/python-redis/commit/a17c1a840e10ccde25df8d4305b48997e37acd51))
+
+
+### Bug Fixes
+
+* update retry config ([#24](https://www.github.com/googleapis/python-redis/issues/24)) ([0b3f2c0](https://www.github.com/googleapis/python-redis/commit/0b3f2c075728a6ec4d5d503d010de229ed1ef725))
+
+
+### Documentation
+
+* add multiprocessing note (via synth) ([#17](https://www.github.com/googleapis/python-redis/issues/17)) ([fb04673](https://www.github.com/googleapis/python-redis/commit/fb046731d325132654ce91cb5513870befd7eec4))
+
## [1.0.0](https://www.github.com/googleapis/python-redis/compare/v0.4.0...v1.0.0) (2020-05-12)
diff --git a/MANIFEST.in b/MANIFEST.in
index 68855ab..e9e29d1 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -20,3 +20,6 @@ recursive-include google *.json *.proto
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
\ No newline at end of file
diff --git a/README.rst b/README.rst
index c8f29b7..cb01453 100644
--- a/README.rst
+++ b/README.rst
@@ -51,11 +51,13 @@ dependencies.
Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
-Python >= 3.5
+Python >= 3.6
Deprecated Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Python == 2.7.
+
+The last version of this library compatible with Python 2.7 is google-cloud-redis==1.0.0.
Mac/Linux
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 0000000..d692c47
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,162 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-redis` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-redis/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+Methods expect request objects. We provide a script that will convert most common use cases.
+
+* Install the library
+
+```py
+python3 -m pip install google-cloud-redis
+```
+
+* The script `fixup_redis_v1_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ fixup_redis_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+from google.cloud import redis_v1
+
+client = redis_v1.CloudRedisClient()
+
+instance = client.get_instance("instance_name")
+```
+
+
+**After:**
+```py
+from google.cloud import redis_v1
+
+client = redis_v1.CloudRedisClient()
+
+instance = client.get_instance(request={'name': "instance_name"})
+```
+
+### More Details
+
+In `google-cloud-redis<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+ def create_instance(
+ self,
+ parent,
+ instance_id,
+ instance,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+
+**After:**
+```py
+ def create_instance(
+ self,
+ request: cloud_redis.CreateInstanceRequest = None,
+ *,
+ parent: str = None,
+ instance_id: str = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.create_instance(
+ request={
+ "parent": parent,
+ "instance_id": instance_id,
+ "instance": instance,
+ }
+)
+```
+
+```py
+response = client.create_instance(
+ parent=parent,
+ instance_id=instance_id,
+ instance=instance,
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `instance`. Executing this code
+will result in an error.
+
+```py
+response = client.create_instance(
+ request={
+ "parent": parent,
+ },
+ instance_id=instance_id,
+ instance=instance,
+)
+```
+
+
+
+## Enums and Types
+
+
+> **WARNING**: Breaking change
+
+The submodules `enums` and `types` have been removed.
+
+**Before:**
+```py
+from google.cloud import redis_v1
+
+state = redis_v1.enums.Instance.State.STATE_UNSPECIFIED
+instance = redis_v1.types.Instance(name="name")
+```
+
+
+**After:**
+```py
+from google.cloud import redis_v1
+
+state = redis_v1.Instance.State.STATE_UNSPECIFIED
+instance = redis_v1.Instance(name="name")
+```
+
+## Location Path Helper Method
+
+Location path helper method has been removed. Please construct
+the path manually.
+
+```py
+project = 'my-project'
+location = 'location'
+
+location_path = f'projects/{project}/locations/{location}'
+```
\ No newline at end of file
diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md
new file mode 120000
index 0000000..01097c8
--- /dev/null
+++ b/docs/UPGRADING.md
@@ -0,0 +1 @@
+../UPGRADING.md
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 228529e..6316a53 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 59d3c36..458660c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,12 +20,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -38,21 +42,18 @@
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
+ "recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
-# Allow markdown includes (so releases.md can include CHANGLEOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
@@ -93,7 +94,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -293,7 +299,13 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, "google-cloud-redis", u"google-cloud-redis Documentation", [author], 1)
+ (
+ master_doc,
+ "google-cloud-redis",
+ u"google-cloud-redis Documentation",
+ [author],
+ 1,
+ )
]
# If true, show URL addresses after external links.
@@ -334,7 +346,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst
deleted file mode 100644
index 50ad1f8..0000000
--- a/docs/gapic/v1/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Client for Google Cloud Memorystore for Redis API
-=================================================
-
-.. automodule:: google.cloud.redis_v1
- :members:
- :inherited-members:
\ No newline at end of file
diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst
deleted file mode 100644
index ba92173..0000000
--- a/docs/gapic/v1/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Types for Google Cloud Memorystore for Redis API Client
-=======================================================
-
-.. automodule:: google.cloud.redis_v1.types
- :members:
\ No newline at end of file
diff --git a/docs/gapic/v1beta1/api.rst b/docs/gapic/v1beta1/api.rst
deleted file mode 100644
index b150564..0000000
--- a/docs/gapic/v1beta1/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Client for Google Cloud Memorystore for Redis API
-=================================================
-
-.. automodule:: google.cloud.redis_v1beta1
- :members:
- :inherited-members:
\ No newline at end of file
diff --git a/docs/gapic/v1beta1/types.rst b/docs/gapic/v1beta1/types.rst
deleted file mode 100644
index 579707e..0000000
--- a/docs/gapic/v1beta1/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Types for Google Cloud Memorystore for Redis API Client
-=======================================================
-
-.. automodule:: google.cloud.redis_v1beta1.types
- :members:
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index 5338f59..c3208e9 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,11 +1,24 @@
.. include:: README.rst
+.. include:: multiprocessing.rst
+
Api Reference
-------------
.. toctree::
:maxdepth: 2
- gapic/v1/api
- gapic/v1/types
- gapic/v1beta1/api
- gapic/v1beta1/types
+ redis_v1/services
+ redis_v1/types
+ redis_v1beta1/services
+ redis_v1beta1/types
+
+Migration Guide
+---------------
+
+See the guide below for instructions on migrating to the 2.x release of this library.
+
+.. toctree::
+ :maxdepth: 2
+
+ UPGRADING
+
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
new file mode 100644
index 0000000..1cb29d4
--- /dev/null
+++ b/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+ Because this client uses :mod:`grpcio` library, it is safe to
+ share instances across threads. In multiprocessing scenarios, the best
+ practice is to create client instances *after* the invocation of
+ :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :class:`multiprocessing.Process`.
diff --git a/docs/redis_v1/services.rst b/docs/redis_v1/services.rst
new file mode 100644
index 0000000..5b780d7
--- /dev/null
+++ b/docs/redis_v1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Redis v1 API
+======================================
+
+.. automodule:: google.cloud.redis_v1.services.cloud_redis
+ :members:
+ :inherited-members:
diff --git a/docs/redis_v1/types.rst b/docs/redis_v1/types.rst
new file mode 100644
index 0000000..babee02
--- /dev/null
+++ b/docs/redis_v1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Redis v1 API
+===================================
+
+.. automodule:: google.cloud.redis_v1.types
+ :members:
diff --git a/docs/redis_v1beta1/services.rst b/docs/redis_v1beta1/services.rst
new file mode 100644
index 0000000..f1a713c
--- /dev/null
+++ b/docs/redis_v1beta1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Redis v1beta1 API
+===========================================
+
+.. automodule:: google.cloud.redis_v1beta1.services.cloud_redis
+ :members:
+ :inherited-members:
diff --git a/docs/redis_v1beta1/types.rst b/docs/redis_v1beta1/types.rst
new file mode 100644
index 0000000..e552277
--- /dev/null
+++ b/docs/redis_v1beta1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Redis v1beta1 API
+========================================
+
+.. automodule:: google.cloud.redis_v1beta1.types
+ :members:
diff --git a/google/cloud/redis/__init__.py b/google/cloud/redis/__init__.py
new file mode 100644
index 0000000..3eeb22d
--- /dev/null
+++ b/google/cloud/redis/__init__.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.cloud.redis_v1.services.cloud_redis.async_client import (
+ CloudRedisAsyncClient,
+)
+from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient
+from google.cloud.redis_v1.types.cloud_redis import CreateInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import DeleteInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import ExportInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import FailoverInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import GcsDestination
+from google.cloud.redis_v1.types.cloud_redis import GcsSource
+from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import ImportInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import InputConfig
+from google.cloud.redis_v1.types.cloud_redis import Instance
+from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest
+from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse
+from google.cloud.redis_v1.types.cloud_redis import LocationMetadata
+from google.cloud.redis_v1.types.cloud_redis import OperationMetadata
+from google.cloud.redis_v1.types.cloud_redis import OutputConfig
+from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import UpgradeInstanceRequest
+from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata
+
+__all__ = (
+ "CloudRedisAsyncClient",
+ "CloudRedisClient",
+ "CreateInstanceRequest",
+ "DeleteInstanceRequest",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "GcsDestination",
+ "GcsSource",
+ "GetInstanceRequest",
+ "ImportInstanceRequest",
+ "InputConfig",
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "LocationMetadata",
+ "OperationMetadata",
+ "OutputConfig",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "ZoneMetadata",
+)
diff --git a/google/cloud/redis/py.typed b/google/cloud/redis/py.typed
new file mode 100644
index 0000000..960151e
--- /dev/null
+++ b/google/cloud/redis/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-redis package uses inline types.
diff --git a/google/cloud/redis_v1/__init__.py b/google/cloud/redis_v1/__init__.py
index 10c3f50..aeda34f 100644
--- a/google/cloud/redis_v1/__init__.py
+++ b/google/cloud/redis_v1/__init__.py
@@ -1,41 +1,59 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-
-from __future__ import absolute_import
-import sys
-import warnings
-
-from google.cloud.redis_v1 import types
-from google.cloud.redis_v1.gapic import cloud_redis_client
-from google.cloud.redis_v1.gapic import enums
-
-
-if sys.version_info[:2] == (2, 7):
- message = (
- "A future version of this library will drop support for Python 2.7."
- "More details about Python 2 support for Google Cloud Client Libraries"
- "can be found at https://cloud.google.com/python/docs/python2-sunset/"
- )
- warnings.warn(message, DeprecationWarning)
-
-
-class CloudRedisClient(cloud_redis_client.CloudRedisClient):
- __doc__ = cloud_redis_client.CloudRedisClient.__doc__
- enums = enums
-
-
-__all__ = ("enums", "types", "CloudRedisClient")
+from .services.cloud_redis import CloudRedisClient
+from .types.cloud_redis import CreateInstanceRequest
+from .types.cloud_redis import DeleteInstanceRequest
+from .types.cloud_redis import ExportInstanceRequest
+from .types.cloud_redis import FailoverInstanceRequest
+from .types.cloud_redis import GcsDestination
+from .types.cloud_redis import GcsSource
+from .types.cloud_redis import GetInstanceRequest
+from .types.cloud_redis import ImportInstanceRequest
+from .types.cloud_redis import InputConfig
+from .types.cloud_redis import Instance
+from .types.cloud_redis import ListInstancesRequest
+from .types.cloud_redis import ListInstancesResponse
+from .types.cloud_redis import LocationMetadata
+from .types.cloud_redis import OperationMetadata
+from .types.cloud_redis import OutputConfig
+from .types.cloud_redis import UpdateInstanceRequest
+from .types.cloud_redis import UpgradeInstanceRequest
+from .types.cloud_redis import ZoneMetadata
+
+
+__all__ = (
+ "CreateInstanceRequest",
+ "DeleteInstanceRequest",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "GcsDestination",
+ "GcsSource",
+ "GetInstanceRequest",
+ "ImportInstanceRequest",
+ "InputConfig",
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "LocationMetadata",
+ "OperationMetadata",
+ "OutputConfig",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "ZoneMetadata",
+ "CloudRedisClient",
+)
diff --git a/google/cloud/redis_v1/gapic/cloud_redis_client.py b/google/cloud/redis_v1/gapic/cloud_redis_client.py
deleted file mode 100644
index 52dfd83..0000000
--- a/google/cloud/redis_v1/gapic/cloud_redis_client.py
+++ /dev/null
@@ -1,1033 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Accesses the google.cloud.redis.v1 CloudRedis API."""
-
-import functools
-import pkg_resources
-import warnings
-
-from google.oauth2 import service_account
-import google.api_core.client_options
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.config
-import google.api_core.gapic_v1.method
-import google.api_core.gapic_v1.routing_header
-import google.api_core.grpc_helpers
-import google.api_core.operation
-import google.api_core.operations_v1
-import google.api_core.page_iterator
-import google.api_core.path_template
-import grpc
-
-from google.cloud.redis_v1.gapic import cloud_redis_client_config
-from google.cloud.redis_v1.gapic import enums
-from google.cloud.redis_v1.gapic.transports import cloud_redis_grpc_transport
-from google.cloud.redis_v1.proto import cloud_redis_pb2
-from google.cloud.redis_v1.proto import cloud_redis_pb2_grpc
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-
-
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-redis").version
-
-
-class CloudRedisClient(object):
- """
- Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1
-
- The ``redis.googleapis.com`` service implements the Google Cloud
- Memorystore for Redis API and defines the following resource model for
- managing Redis instances:
-
- - The service works with a collection of cloud projects, named:
- ``/projects/*``
- - Each project has a collection of available locations, named:
- ``/locations/*``
- - Each location has a collection of Redis instances, named:
- ``/instances/*``
- - As such, Redis instances are resources of the form:
- ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
-
- Note that location\_id must be referring to a GCP ``region``; for
- example:
-
- - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
- """
-
- SERVICE_ADDRESS = "redis.googleapis.com:443"
- """The default address of the service."""
-
- # The name of the interface for this client. This is the key used to
- # find the method configuration in the client_config dictionary.
- _INTERFACE_NAME = "google.cloud.redis.v1.CloudRedis"
-
- @classmethod
- def from_service_account_file(cls, filename, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- CloudRedisClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def instance_path(cls, project, location, instance):
- """Return a fully-qualified instance string."""
- return google.api_core.path_template.expand(
- "projects/{project}/locations/{location}/instances/{instance}",
- project=project,
- location=location,
- instance=instance,
- )
-
- @classmethod
- def location_path(cls, project, location):
- """Return a fully-qualified location string."""
- return google.api_core.path_template.expand(
- "projects/{project}/locations/{location}",
- project=project,
- location=location,
- )
-
- def __init__(
- self,
- transport=None,
- channel=None,
- credentials=None,
- client_config=None,
- client_info=None,
- client_options=None,
- ):
- """Constructor.
-
- Args:
- transport (Union[~.CloudRedisGrpcTransport,
- Callable[[~.Credentials, type], ~.CloudRedisGrpcTransport]): A transport
- instance, responsible for actually making the API calls.
- The default transport uses the gRPC protocol.
- This argument may also be a callable which returns a
- transport instance. Callables will be sent the credentials
- as the first argument and the default transport class as
- the second argument.
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
- through which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is mutually exclusive with providing a
- transport instance to ``transport``; doing so will raise
- an exception.
- client_config (dict): DEPRECATED. A dictionary of call options for
- each method. If not specified, the default configuration is used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- client_options (Union[dict, google.api_core.client_options.ClientOptions]):
- Client options used to set user options on the client. API Endpoint
- should be set through client_options.
- """
- # Raise deprecation warnings for things we want to go away.
- if client_config is not None:
- warnings.warn(
- "The `client_config` argument is deprecated.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
- else:
- client_config = cloud_redis_client_config.config
-
- if channel:
- warnings.warn(
- "The `channel` argument is deprecated; use " "`transport` instead.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
-
- api_endpoint = self.SERVICE_ADDRESS
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- if client_options.api_endpoint:
- api_endpoint = client_options.api_endpoint
-
- # Instantiate the transport.
- # The transport is responsible for handling serialization and
- # deserialization and actually sending data to the service.
- if transport:
- if callable(transport):
- self.transport = transport(
- credentials=credentials,
- default_class=cloud_redis_grpc_transport.CloudRedisGrpcTransport,
- address=api_endpoint,
- )
- else:
- if credentials:
- raise ValueError(
- "Received both a transport instance and "
- "credentials; these are mutually exclusive."
- )
- self.transport = transport
- else:
- self.transport = cloud_redis_grpc_transport.CloudRedisGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
- )
-
- if client_info is None:
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
- )
- else:
- client_info.gapic_version = _GAPIC_LIBRARY_VERSION
- self._client_info = client_info
-
- # Parse out the default settings for retry and timeout for each RPC
- # from the client configuration.
- # (Ordinarily, these are the defaults specified in the `*_config.py`
- # file next to this one.)
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
- )
-
- # Save a dictionary of cached API call functions.
- # These are the actual callables which invoke the proper
- # transport methods, wrapped with `wrap_method` to add retry,
- # timeout, and the like.
- self._inner_api_calls = {}
-
- # Service calls
- def list_instances(
- self,
- parent,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- - ``projects/{project_id}/locations/{location_id}``
-
- If ``location_id`` is specified as ``-`` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_instances(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_instances(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The resource name of the instance location using the form:
- ``projects/{project_id}/locations/{location_id}`` where ``location_id``
- refers to a GCP region.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.redis_v1.types.Instance` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_instances" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_instances"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_instances,
- default_retry=self._method_configs["ListInstances"].retry,
- default_timeout=self._method_configs["ListInstances"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ListInstancesRequest(
- parent=parent, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_instances"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="instances",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def get_instance(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets the details of a specific Redis instance.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.get_instance(name)
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types.Instance` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_instance,
- default_retry=self._method_configs["GetInstance"].retry,
- default_timeout=self._method_configs["GetInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.GetInstanceRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def create_instance(
- self,
- parent,
- instance_id,
- instance,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's `default
- network
`__.
-
- The creation is executed asynchronously and callers may check the
- returned operation to track its progress. Once the operation is
- completed the Redis instance will be fully functional. Completed
- longrunning.Operation will contain the new instance object in the
- response field.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1
- >>> from google.cloud.redis_v1 import enums
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
- >>> instance_id = 'test_instance'
- >>> tier = enums.Instance.Tier.BASIC
- >>> memory_size_gb = 1
- >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb}
- >>>
- >>> response = client.create_instance(parent, instance_id, instance)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- parent (str): Required. The resource name of the instance location using the form:
- ``projects/{project_id}/locations/{location_id}`` where ``location_id``
- refers to a GCP region.
- instance_id (str): Required. The logical name of the Redis instance in the customer project
- with the following restrictions:
-
- - Must contain only lowercase letters, numbers, and hyphens.
- - Must start with a letter.
- - Must be between 1-40 characters.
- - Must end with a number or a letter.
- - Must be unique within the customer project / location
- instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1.types.Instance`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_instance,
- default_retry=self._method_configs["CreateInstance"].retry,
- default_timeout=self._method_configs["CreateInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.CreateInstanceRequest(
- parent=parent, instance_id=instance_id, instance=instance
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["create_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
-
- def update_instance(
- self,
- update_mask,
- instance,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> paths_element = 'display_name'
- >>> paths_element_2 = 'memory_size_gb'
- >>> paths = [paths_element, paths_element_2]
- >>> update_mask = {'paths': paths}
- >>> display_name = 'UpdatedDisplayName'
- >>> name = 'projects//locations//instances/'
- >>> memory_size_gb = 4
- >>> instance = {'display_name': display_name, 'name': name, 'memory_size_gb': memory_size_gb}
- >>>
- >>> response = client.update_instance(update_mask, instance)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- update_mask (Union[dict, ~google.cloud.redis_v1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied
- in this field. The elements of the repeated paths field may only include
- these fields from ``Instance``:
-
- - ``displayName``
- - ``labels``
- - ``memorySizeGb``
- - ``redisConfig``
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1.types.FieldMask`
- instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. Update description. Only fields specified in update\_mask are
- updated.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1.types.Instance`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "update_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "update_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_instance,
- default_retry=self._method_configs["UpdateInstance"].retry,
- default_timeout=self._method_configs["UpdateInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.UpdateInstanceRequest(
- update_mask=update_mask, instance=instance
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("instance.name", instance.name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["update_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
-
- def import_instance(
- self,
- name,
- input_config,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> # TODO: Initialize `input_config`:
- >>> input_config = {}
- >>>
- >>> response = client.import_instance(name, input_config)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- input_config (Union[dict, ~google.cloud.redis_v1.types.InputConfig]): Required. Specify data to be imported.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1.types.InputConfig`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "import_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "import_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.import_instance,
- default_retry=self._method_configs["ImportInstance"].retry,
- default_timeout=self._method_configs["ImportInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ImportInstanceRequest(
- name=name, input_config=input_config
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["import_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
-
- def export_instance(
- self,
- name,
- output_config,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> # TODO: Initialize `output_config`:
- >>> output_config = {}
- >>>
- >>> response = client.export_instance(name, output_config)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- output_config (Union[dict, ~google.cloud.redis_v1.types.OutputConfig]): Required. Specify data to be exported.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1.types.OutputConfig`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "export_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "export_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.export_instance,
- default_retry=self._method_configs["ExportInstance"].retry,
- default_timeout=self._method_configs["ExportInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ExportInstanceRequest(
- name=name, output_config=output_config
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["export_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
-
- def failover_instance(
- self,
- name,
- data_protection_mode=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.failover_instance(name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- data_protection_mode (~google.cloud.redis_v1.types.DataProtectionMode): Optional. Available data protection modes that the user can choose. If
- it's unspecified, data protection mode will be LIMITED\_DATA\_LOSS by
- default.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "failover_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "failover_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.failover_instance,
- default_retry=self._method_configs["FailoverInstance"].retry,
- default_timeout=self._method_configs["FailoverInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.FailoverInstanceRequest(
- name=name, data_protection_mode=data_protection_mode
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["failover_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
-
- def delete_instance(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
-
- Example:
- >>> from google.cloud import redis_v1
- >>>
- >>> client = redis_v1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.delete_instance(name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "delete_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "delete_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.delete_instance,
- default_retry=self._method_configs["DeleteInstance"].retry,
- default_timeout=self._method_configs["DeleteInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.DeleteInstanceRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["delete_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- empty_pb2.Empty,
- metadata_type=cloud_redis_pb2.OperationMetadata,
- )
diff --git a/google/cloud/redis_v1/gapic/cloud_redis_client_config.py b/google/cloud/redis_v1/gapic/cloud_redis_client_config.py
deleted file mode 100644
index 9d02648..0000000
--- a/google/cloud/redis_v1/gapic/cloud_redis_client_config.py
+++ /dev/null
@@ -1,63 +0,0 @@
-config = {
- "interfaces": {
- "google.cloud.redis.v1.CloudRedis": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
- "total_timeout_millis": 600000,
- }
- },
- "methods": {
- "ListInstances": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "GetInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "CreateInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "UpdateInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ImportInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ExportInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "FailoverInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "DeleteInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/cloud/redis_v1/gapic/enums.py b/google/cloud/redis_v1/gapic/enums.py
deleted file mode 100644
index 1292528..0000000
--- a/google/cloud/redis_v1/gapic/enums.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class FailoverInstanceRequest(object):
- class DataProtectionMode(enum.IntEnum):
- """
- Specifies different modes of operation in relation to the data retention.
-
- Attributes:
- DATA_PROTECTION_MODE_UNSPECIFIED (int): Defaults to LIMITED\_DATA\_LOSS if a data protection mode is not
- specified.
- LIMITED_DATA_LOSS (int): Instance failover will be protected with data loss control. More
- specifically, the failover will only be performed if the current
- replication offset diff between master and replica is under a certain
- threshold.
- FORCE_DATA_LOSS (int): Instance failover will be performed without data loss control.
- """
-
- DATA_PROTECTION_MODE_UNSPECIFIED = 0
- LIMITED_DATA_LOSS = 1
- FORCE_DATA_LOSS = 2
-
-
-class Instance(object):
- class ConnectMode(enum.IntEnum):
- """
- Available connection modes.
-
- Attributes:
- CONNECT_MODE_UNSPECIFIED (int): Not set.
- DIRECT_PEERING (int): Connect via directly peering with memorystore redis hosted service.
- PRIVATE_SERVICE_ACCESS (int): Connect with google via private service access and share connection
- across google managed services.
- """
-
- CONNECT_MODE_UNSPECIFIED = 0
- DIRECT_PEERING = 1
- PRIVATE_SERVICE_ACCESS = 2
-
- class State(enum.IntEnum):
- """
- Represents the different states of a Redis instance.
-
- Attributes:
- STATE_UNSPECIFIED (int): Not set.
- CREATING (int): Redis instance is being created.
- READY (int): Redis instance has been created and is fully usable.
- UPDATING (int): Redis instance configuration is being updated. Certain kinds of updates
- may cause the instance to become unusable while the update is in
- progress.
- DELETING (int): Redis instance is being deleted.
- REPAIRING (int): Redis instance is being repaired and may be unusable.
- MAINTENANCE (int): Maintenance is being performed on this Redis instance.
- IMPORTING (int): Redis instance is importing data (availability may be affected).
- FAILING_OVER (int): Redis instance is failing over (availability may be affected).
- """
-
- STATE_UNSPECIFIED = 0
- CREATING = 1
- READY = 2
- UPDATING = 3
- DELETING = 4
- REPAIRING = 5
- MAINTENANCE = 6
- IMPORTING = 8
- FAILING_OVER = 9
-
- class Tier(enum.IntEnum):
- """
- Available service tiers to choose from
-
- Attributes:
- TIER_UNSPECIFIED (int): Not set.
- BASIC (int): BASIC tier: standalone instance
- STANDARD_HA (int): STANDARD\_HA tier: highly available primary/replica instances
- """
-
- TIER_UNSPECIFIED = 0
- BASIC = 1
- STANDARD_HA = 3
diff --git a/google/cloud/redis_v1/gapic/transports/__init__.py b/google/cloud/redis_v1/gapic/transports/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py b/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py
deleted file mode 100644
index 3cb1ef6..0000000
--- a/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py
+++ /dev/null
@@ -1,256 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import google.api_core.grpc_helpers
-import google.api_core.operations_v1
-
-from google.cloud.redis_v1.proto import cloud_redis_pb2_grpc
-
-
-class CloudRedisGrpcTransport(object):
- """gRPC transport class providing stubs for
- google.cloud.redis.v1 CloudRedis API.
-
- The transport provides access to the raw gRPC stubs,
- which can be used to take advantage of advanced
- features of gRPC.
- """
-
- # The scopes needed to make gRPC calls to all of the methods defined
- # in this service.
- _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
-
- def __init__(
- self, channel=None, credentials=None, address="redis.googleapis.com:443"
- ):
- """Instantiate the transport class.
-
- Args:
- channel (grpc.Channel): A ``Channel`` instance through
- which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- address (str): The address where the service is hosted.
- """
- # If both `channel` and `credentials` are specified, raise an
- # exception (channels come with credentials baked in already).
- if channel is not None and credentials is not None:
- raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
- )
-
- # Create the channel.
- if channel is None:
- channel = self.create_channel(
- address=address,
- credentials=credentials,
- options={
- "grpc.max_send_message_length": -1,
- "grpc.max_receive_message_length": -1,
- }.items(),
- )
-
- self._channel = channel
-
- # gRPC uses objects called "stubs" that are bound to the
- # channel and provide a basic method for each RPC.
- self._stubs = {"cloud_redis_stub": cloud_redis_pb2_grpc.CloudRedisStub(channel)}
-
- # Because this API includes a method that returns a
- # long-running operation (proto: google.longrunning.Operation),
- # instantiate an LRO client.
- self._operations_client = google.api_core.operations_v1.OperationsClient(
- channel
- )
-
- @classmethod
- def create_channel(
- cls, address="redis.googleapis.com:443", credentials=None, **kwargs
- ):
- """Create and return a gRPC channel object.
-
- Args:
- address (str): The host for the channel to use.
- credentials (~.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- kwargs (dict): Keyword arguments, which are passed to the
- channel creation.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return google.api_core.grpc_helpers.create_channel(
- address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
- )
-
- @property
- def channel(self):
- """The gRPC channel used by the transport.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return self._channel
-
- @property
- def list_instances(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`.
-
- Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- - ``projects/{project_id}/locations/{location_id}``
-
- If ``location_id`` is specified as ``-`` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ListInstances
-
- @property
- def get_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`.
-
- Gets the details of a specific Redis instance.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].GetInstance
-
- @property
- def create_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`.
-
- Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's `default
- network `__.
-
- The creation is executed asynchronously and callers may check the
- returned operation to track its progress. Once the operation is
- completed the Redis instance will be fully functional. Completed
- longrunning.Operation will contain the new instance object in the
- response field.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].CreateInstance
-
- @property
- def update_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`.
-
- Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].UpdateInstance
-
- @property
- def import_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.import_instance`.
-
- Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ImportInstance
-
- @property
- def export_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.export_instance`.
-
- Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ExportInstance
-
- @property
- def failover_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.failover_instance`.
-
- Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].FailoverInstance
-
- @property
- def delete_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.delete_instance`.
-
- Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].DeleteInstance
diff --git a/google/cloud/redis_v1/proto/__init__.py b/google/cloud/redis_v1/proto/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/google/cloud/redis_v1/proto/cloud_redis.proto b/google/cloud/redis_v1/proto/cloud_redis.proto
index 1b46694..a45e9c0 100644
--- a/google/cloud/redis_v1/proto/cloud_redis.proto
+++ b/google/cloud/redis_v1/proto/cloud_redis.proto
@@ -75,7 +75,7 @@ service CloudRedis {
// Creates a Redis instance based on the specified tier and memory size.
//
// By default, the instance is accessible from the project's
- // [default network](/compute/docs/networks-and-firewalls#networks).
+ // [default network](https://cloud.google.com/vpc/docs/vpc).
//
// The creation is executed asynchronously and callers may check the returned
// operation to track its progress. Once the operation is completed the Redis
@@ -113,6 +113,20 @@ service CloudRedis {
};
}
+ // Upgrades Redis instance to the newer Redis version specified in the
+ // request.
+ rpc UpgradeInstance(UpgradeInstanceRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v1/{name=projects/*/locations/*/instances/*}:upgrade"
+ body: "*"
+ };
+ option (google.api.method_signature) = "name,redis_version";
+ option (google.longrunning.operation_info) = {
+ response_type: "google.cloud.redis.v1.Instance"
+ metadata_type: "google.cloud.redis.v1.OperationMetadata"
+ };
+ }
+
// Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
//
// Redis may stop serving during this operation. Instance state will be
@@ -235,11 +249,12 @@ message Instance {
// Not set.
CONNECT_MODE_UNSPECIFIED = 0;
- // Connect via directly peering with memorystore redis hosted service.
+ // Connect via direct peering to the Memorystore for Redis hosted service.
DIRECT_PEERING = 1;
- // Connect with google via private service access and share connection
- // across google managed services.
+ // Connect your Memorystore for Redis instance using Private Service
+ // Access. Private services access provides an IP address range for multiple
+ // Google Cloud services, including Memorystore.
PRIVATE_SERVICE_ACCESS = 2;
}
@@ -340,7 +355,7 @@ message Instance {
int32 memory_size_gb = 18 [(google.api.field_behavior) = REQUIRED];
// Optional. The full name of the Google Compute Engine
- // [network](/compute/docs/networks-and-firewalls#networks) to which the
+ // [network](https://cloud.google.com/vpc/docs/vpc) to which the
// instance is connected. If left unspecified, the `default` network
// will be used.
string authorized_network = 20 [(google.api.field_behavior) = OPTIONAL];
@@ -352,9 +367,8 @@ message Instance {
// operation.
string persistence_iam_identity = 21 [(google.api.field_behavior) = OUTPUT_ONLY];
- // Optional. The connect mode of Redis instance.
- // If not provided, default one will be used.
- // Current default: DIRECT_PEERING.
+ // Optional. The network connect mode of the Redis instance.
+ // If not provided, the connect mode defaults to DIRECT_PEERING.
ConnectMode connect_mode = 22 [(google.api.field_behavior) = OPTIONAL];
}
@@ -462,6 +476,22 @@ message UpdateInstanceRequest {
Instance instance = 2 [(google.api.field_behavior) = REQUIRED];
}
+// Request for [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance].
+message UpgradeInstanceRequest {
+ // Required. Redis instance resource name using the form:
+ // `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+ // where `location_id` refers to a GCP region.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "redis.googleapis.com/Instance"
+ }
+ ];
+
+ // Required. Specifies the target version of Redis software to upgrade to.
+ string redis_version = 2 [(google.api.field_behavior) = REQUIRED];
+}
+
// Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
message DeleteInstanceRequest {
// Required. Redis instance resource name using the form:
diff --git a/google/cloud/redis_v1/proto/cloud_redis_pb2.py b/google/cloud/redis_v1/proto/cloud_redis_pb2.py
deleted file mode 100644
index d7657f3..0000000
--- a/google/cloud/redis_v1/proto/cloud_redis_pb2.py
+++ /dev/null
@@ -1,2342 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/redis_v1/proto/cloud_redis.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.api import client_pb2 as google_dot_api_dot_client__pb2
-from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
-from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/redis_v1/proto/cloud_redis.proto",
- package="google.cloud.redis.v1",
- syntax="proto3",
- serialized_options=_b(
- "\n\031com.google.cloud.redis.v1B\030CloudRedisServiceV1ProtoP\001Z:google.golang.org/genproto/googleapis/cloud/redis/v1;redis"
- ),
- serialized_pb=_b(
- '\n-google/cloud/redis_v1/proto/cloud_redis.proto\x12\x15google.cloud.redis.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xeb\t\n\x08Instance\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12;\n\x06labels\x18\x03 \x03(\x0b\x32+.google.cloud.redis.v1.Instance.LabelsEntry\x12\x18\n\x0blocation_id\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x61lternative_location_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rredis_version\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11reserved_ip_range\x18\t \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04host\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04port\x18\x0b \x01(\x05\x42\x03\xe0\x41\x03\x12 \n\x13\x63urrent_location_id\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x39\n\x05state\x18\x0e \x01(\x0e\x32%.google.cloud.redis.v1.Instance.StateB\x03\xe0\x41\x03\x12\x1b\n\x0estatus_message\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12M\n\rredis_configs\x18\x10 \x03(\x0b\x32\x31.google.cloud.redis.v1.Instance.RedisConfigsEntryB\x03\xe0\x41\x01\x12\x37\n\x04tier\x18\x11 \x01(\x0e\x32$.google.cloud.redis.v1.Instance.TierB\x03\xe0\x41\x02\x12\x1b\n\x0ememory_size_gb\x18\x12 \x01(\x05\x42\x03\xe0\x41\x02\x12\x1f\n\x12\x61uthorized_network\x18\x14 \x01(\tB\x03\xe0\x41\x01\x12%\n\x18persistence_iam_identity\x18\x15 \x01(\tB\x03\xe0\x41\x03\x12\x46\n\x0c\x63onnect_mode\x18\x16 \x01(\x0e\x32+.google.cloud.redis.v1.Instance.ConnectModeB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\t"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03"[\n\x0b\x43onnectMode\x12\x1c\n\x18\x43ONNECT_MODE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x44IRECT_PEERING\x10\x01\x12\x1a\n\x16PRIVATE_SERVICE_ACCESS\x10\x02:`\xea\x41]\n\x1dredis.googleapis.com/Instance\x12\x82\xd3\xe4\x93\x02/\x12-/v1/{parent=projects/*/locations/*}/instances\xda\x41\x06parent\x12\x97\x01\n\x0bGetInstance\x12).google.cloud.redis.v1.GetInstanceRequest\x1a\x1f.google.cloud.redis.v1.Instance"<\x82\xd3\xe4\x93\x02/\x12-/v1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\x12\x89\x02\n\x0e\x43reateInstance\x12,.google.cloud.redis.v1.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02\x39"-/v1/{parent=projects/*/locations/*}/instances:\x08instance\xda\x41\x1bparent,instance_id,instance\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x8b\x02\n\x0eUpdateInstance\x12,.google.cloud.redis.v1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x42\x32\x36/v1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\xda\x41\x14update_mask,instance\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\xff\x01\n\x0eImportInstance\x12,.google.cloud.redis.v1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instances/*}:import:\x01*\xda\x41\x11name,input_config\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x80\x02\n\x0e\x45xportInstance\x12,.
google.cloud.redis.v1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa0\x01\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instances/*}:export:\x01*\xda\x41\x12name,output_config\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x8d\x02\n\x10\x46\x61iloverInstance\x12..google.cloud.redis.v1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02;"6/v1/{name=projects/*/locations/*/instances/*}:failover:\x01*\xda\x41\x19name,data_protection_mode\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\xde\x01\n\x0e\x44\x65leteInstance\x12,.google.cloud.redis.v1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation"\x7f\x82\xd3\xe4\x93\x02/*-/v1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\xca\x41@\n\x15google.protobuf.Empty\x12\'google.cloud.redis.v1.OperationMetadata\x1aH\xca\x41\x14redis.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBs\n\x19\x63om.google.cloud.redis.v1B\x18\x43loudRedisServiceV1ProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/redis/v1;redisb\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_api_dot_client__pb2.DESCRIPTOR,
- google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
- google_dot_api_dot_resource__pb2.DESCRIPTOR,
- google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_INSTANCE_STATE = _descriptor.EnumDescriptor(
- name="State",
- full_name="google.cloud.redis.v1.Instance.State",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="STATE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="CREATING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="READY", index=2, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UPDATING", index=3, number=3, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DELETING", index=4, number=4, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REPAIRING", index=5, number=5, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="MAINTENANCE", index=6, number=6, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="IMPORTING", index=7, number=8, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="FAILING_OVER", index=8, number=9, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1154,
- serialized_end=1302,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_STATE)
-
-_INSTANCE_TIER = _descriptor.EnumDescriptor(
- name="Tier",
- full_name="google.cloud.redis.v1.Instance.Tier",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="TIER_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="BASIC", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="STANDARD_HA", index=2, number=3, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1304,
- serialized_end=1360,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_TIER)
-
-_INSTANCE_CONNECTMODE = _descriptor.EnumDescriptor(
- name="ConnectMode",
- full_name="google.cloud.redis.v1.Instance.ConnectMode",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="CONNECT_MODE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="DIRECT_PEERING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="PRIVATE_SERVICE_ACCESS",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1362,
- serialized_end=1453,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_CONNECTMODE)
-
-_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE = _descriptor.EnumDescriptor(
- name="DataProtectionMode",
- full_name="google.cloud.redis.v1.FailoverInstanceRequest.DataProtectionMode",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="DATA_PROTECTION_MODE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="LIMITED_DATA_LOSS",
- index=1,
- number=1,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="FORCE_DATA_LOSS",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=2898,
- serialized_end=3000,
-)
-_sym_db.RegisterEnumDescriptor(_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE)
-
-
-_INSTANCE_LABELSENTRY = _descriptor.Descriptor(
- name="LabelsEntry",
- full_name="google.cloud.redis.v1.Instance.LabelsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.cloud.redis.v1.Instance.LabelsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.cloud.redis.v1.Instance.LabelsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1053,
- serialized_end=1098,
-)
-
-_INSTANCE_REDISCONFIGSENTRY = _descriptor.Descriptor(
- name="RedisConfigsEntry",
- full_name="google.cloud.redis.v1.Instance.RedisConfigsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.cloud.redis.v1.Instance.RedisConfigsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.cloud.redis.v1.Instance.RedisConfigsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1100,
- serialized_end=1151,
-)
-
-_INSTANCE = _descriptor.Descriptor(
- name="Instance",
- full_name="google.cloud.redis.v1.Instance",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.cloud.redis.v1.Instance.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="display_name",
- full_name="google.cloud.redis.v1.Instance.display_name",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="labels",
- full_name="google.cloud.redis.v1.Instance.labels",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="location_id",
- full_name="google.cloud.redis.v1.Instance.location_id",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="alternative_location_id",
- full_name="google.cloud.redis.v1.Instance.alternative_location_id",
- index=4,
- number=5,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="redis_version",
- full_name="google.cloud.redis.v1.Instance.redis_version",
- index=5,
- number=7,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="reserved_ip_range",
- full_name="google.cloud.redis.v1.Instance.reserved_ip_range",
- index=6,
- number=9,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="host",
- full_name="google.cloud.redis.v1.Instance.host",
- index=7,
- number=10,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="port",
- full_name="google.cloud.redis.v1.Instance.port",
- index=8,
- number=11,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="current_location_id",
- full_name="google.cloud.redis.v1.Instance.current_location_id",
- index=9,
- number=12,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create_time",
- full_name="google.cloud.redis.v1.Instance.create_time",
- index=10,
- number=13,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.cloud.redis.v1.Instance.state",
- index=11,
- number=14,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="status_message",
- full_name="google.cloud.redis.v1.Instance.status_message",
- index=12,
- number=15,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="redis_configs",
- full_name="google.cloud.redis.v1.Instance.redis_configs",
- index=13,
- number=16,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="tier",
- full_name="google.cloud.redis.v1.Instance.tier",
- index=14,
- number=17,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="memory_size_gb",
- full_name="google.cloud.redis.v1.Instance.memory_size_gb",
- index=15,
- number=18,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="authorized_network",
- full_name="google.cloud.redis.v1.Instance.authorized_network",
- index=16,
- number=20,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="persistence_iam_identity",
- full_name="google.cloud.redis.v1.Instance.persistence_iam_identity",
- index=17,
- number=21,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="connect_mode",
- full_name="google.cloud.redis.v1.Instance.connect_mode",
- index=18,
- number=22,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_INSTANCE_LABELSENTRY, _INSTANCE_REDISCONFIGSENTRY],
- enum_types=[_INSTANCE_STATE, _INSTANCE_TIER, _INSTANCE_CONNECTMODE],
- serialized_options=_b(
- "\352A]\n\035redis.googleapis.com/Instance\022`__ to which
- the instance is connected. If left unspecified, the
- ``default`` network will be used.
- persistence_iam_identity:
- Output only. Cloud IAM identity used by import / export
- operations to transfer data to/from Cloud Storage. Format is
- "serviceAccount:". The value may change over time for a given
- instance so should be checked before each import/export
- operation.
- connect_mode:
- Optional. The connect mode of Redis instance. If not provided,
- default one will be used. Current default: DIRECT\_PEERING.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.Instance)
- ),
-)
-_sym_db.RegisterMessage(Instance)
-_sym_db.RegisterMessage(Instance.LabelsEntry)
-_sym_db.RegisterMessage(Instance.RedisConfigsEntry)
-
-ListInstancesRequest = _reflection.GeneratedProtocolMessageType(
- "ListInstancesRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINSTANCESREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
- Attributes:
- parent:
- Required. The resource name of the instance location using the
- form: ``projects/{project_id}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
- page_size:
- The maximum number of items to return. If not specified, a
- default value of 1000 will be used by the service. Regardless
- of the page\_size value, the response may include a partial
- list and a caller should only rely on response's [``next_page_
- token``][google.cloud.redis.v1.ListInstancesResponse.next\_pag
- e\_token] to determine if there are more instances left to be
- queried.
- page_token:
- The ``next_page_token`` value returned from a previous [ListIn
- stances][google.cloud.redis.v1.CloudRedis.ListInstances]
- request, if any.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ListInstancesRequest)
- ),
-)
-_sym_db.RegisterMessage(ListInstancesRequest)
-
-ListInstancesResponse = _reflection.GeneratedProtocolMessageType(
- "ListInstancesResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINSTANCESRESPONSE,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Response for
- [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
- Attributes:
- instances:
- A list of Redis instances in the project in the specified
- location, or across all locations. If the ``location_id`` in
- the parent field of the request is "-", all regions available
- to the project are queried, and the results aggregated. If in
- such an aggregated query a location is unavailable, a dummy
- Redis entry is included in the response with the ``name``
- field set to a value of the form
- ``projects/{project_id}/locations/{location_id}/instances/``-
- and the ``status`` field set to ERROR and ``status_message``
- field set to "location not available for ListInstances".
- next_page_token:
- Token to retrieve the next page of results, or empty if there
- are no more results in the list.
- unreachable:
- Locations that could not be reached.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ListInstancesResponse)
- ),
-)
-_sym_db.RegisterMessage(ListInstancesResponse)
-
-GetInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "GetInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.GetInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(GetInstanceRequest)
-
-CreateInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "CreateInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
- Attributes:
- parent:
- Required. The resource name of the instance location using the
- form: ``projects/{project_id}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
- instance_id:
- Required. The logical name of the Redis instance in the
- customer project with the following restrictions: - Must
- contain only lowercase letters, numbers, and hyphens. - Must
- start with a letter. - Must be between 1-40 characters. -
- Must end with a number or a letter. - Must be unique within
- the customer project / location
- instance:
- Required. A Redis [Instance] resource
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.CreateInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(CreateInstanceRequest)
-
-UpdateInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "UpdateInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance].
- Attributes:
- update_mask:
- Required. Mask of fields to update. At least one path must be
- supplied in this field. The elements of the repeated paths
- field may only include these fields from
- [Instance][google.cloud.redis.v1.Instance]: -
- ``displayName`` - ``labels`` - ``memorySizeGb`` -
- ``redisConfig``
- instance:
- Required. Update description. Only fields specified in
- update\_mask are updated.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.UpdateInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(UpdateInstanceRequest)
-
-DeleteInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "DeleteInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.DeleteInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(DeleteInstanceRequest)
-
-GcsSource = _reflection.GeneratedProtocolMessageType(
- "GcsSource",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GCSSOURCE,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""The Cloud Storage location for the input content
- Attributes:
- uri:
- Required. Source data URI. (e.g.
- 'gs://my\_bucket/my\_object').
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.GcsSource)
- ),
-)
-_sym_db.RegisterMessage(GcsSource)
-
-InputConfig = _reflection.GeneratedProtocolMessageType(
- "InputConfig",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INPUTCONFIG,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""The input content
- Attributes:
- source:
- Required. Specify source location of input data
- gcs_source:
- Google Cloud Storage location where input content is located.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.InputConfig)
- ),
-)
-_sym_db.RegisterMessage(InputConfig)
-
-ImportInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "ImportInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_IMPORTINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for [Import][google.cloud.redis.v1.CloudRedis.ImportInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- input_config:
- Required. Specify data to be imported.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ImportInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(ImportInstanceRequest)
-
-GcsDestination = _reflection.GeneratedProtocolMessageType(
- "GcsDestination",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GCSDESTINATION,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""The Cloud Storage location for the output content
- Attributes:
- uri:
- Required. Data destination URI (e.g.
- 'gs://my\_bucket/my\_object'). Existing files will be
- overwritten.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.GcsDestination)
- ),
-)
-_sym_db.RegisterMessage(GcsDestination)
-
-OutputConfig = _reflection.GeneratedProtocolMessageType(
- "OutputConfig",
- (_message.Message,),
- dict(
- DESCRIPTOR=_OUTPUTCONFIG,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""The output content
- Attributes:
- destination:
- Required. Specify destination location of output data
- gcs_destination:
- Google Cloud Storage destination for output content.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.OutputConfig)
- ),
-)
-_sym_db.RegisterMessage(OutputConfig)
-
-ExportInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "ExportInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXPORTINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- output_config:
- Required. Specify data to be exported.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ExportInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(ExportInstanceRequest)
-
-FailoverInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "FailoverInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FAILOVERINSTANCEREQUEST,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- data_protection_mode:
- Optional. Available data protection modes that the user can
- choose. If it's unspecified, data protection mode will be
- LIMITED\_DATA\_LOSS by default.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.FailoverInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(FailoverInstanceRequest)
-
-OperationMetadata = _reflection.GeneratedProtocolMessageType(
- "OperationMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_OPERATIONMETADATA,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Represents the v1 metadata of the long-running operation.
- Attributes:
- create_time:
- Creation timestamp.
- end_time:
- End timestamp.
- target:
- Operation target.
- verb:
- Operation verb.
- status_detail:
- Operation status details.
- cancel_requested:
- Specifies if cancellation was requested for the operation.
- api_version:
- API version.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.OperationMetadata)
- ),
-)
-_sym_db.RegisterMessage(OperationMetadata)
-
-LocationMetadata = _reflection.GeneratedProtocolMessageType(
- "LocationMetadata",
- (_message.Message,),
- dict(
- AvailableZonesEntry=_reflection.GeneratedProtocolMessageType(
- "AvailableZonesEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LOCATIONMETADATA_AVAILABLEZONESENTRY,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.LocationMetadata.AvailableZonesEntry)
- ),
- ),
- DESCRIPTOR=_LOCATIONMETADATA,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""This location metadata represents additional configuration options for
- a given location where a Redis instance may be created. All fields are
- output only. It is returned as content of the
- ``google.cloud.location.Location.metadata`` field.
- Attributes:
- available_zones:
- Output only. The set of available zones in the location. The
- map is keyed by the lowercase ID of each zone, as defined by
- GCE. These keys can be specified in ``location_id`` or
- ``alternative_location_id`` fields when creating a Redis
- instance.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.LocationMetadata)
- ),
-)
-_sym_db.RegisterMessage(LocationMetadata)
-_sym_db.RegisterMessage(LocationMetadata.AvailableZonesEntry)
-
-ZoneMetadata = _reflection.GeneratedProtocolMessageType(
- "ZoneMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ZONEMETADATA,
- __module__="google.cloud.redis_v1.proto.cloud_redis_pb2",
- __doc__="""Defines specific information for a particular zone. Currently empty
- and reserved for future use only.""",
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1.ZoneMetadata)
- ),
-)
-_sym_db.RegisterMessage(ZoneMetadata)
-
-
-DESCRIPTOR._options = None
-_INSTANCE_LABELSENTRY._options = None
-_INSTANCE_REDISCONFIGSENTRY._options = None
-_INSTANCE.fields_by_name["name"]._options = None
-_INSTANCE.fields_by_name["location_id"]._options = None
-_INSTANCE.fields_by_name["alternative_location_id"]._options = None
-_INSTANCE.fields_by_name["redis_version"]._options = None
-_INSTANCE.fields_by_name["reserved_ip_range"]._options = None
-_INSTANCE.fields_by_name["host"]._options = None
-_INSTANCE.fields_by_name["port"]._options = None
-_INSTANCE.fields_by_name["current_location_id"]._options = None
-_INSTANCE.fields_by_name["create_time"]._options = None
-_INSTANCE.fields_by_name["state"]._options = None
-_INSTANCE.fields_by_name["status_message"]._options = None
-_INSTANCE.fields_by_name["redis_configs"]._options = None
-_INSTANCE.fields_by_name["tier"]._options = None
-_INSTANCE.fields_by_name["memory_size_gb"]._options = None
-_INSTANCE.fields_by_name["authorized_network"]._options = None
-_INSTANCE.fields_by_name["persistence_iam_identity"]._options = None
-_INSTANCE.fields_by_name["connect_mode"]._options = None
-_INSTANCE._options = None
-_LISTINSTANCESREQUEST.fields_by_name["parent"]._options = None
-_GETINSTANCEREQUEST.fields_by_name["name"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["parent"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["instance_id"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["instance"]._options = None
-_UPDATEINSTANCEREQUEST.fields_by_name["update_mask"]._options = None
-_UPDATEINSTANCEREQUEST.fields_by_name["instance"]._options = None
-_DELETEINSTANCEREQUEST.fields_by_name["name"]._options = None
-_GCSSOURCE.fields_by_name["uri"]._options = None
-_IMPORTINSTANCEREQUEST.fields_by_name["name"]._options = None
-_IMPORTINSTANCEREQUEST.fields_by_name["input_config"]._options = None
-_GCSDESTINATION.fields_by_name["uri"]._options = None
-_EXPORTINSTANCEREQUEST.fields_by_name["name"]._options = None
-_EXPORTINSTANCEREQUEST.fields_by_name["output_config"]._options = None
-_FAILOVERINSTANCEREQUEST.fields_by_name["name"]._options = None
-_FAILOVERINSTANCEREQUEST.fields_by_name["data_protection_mode"]._options = None
-_LOCATIONMETADATA_AVAILABLEZONESENTRY._options = None
-_LOCATIONMETADATA.fields_by_name["available_zones"]._options = None
-
-_CLOUDREDIS = _descriptor.ServiceDescriptor(
- name="CloudRedis",
- full_name="google.cloud.redis.v1.CloudRedis",
- file=DESCRIPTOR,
- index=0,
- serialized_options=_b(
- "\312A\024redis.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform"
- ),
- serialized_start=3440,
- serialized_end=5405,
- methods=[
- _descriptor.MethodDescriptor(
- name="ListInstances",
- full_name="google.cloud.redis.v1.CloudRedis.ListInstances",
- index=0,
- containing_service=None,
- input_type=_LISTINSTANCESREQUEST,
- output_type=_LISTINSTANCESRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\002/\022-/v1/{parent=projects/*/locations/*}/instances\332A\006parent"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="GetInstance",
- full_name="google.cloud.redis.v1.CloudRedis.GetInstance",
- index=1,
- containing_service=None,
- input_type=_GETINSTANCEREQUEST,
- output_type=_INSTANCE,
- serialized_options=_b(
- "\202\323\344\223\002/\022-/v1/{name=projects/*/locations/*/instances/*}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="CreateInstance",
- full_name="google.cloud.redis.v1.CloudRedis.CreateInstance",
- index=2,
- containing_service=None,
- input_type=_CREATEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\0029\"-/v1/{parent=projects/*/locations/*}/instances:\010instance\332A\033parent,instance_id,instance\312AI\n\036google.cloud.redis.v1.Instance\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="UpdateInstance",
- full_name="google.cloud.redis.v1.CloudRedis.UpdateInstance",
- index=3,
- containing_service=None,
- input_type=_UPDATEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002B26/v1/{instance.name=projects/*/locations/*/instances/*}:\010instance\332A\024update_mask,instance\312AI\n\036google.cloud.redis.v1.Instance\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ImportInstance",
- full_name="google.cloud.redis.v1.CloudRedis.ImportInstance",
- index=4,
- containing_service=None,
- input_type=_IMPORTINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\0029\"4/v1/{name=projects/*/locations/*/instances/*}:import:\001*\332A\021name,input_config\312AI\n\036google.cloud.redis.v1.Instance\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ExportInstance",
- full_name="google.cloud.redis.v1.CloudRedis.ExportInstance",
- index=5,
- containing_service=None,
- input_type=_EXPORTINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\0029\"4/v1/{name=projects/*/locations/*/instances/*}:export:\001*\332A\022name,output_config\312AI\n\036google.cloud.redis.v1.Instance\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="FailoverInstance",
- full_name="google.cloud.redis.v1.CloudRedis.FailoverInstance",
- index=6,
- containing_service=None,
- input_type=_FAILOVERINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002;\"6/v1/{name=projects/*/locations/*/instances/*}:failover:\001*\332A\031name,data_protection_mode\312AI\n\036google.cloud.redis.v1.Instance\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="DeleteInstance",
- full_name="google.cloud.redis.v1.CloudRedis.DeleteInstance",
- index=7,
- containing_service=None,
- input_type=_DELETEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002/*-/v1/{name=projects/*/locations/*/instances/*}\332A\004name\312A@\n\025google.protobuf.Empty\022'google.cloud.redis.v1.OperationMetadata"
- ),
- ),
- ],
-)
-_sym_db.RegisterServiceDescriptor(_CLOUDREDIS)
-
-DESCRIPTOR.services_by_name["CloudRedis"] = _CLOUDREDIS
-
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/redis_v1/proto/cloud_redis_pb2_grpc.py b/google/cloud/redis_v1/proto/cloud_redis_pb2_grpc.py
deleted file mode 100644
index 42829d3..0000000
--- a/google/cloud/redis_v1/proto/cloud_redis_pb2_grpc.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.cloud.redis_v1.proto import (
- cloud_redis_pb2 as google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-
-
-class CloudRedisStub(object):
- """Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1
-
- The `redis.googleapis.com` service implements the Google Cloud Memorystore
- for Redis API and defines the following resource model for managing Redis
- instances:
- * The service works with a collection of cloud projects, named: `/projects/*`
- * Each project has a collection of available locations, named: `/locations/*`
- * Each location has a collection of Redis instances, named: `/instances/*`
- * As such, Redis instances are resources of the form:
- `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
-
- Note that location_id must be referring to a GCP `region`; for example:
- * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.ListInstances = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/ListInstances",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.FromString,
- )
- self.GetInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/GetInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.Instance.FromString,
- )
- self.CreateInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/CreateInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.UpdateInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/UpdateInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ImportInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/ImportInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ImportInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ExportInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/ExportInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ExportInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.FailoverInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/FailoverInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.FailoverInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.DeleteInstance = channel.unary_unary(
- "/google.cloud.redis.v1.CloudRedis/DeleteInstance",
- request_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
-
-
-class CloudRedisServicer(object):
- """Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1
-
- The `redis.googleapis.com` service implements the Google Cloud Memorystore
- for Redis API and defines the following resource model for managing Redis
- instances:
- * The service works with a collection of cloud projects, named: `/projects/*`
- * Each project has a collection of available locations, named: `/locations/*`
- * Each location has a collection of Redis instances, named: `/instances/*`
- * As such, Redis instances are resources of the form:
- `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
-
- Note that location_id must be referring to a GCP `region`; for example:
- * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
- """
-
- def ListInstances(self, request, context):
- """Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- * `projects/{project_id}/locations/{location_id}`
-
- If `location_id` is specified as `-` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetInstance(self, request, context):
- """Gets the details of a specific Redis instance.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def CreateInstance(self, request, context):
- """Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's
- [default network](/compute/docs/networks-and-firewalls#networks).
-
- The creation is executed asynchronously and callers may check the returned
- operation to track its progress. Once the operation is completed the Redis
- instance will be fully functional. Completed longrunning.Operation will
- contain the new instance object in the response field.
-
- The returned operation is automatically deleted after a few hours, so there
- is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def UpdateInstance(self, request, context):
- """Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ImportInstance(self, request, context):
- """Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ExportInstance(self, request, context):
- """Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def FailoverInstance(self, request, context):
- """Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteInstance(self, request, context):
- """Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
-
-def add_CloudRedisServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "ListInstances": grpc.unary_unary_rpc_method_handler(
- servicer.ListInstances,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.FromString,
- response_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.SerializeToString,
- ),
- "GetInstance": grpc.unary_unary_rpc_method_handler(
- servicer.GetInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.FromString,
- response_serializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.Instance.SerializeToString,
- ),
- "CreateInstance": grpc.unary_unary_rpc_method_handler(
- servicer.CreateInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "UpdateInstance": grpc.unary_unary_rpc_method_handler(
- servicer.UpdateInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ImportInstance": grpc.unary_unary_rpc_method_handler(
- servicer.ImportInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ImportInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ExportInstance": grpc.unary_unary_rpc_method_handler(
- servicer.ExportInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.ExportInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "FailoverInstance": grpc.unary_unary_rpc_method_handler(
- servicer.FailoverInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.FailoverInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "DeleteInstance": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.cloud.redis.v1.CloudRedis", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/redis_v1/py.typed b/google/cloud/redis_v1/py.typed
new file mode 100644
index 0000000..960151e
--- /dev/null
+++ b/google/cloud/redis_v1/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-redis package uses inline types.
diff --git a/google/cloud/redis.py b/google/cloud/redis_v1/services/__init__.py
similarity index 66%
rename from google/cloud/redis.py
rename to google/cloud/redis_v1/services/__init__.py
index b30f4a9..42ffdf2 100644
--- a/google/cloud/redis.py
+++ b/google/cloud/redis_v1/services/__init__.py
@@ -1,25 +1,16 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-from __future__ import absolute_import
-
-from google.cloud.redis_v1 import CloudRedisClient
-from google.cloud.redis_v1 import enums
-from google.cloud.redis_v1 import types
-
-
-__all__ = ("enums", "types", "CloudRedisClient")
+#
diff --git a/google/__init__.py b/google/cloud/redis_v1/services/cloud_redis/__init__.py
similarity index 71%
rename from google/__init__.py
rename to google/cloud/redis_v1/services/cloud_redis/__init__.py
index 9a1b64a..e66c1aa 100644
--- a/google/__init__.py
+++ b/google/cloud/redis_v1/services/cloud_redis/__init__.py
@@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
+from .client import CloudRedisClient
+from .async_client import CloudRedisAsyncClient
- __path__ = pkgutil.extend_path(__path__, __name__)
+__all__ = (
+ "CloudRedisClient",
+ "CloudRedisAsyncClient",
+)
diff --git a/google/cloud/redis_v1/services/cloud_redis/async_client.py b/google/cloud/redis_v1/services/cloud_redis/async_client.py
new file mode 100644
index 0000000..87e7a30
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/async_client.py
@@ -0,0 +1,1008 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import builtins
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation
+from google.api_core import operation_async
+from google.cloud.redis_v1.services.cloud_redis import pagers
+from google.cloud.redis_v1.types import cloud_redis
+from google.protobuf import empty_pb2 as empty # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+from .client import CloudRedisClient
+
+
+class CloudRedisAsyncClient:
+ """Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+ """
+
+ _client: CloudRedisClient
+
+ DEFAULT_ENDPOINT = CloudRedisClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = CloudRedisClient.DEFAULT_MTLS_ENDPOINT
+
+ instance_path = staticmethod(CloudRedisClient.instance_path)
+ parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path)
+
+ from_service_account_file = CloudRedisClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ get_transport_class = functools.partial(
+ type(CloudRedisClient).get_transport_class, type(CloudRedisClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, CloudRedisTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the cloud redis client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.CloudRedisTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = CloudRedisClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def list_instances(
+ self,
+ request: cloud_redis.ListInstancesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListInstancesAsyncPager:
+ r"""Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Args:
+ request (:class:`~.cloud_redis.ListInstancesRequest`):
+ The request object. Request for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListInstancesAsyncPager:
+ Response for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([parent]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ListInstancesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_instances,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListInstancesAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_instance(
+ self,
+ request: cloud_redis.GetInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloud_redis.Instance:
+ r"""Gets the details of a specific Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.GetInstanceRequest`):
+ The request object. Request for
+ [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cloud_redis.Instance:
+ A Google Cloud Redis instance.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.GetInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def create_instance(
+ self,
+ request: cloud_redis.CreateInstanceRequest = None,
+ *,
+ parent: str = None,
+ instance_id: str = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network `__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.CreateInstanceRequest`):
+ The request object. Request for
+ [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (:class:`str`):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. A Redis [Instance] resource
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([parent, instance_id, instance]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.CreateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_instance(
+ self,
+ request: cloud_redis.UpdateInstanceRequest = None,
+ *,
+ update_mask: field_mask.FieldMask = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.UpdateInstanceRequest`):
+ The request object. Request for
+ [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance].
+ update_mask (:class:`~.field_mask.FieldMask`):
+ Required. Mask of fields to update. At least one path
+ must be supplied in this field. The elements of the
+ repeated paths field may only include these fields from
+ [Instance][google.cloud.redis.v1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([update_mask, instance]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.UpdateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if update_mask is not None:
+ request.update_mask = update_mask
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def upgrade_instance(
+ self,
+ request: cloud_redis.UpgradeInstanceRequest = None,
+ *,
+ name: str = None,
+ redis_version: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Args:
+ request (:class:`~.cloud_redis.UpgradeInstanceRequest`):
+ The request object. Request for
+ [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ redis_version (:class:`str`):
+ Required. Specifies the target
+ version of Redis software to upgrade to.
+ This corresponds to the ``redis_version`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, redis_version]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.UpgradeInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if redis_version is not None:
+ request.redis_version = redis_version
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.upgrade_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def import_instance(
+ self,
+ request: cloud_redis.ImportInstanceRequest = None,
+ *,
+ name: str = None,
+ input_config: cloud_redis.InputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ImportInstanceRequest`):
+ The request object. Request for
+ [Import][google.cloud.redis.v1.CloudRedis.ImportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`~.cloud_redis.InputConfig`):
+ Required. Specify data to be
+ imported.
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, input_config]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ImportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if input_config is not None:
+ request.input_config = input_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.import_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def export_instance(
+ self,
+ request: cloud_redis.ExportInstanceRequest = None,
+ *,
+ name: str = None,
+ output_config: cloud_redis.OutputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ExportInstanceRequest`):
+ The request object. Request for
+ [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ output_config (:class:`~.cloud_redis.OutputConfig`):
+ Required. Specify data to be
+ exported.
+ This corresponds to the ``output_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, output_config]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ExportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if output_config is not None:
+ request.output_config = output_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.export_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def failover_instance(
+ self,
+ request: cloud_redis.FailoverInstanceRequest = None,
+ *,
+ name: str = None,
+ data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.FailoverInstanceRequest`):
+ The request object. Request for
+ [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ data_protection_mode (:class:`~.cloud_redis.FailoverInstanceRequest.DataProtectionMode`):
+ Optional. Available data protection modes that the user
+ can choose. If it's unspecified, data protection mode
+ will be LIMITED_DATA_LOSS by default.
+ This corresponds to the ``data_protection_mode`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, data_protection_mode]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.FailoverInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if data_protection_mode is not None:
+ request.data_protection_mode = data_protection_mode
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.failover_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_instance(
+ self,
+ request: cloud_redis.DeleteInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Args:
+ request (:class:`~.cloud_redis.DeleteInstanceRequest`):
+ The request object. Request for
+ [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.empty.Empty``: A generic empty message that
+ you can re-use to avoid defining duplicated empty
+ messages in your APIs. A typical example is to use it as
+ the request or the response type of an API method. For
+ instance:
+
+ ::
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+
+ The JSON representation for ``Empty`` is empty JSON
+ object ``{}``.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.DeleteInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty.Empty,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
# Resolve the gapic version for the user-agent string sent with requests.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
    )
except pkg_resources.DistributionNotFound:
    # Package not pip-installed (e.g. generated code run straight from
    # source): fall back to a ClientInfo without a gapic version rather
    # than failing at import time.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


# Public surface of this module.
__all__ = ("CloudRedisAsyncClient",)
diff --git a/google/cloud/redis_v1/services/cloud_redis/client.py b/google/cloud/redis_v1/services/cloud_redis/client.py
new file mode 100644
index 0000000..b09d1ae
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/client.py
@@ -0,0 +1,1185 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import builtins
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation
+from google.api_core import operation_async
+from google.cloud.redis_v1.services.cloud_redis import pagers
+from google.cloud.redis_v1.types import cloud_redis
+from google.protobuf import empty_pb2 as empty # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import CloudRedisGrpcTransport
+from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+
+
class CloudRedisClientMeta(type):
    """Metaclass for the CloudRedis client.

    Builds and retrieves class-level support objects (e.g. the transport)
    so that client instances themselves stay uncluttered.
    """

    _transport_registry = OrderedDict()  # type: Dict[str, Type[CloudRedisTransport]]
    _transport_registry["grpc"] = CloudRedisGrpcTransport
    _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport

    def get_transport_class(cls, label: str = None,) -> Type[CloudRedisTransport]:
        """Return an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # No explicit choice: fall back to the first registered transport
        # (registration order is preserved by the OrderedDict).
        if not label:
            return next(iter(cls._transport_registry.values()))

        # Honor the caller's explicit transport selection.
        return cls._transport_registry[label]
+
+
+class CloudRedisClient(metaclass=CloudRedisClientMeta):
+ """Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
    # Default (non-mTLS) service endpoint.
    DEFAULT_ENDPOINT = "redis.googleapis.com"
    # Derived mTLS endpoint; ``__func__`` unwraps the staticmethod so it
    # can be called during class-body evaluation.
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ {@api.name}: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @staticmethod
+ def instance_path(project: str, location: str, instance: str,) -> str:
+ """Return a fully-qualified instance string."""
+ return "projects/{project}/locations/{location}/instances/{instance}".format(
+ project=project, location=location, instance=instance,
+ )
+
+ @staticmethod
+ def parse_instance_path(path: str) -> Dict[str, str]:
+ """Parse a instance path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
    def __init__(
        self,
        *,
        credentials: credentials.Credentials = None,
        transport: Union[str, CloudRedisTransport] = None,
        client_options: ClientOptions = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the cloud redis client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.CloudRedisTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (ClientOptions): Custom options for the client. It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept either a dict or a ClientOptions
        # instance; default to an empty ClientOptions.
        if isinstance(client_options, dict):
            client_options = ClientOptions.from_dict(client_options)
        if client_options is None:
            client_options = ClientOptions.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        # NOTE(review): strtobool raises ValueError on values other than
        # true/false-like strings — presumably intentional fail-fast; confirm.
        use_client_cert = bool(
            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
        )

        ssl_credentials = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # Caller supplied an explicit cert source; build channel
                # credentials from it. grpc is imported lazily here.
                import grpc  # type: ignore

                cert, key = client_options.client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
                is_mtls = True
            else:
                # Fall back to the default client certificate, if present.
                creds = SslCredentials()
                is_mtls = creds.is_mtls
                ssl_credentials = creds.ssl_credentials if is_mtls else None

        # Figure out which api endpoint to use.
        # An explicit api_endpoint always wins over the environment variable.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                # "auto": use mTLS endpoint only when a client cert is in play.
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, CloudRedisTransport):
            # transport is a CloudRedisTransport instance.
            # A pre-built transport already carries credentials and scopes;
            # passing them again would be ambiguous, so reject that.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its scopes directly."
                )
            self._transport = transport
        else:
            # transport is a registry label (or None for the default).
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                ssl_channel_credentials=ssl_credentials,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
            )
+
+ def list_instances(
+ self,
+ request: cloud_redis.ListInstancesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListInstancesPager:
+ r"""Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Args:
+ request (:class:`~.cloud_redis.ListInstancesRequest`):
+ The request object. Request for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListInstancesPager:
+ Response for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.ListInstancesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.ListInstancesRequest):
+ request = cloud_redis.ListInstancesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstancesPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_instance(
+ self,
+ request: cloud_redis.GetInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloud_redis.Instance:
+ r"""Gets the details of a specific Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.GetInstanceRequest`):
+ The request object. Request for
+ [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cloud_redis.Instance:
+ A Google Cloud Redis instance.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.GetInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.GetInstanceRequest):
+ request = cloud_redis.GetInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def create_instance(
+ self,
+ request: cloud_redis.CreateInstanceRequest = None,
+ *,
+ parent: str = None,
+ instance_id: str = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network `__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.CreateInstanceRequest`):
+ The request object. Request for
+ [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (:class:`str`):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. A Redis [Instance] resource
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([parent, instance_id, instance])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.CreateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.CreateInstanceRequest):
+ request = cloud_redis.CreateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_instance(
+ self,
+ request: cloud_redis.UpdateInstanceRequest = None,
+ *,
+ update_mask: field_mask.FieldMask = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.UpdateInstanceRequest`):
+ The request object. Request for
+ [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance].
+ update_mask (:class:`~.field_mask.FieldMask`):
+ Required. Mask of fields to update. At least one path
+ must be supplied in this field. The elements of the
+ repeated paths field may only include these fields from
+ [Instance][google.cloud.redis.v1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([update_mask, instance])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.UpdateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.UpdateInstanceRequest):
+ request = cloud_redis.UpdateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if update_mask is not None:
+ request.update_mask = update_mask
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def upgrade_instance(
+ self,
+ request: cloud_redis.UpgradeInstanceRequest = None,
+ *,
+ name: str = None,
+ redis_version: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Args:
+ request (:class:`~.cloud_redis.UpgradeInstanceRequest`):
+ The request object. Request for
+ [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ redis_version (:class:`str`):
+ Required. Specifies the target
+ version of Redis software to upgrade to.
+ This corresponds to the ``redis_version`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, redis_version])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.UpgradeInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.UpgradeInstanceRequest):
+ request = cloud_redis.UpgradeInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if redis_version is not None:
+ request.redis_version = redis_version
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.upgrade_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def import_instance(
+ self,
+ request: cloud_redis.ImportInstanceRequest = None,
+ *,
+ name: str = None,
+ input_config: cloud_redis.InputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ImportInstanceRequest`):
+ The request object. Request for
+ [Import][google.cloud.redis.v1.CloudRedis.ImportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`~.cloud_redis.InputConfig`):
+ Required. Specify data to be
+ imported.
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, input_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.ImportInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.ImportInstanceRequest):
+ request = cloud_redis.ImportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if input_config is not None:
+ request.input_config = input_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.import_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def export_instance(
+ self,
+ request: cloud_redis.ExportInstanceRequest = None,
+ *,
+ name: str = None,
+ output_config: cloud_redis.OutputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ExportInstanceRequest`):
+ The request object. Request for
+ [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ output_config (:class:`~.cloud_redis.OutputConfig`):
+ Required. Specify data to be
+ exported.
+ This corresponds to the ``output_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, output_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.ExportInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.ExportInstanceRequest):
+ request = cloud_redis.ExportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if output_config is not None:
+ request.output_config = output_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.export_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def failover_instance(
+ self,
+ request: cloud_redis.FailoverInstanceRequest = None,
+ *,
+ name: str = None,
+ data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.FailoverInstanceRequest`):
+ The request object. Request for
+ [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ data_protection_mode (:class:`~.cloud_redis.FailoverInstanceRequest.DataProtectionMode`):
+ Optional. Available data protection modes that the user
+ can choose. If it's unspecified, data protection mode
+ will be LIMITED_DATA_LOSS by default.
+ This corresponds to the ``data_protection_mode`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, data_protection_mode])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.FailoverInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.FailoverInstanceRequest):
+ request = cloud_redis.FailoverInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if data_protection_mode is not None:
+ request.data_protection_mode = data_protection_mode
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.failover_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_instance(
+ self,
+ request: cloud_redis.DeleteInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Args:
+ request (:class:`~.cloud_redis.DeleteInstanceRequest`):
+ The request object. Request for
+ [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.empty.Empty``: A generic empty message that
+ you can re-use to avoid defining duplicated empty
+ messages in your APIs. A typical example is to use it as
+ the request or the response type of an API method. For
+ instance:
+
+ ::
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+
+ The JSON representation for ``Empty`` is empty JSON
+ object ``{}``.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.DeleteInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.DeleteInstanceRequest):
+ request = cloud_redis.DeleteInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ empty.Empty,
+ metadata_type=cloud_redis.OperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
try:
    # Advertise the installed distribution's version in the client info
    # sent with every request.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
    )
except pkg_resources.DistributionNotFound:
    # The package is not pip-installed (e.g. code was generated and run
    # in place); fall back to a version-less ClientInfo.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("CloudRedisClient",)
diff --git a/google/cloud/redis_v1/services/cloud_redis/pagers.py b/google/cloud/redis_v1/services/cloud_redis/pagers.py
new file mode 100644
index 0000000..77af010
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/pagers.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+
+from google.cloud.redis_v1.types import cloud_redis
+
+
class ListInstancesPager:
    """A pager for iterating through ``list_instances`` requests.

    This class thinly wraps an initial
    :class:`~.cloud_redis.ListInstancesResponse` object, and
    provides an ``__iter__`` method that walks the ``instances``
    field across pages, issuing further ``ListInstances`` requests
    as ``next_page_token`` values are encountered.

    All the usual :class:`~.cloud_redis.ListInstancesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., cloud_redis.ListInstancesResponse],
        request: cloud_redis.ListInstancesRequest,
        response: cloud_redis.ListInstancesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.cloud_redis.ListInstancesRequest`):
                The initial request object.
            response (:class:`~.cloud_redis.ListInstancesResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not leak to the caller.
        self._request = cloud_redis.ListInstancesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[cloud_redis.ListInstancesResponse]:
        # Yield the current page, then lazily fetch subsequent pages.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[cloud_redis.Instance]:
        # Flatten the instances across all pages.
        return (instance for page in self.pages for instance in page.instances)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
class ListInstancesAsyncPager:
    """A pager for iterating through ``list_instances`` requests.

    This class thinly wraps an initial
    :class:`~.cloud_redis.ListInstancesResponse` object, and
    provides an ``__aiter__`` method that walks the ``instances``
    field across pages, awaiting further ``ListInstances`` requests
    as ``next_page_token`` values are encountered.

    All the usual :class:`~.cloud_redis.ListInstancesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]],
        request: cloud_redis.ListInstancesRequest,
        response: cloud_redis.ListInstancesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.cloud_redis.ListInstancesRequest`):
                The initial request object.
            response (:class:`~.cloud_redis.ListInstancesResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not leak to the caller.
        self._request = cloud_redis.ListInstancesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[cloud_redis.ListInstancesResponse]:
        # Yield the current page, then lazily await subsequent pages.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterable[cloud_redis.Instance]:
        # Flatten the instances across all pages.
        async def flatten():
            async for page in self.pages:
                for instance in page.instances:
                    yield instance

        return flatten()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py
new file mode 100644
index 0000000..3b0088e
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import CloudRedisTransport
+from .grpc import CloudRedisGrpcTransport
+from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]]
+_transport_registry["grpc"] = CloudRedisGrpcTransport
+_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport
+
+
+__all__ = (
+ "CloudRedisTransport",
+ "CloudRedisGrpcTransport",
+ "CloudRedisGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/google/cloud/redis_v1/services/cloud_redis/transports/base.py
new file mode 100644
index 0000000..f0fc19e
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/transports/base.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.cloud.redis_v1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class CloudRedisTransport(abc.ABC):
+ """Abstract transport class for CloudRedis."""
+
+ AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+ def __init__(
+ self,
+ *,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_instances: gapic_v1.method.wrap_method(
+ self.list_instances, default_timeout=600.0, client_info=client_info,
+ ),
+ self.get_instance: gapic_v1.method.wrap_method(
+ self.get_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.create_instance: gapic_v1.method.wrap_method(
+ self.create_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.update_instance: gapic_v1.method.wrap_method(
+ self.update_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.upgrade_instance: gapic_v1.method.wrap_method(
+ self.upgrade_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.import_instance: gapic_v1.method.wrap_method(
+ self.import_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.export_instance: gapic_v1.method.wrap_method(
+ self.export_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.failover_instance: gapic_v1.method.wrap_method(
+ self.failover_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ self.delete_instance: gapic_v1.method.wrap_method(
+ self.delete_instance, default_timeout=600.0, client_info=client_info,
+ ),
+ }
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Return the client designed to process long-running operations."""
+ raise NotImplementedError()
+
+ @property
+ def list_instances(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.ListInstancesRequest],
+ typing.Union[
+ cloud_redis.ListInstancesResponse,
+ typing.Awaitable[cloud_redis.ListInstancesResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.GetInstanceRequest],
+ typing.Union[cloud_redis.Instance, typing.Awaitable[cloud_redis.Instance]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def create_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.CreateInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def update_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.UpdateInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def upgrade_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.UpgradeInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def import_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.ImportInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def export_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.ExportInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def failover_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.FailoverInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_instance(
+ self,
+ ) -> typing.Callable[
+ [cloud_redis.DeleteInstanceRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("CloudRedisTransport",)
diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py
new file mode 100644
index 0000000..b54f906
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py
@@ -0,0 +1,550 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.redis_v1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+
+
+class CloudRedisGrpcTransport(CloudRedisTransport):
+ """gRPC backend transport for CloudRedis.
+
+ Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def list_instances(
+ self,
+ ) -> Callable[
+ [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse
+ ]:
+ r"""Return a callable for the list instances method over gRPC.
+
+ Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Returns:
+ Callable[[~.ListInstancesRequest],
+ ~.ListInstancesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_instances" not in self._stubs:
+ self._stubs["list_instances"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ListInstances",
+ request_serializer=cloud_redis.ListInstancesRequest.serialize,
+ response_deserializer=cloud_redis.ListInstancesResponse.deserialize,
+ )
+ return self._stubs["list_instances"]
+
+ @property
+ def get_instance(
+ self,
+ ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]:
+ r"""Return a callable for the get instance method over gRPC.
+
+ Gets the details of a specific Redis instance.
+
+ Returns:
+ Callable[[~.GetInstanceRequest],
+ ~.Instance]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_instance" not in self._stubs:
+ self._stubs["get_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/GetInstance",
+ request_serializer=cloud_redis.GetInstanceRequest.serialize,
+ response_deserializer=cloud_redis.Instance.deserialize,
+ )
+ return self._stubs["get_instance"]
+
+ @property
+ def create_instance(
+ self,
+ ) -> Callable[[cloud_redis.CreateInstanceRequest], operations.Operation]:
+ r"""Return a callable for the create instance method over gRPC.
+
+ Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.CreateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_instance" not in self._stubs:
+ self._stubs["create_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/CreateInstance",
+ request_serializer=cloud_redis.CreateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_instance"]
+
+ @property
+ def update_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations.Operation]:
+ r"""Return a callable for the update instance method over gRPC.
+
+ Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.UpdateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_instance" not in self._stubs:
+ self._stubs["update_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/UpdateInstance",
+ request_serializer=cloud_redis.UpdateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_instance"]
+
+ @property
+ def upgrade_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations.Operation]:
+ r"""Return a callable for the upgrade instance method over gRPC.
+
+ Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Returns:
+ Callable[[~.UpgradeInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "upgrade_instance" not in self._stubs:
+ self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/UpgradeInstance",
+ request_serializer=cloud_redis.UpgradeInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["upgrade_instance"]
+
+ @property
+ def import_instance(
+ self,
+ ) -> Callable[[cloud_redis.ImportInstanceRequest], operations.Operation]:
+ r"""Return a callable for the import instance method over gRPC.
+
+ Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ImportInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_instance" not in self._stubs:
+ self._stubs["import_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ImportInstance",
+ request_serializer=cloud_redis.ImportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_instance"]
+
+ @property
+ def export_instance(
+ self,
+ ) -> Callable[[cloud_redis.ExportInstanceRequest], operations.Operation]:
+ r"""Return a callable for the export instance method over gRPC.
+
+ Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ExportInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_instance" not in self._stubs:
+ self._stubs["export_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ExportInstance",
+ request_serializer=cloud_redis.ExportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_instance"]
+
+ @property
+ def failover_instance(
+ self,
+ ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations.Operation]:
+ r"""Return a callable for the failover instance method over gRPC.
+
+ Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Returns:
+ Callable[[~.FailoverInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "failover_instance" not in self._stubs:
+ self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/FailoverInstance",
+ request_serializer=cloud_redis.FailoverInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["failover_instance"]
+
+ @property
+ def delete_instance(
+ self,
+ ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations.Operation]:
+ r"""Return a callable for the delete instance method over gRPC.
+
+ Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Returns:
+ Callable[[~.DeleteInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_instance" not in self._stubs:
+ self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/DeleteInstance",
+ request_serializer=cloud_redis.DeleteInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["delete_instance"]
+
+
+__all__ = ("CloudRedisGrpcTransport",)
diff --git a/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py
new file mode 100644
index 0000000..30cdc40
--- /dev/null
+++ b/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py
@@ -0,0 +1,554 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.redis_v1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .grpc import CloudRedisGrpcTransport
+
+
+class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport):
+ """gRPC AsyncIO backend transport for CloudRedis.
+
+ Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+            host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def list_instances(
+ self,
+ ) -> Callable[
+ [cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse]
+ ]:
+ r"""Return a callable for the list instances method over gRPC.
+
+ Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Returns:
+ Callable[[~.ListInstancesRequest],
+ Awaitable[~.ListInstancesResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_instances" not in self._stubs:
+ self._stubs["list_instances"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ListInstances",
+ request_serializer=cloud_redis.ListInstancesRequest.serialize,
+ response_deserializer=cloud_redis.ListInstancesResponse.deserialize,
+ )
+ return self._stubs["list_instances"]
+
+ @property
+ def get_instance(
+ self,
+ ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]:
+ r"""Return a callable for the get instance method over gRPC.
+
+ Gets the details of a specific Redis instance.
+
+ Returns:
+ Callable[[~.GetInstanceRequest],
+ Awaitable[~.Instance]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_instance" not in self._stubs:
+ self._stubs["get_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/GetInstance",
+ request_serializer=cloud_redis.GetInstanceRequest.serialize,
+ response_deserializer=cloud_redis.Instance.deserialize,
+ )
+ return self._stubs["get_instance"]
+
+ @property
+ def create_instance(
+ self,
+ ) -> Callable[[cloud_redis.CreateInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the create instance method over gRPC.
+
+ Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+        `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.CreateInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_instance" not in self._stubs:
+ self._stubs["create_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/CreateInstance",
+ request_serializer=cloud_redis.CreateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_instance"]
+
+ @property
+ def update_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpdateInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the update instance method over gRPC.
+
+ Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.UpdateInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_instance" not in self._stubs:
+ self._stubs["update_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/UpdateInstance",
+ request_serializer=cloud_redis.UpdateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_instance"]
+
+ @property
+ def upgrade_instance(
+ self,
+ ) -> Callable[
+ [cloud_redis.UpgradeInstanceRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the upgrade instance method over gRPC.
+
+ Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Returns:
+ Callable[[~.UpgradeInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "upgrade_instance" not in self._stubs:
+ self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/UpgradeInstance",
+ request_serializer=cloud_redis.UpgradeInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["upgrade_instance"]
+
+ @property
+ def import_instance(
+ self,
+ ) -> Callable[[cloud_redis.ImportInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the import instance method over gRPC.
+
+ Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ImportInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_instance" not in self._stubs:
+ self._stubs["import_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ImportInstance",
+ request_serializer=cloud_redis.ImportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_instance"]
+
+ @property
+ def export_instance(
+ self,
+ ) -> Callable[[cloud_redis.ExportInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the export instance method over gRPC.
+
+ Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ExportInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_instance" not in self._stubs:
+ self._stubs["export_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/ExportInstance",
+ request_serializer=cloud_redis.ExportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_instance"]
+
+ @property
+ def failover_instance(
+ self,
+ ) -> Callable[
+ [cloud_redis.FailoverInstanceRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the failover instance method over gRPC.
+
+ Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Returns:
+ Callable[[~.FailoverInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "failover_instance" not in self._stubs:
+ self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/FailoverInstance",
+ request_serializer=cloud_redis.FailoverInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["failover_instance"]
+
+ @property
+ def delete_instance(
+ self,
+ ) -> Callable[[cloud_redis.DeleteInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the delete instance method over gRPC.
+
+ Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Returns:
+ Callable[[~.DeleteInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_instance" not in self._stubs:
+ self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1.CloudRedis/DeleteInstance",
+ request_serializer=cloud_redis.DeleteInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["delete_instance"]
+
+
+__all__ = ("CloudRedisGrpcAsyncIOTransport",)
diff --git a/google/cloud/redis_v1/types.py b/google/cloud/redis_v1/types.py
deleted file mode 100644
index a5a0b20..0000000
--- a/google/cloud/redis_v1/types.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from __future__ import absolute_import
-import sys
-
-from google.api_core.protobuf_helpers import get_messages
-
-from google.cloud.redis_v1.proto import cloud_redis_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import any_pb2
-from google.protobuf import field_mask_pb2
-from google.protobuf import timestamp_pb2
-from google.rpc import status_pb2
-
-
-_shared_modules = [operations_pb2, any_pb2, field_mask_pb2, timestamp_pb2, status_pb2]
-
-_local_modules = [cloud_redis_pb2]
-
-names = []
-
-for module in _shared_modules: # pragma: NO COVER
- for name, message in get_messages(module).items():
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-for module in _local_modules:
- for name, message in get_messages(module).items():
- message.__module__ = "google.cloud.redis_v1.types"
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-
-__all__ = tuple(sorted(names))
diff --git a/google/cloud/redis_v1/types/__init__.py b/google/cloud/redis_v1/types/__init__.py
new file mode 100644
index 0000000..fb1f7e5
--- /dev/null
+++ b/google/cloud/redis_v1/types/__init__.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .cloud_redis import (
+ Instance,
+ ListInstancesRequest,
+ ListInstancesResponse,
+ GetInstanceRequest,
+ CreateInstanceRequest,
+ UpdateInstanceRequest,
+ UpgradeInstanceRequest,
+ DeleteInstanceRequest,
+ GcsSource,
+ InputConfig,
+ ImportInstanceRequest,
+ GcsDestination,
+ OutputConfig,
+ ExportInstanceRequest,
+ FailoverInstanceRequest,
+ OperationMetadata,
+ LocationMetadata,
+ ZoneMetadata,
+)
+
+
+__all__ = (
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "GetInstanceRequest",
+ "CreateInstanceRequest",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "DeleteInstanceRequest",
+ "GcsSource",
+ "InputConfig",
+ "ImportInstanceRequest",
+ "GcsDestination",
+ "OutputConfig",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "OperationMetadata",
+ "LocationMetadata",
+ "ZoneMetadata",
+)
diff --git a/google/cloud/redis_v1/types/cloud_redis.py b/google/cloud/redis_v1/types/cloud_redis.py
new file mode 100644
index 0000000..a95e46b
--- /dev/null
+++ b/google/cloud/redis_v1/types/cloud_redis.py
@@ -0,0 +1,583 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.cloud.redis.v1",
+ manifest={
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "GetInstanceRequest",
+ "CreateInstanceRequest",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "DeleteInstanceRequest",
+ "GcsSource",
+ "InputConfig",
+ "ImportInstanceRequest",
+ "GcsDestination",
+ "OutputConfig",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "OperationMetadata",
+ "LocationMetadata",
+ "ZoneMetadata",
+ },
+)
+
+
+class Instance(proto.Message):
+ r"""A Google Cloud Redis instance.
+
+ Attributes:
+ name (str):
+ Required. Unique name of the resource in this scope
+ including project and location using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note: Redis instances are managed and addressed at regional
+ level so location_id here refers to a GCP region; however,
+ users may choose which specific zone (or collection of zones
+ for cross-zone instances) an instance should be provisioned
+ in. Refer to
+ [location_id][google.cloud.redis.v1.Instance.location_id]
+ and
+ [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id]
+ fields for more details.
+ display_name (str):
+ An arbitrary and optional user-provided name
+ for the instance.
+ labels (Sequence[~.cloud_redis.Instance.LabelsEntry]):
+ Resource labels to represent user provided
+ metadata
+ location_id (str):
+ Optional. The zone where the instance will be provisioned.
+ If not provided, the service will choose a zone for the
+ instance. For STANDARD_HA tier, instances will be created
+ across two zones for protection against zonal failures. If
+ [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id]
+ is also provided, it must be different from
+ [location_id][google.cloud.redis.v1.Instance.location_id].
+ alternative_location_id (str):
+ Optional. Only applicable to STANDARD_HA tier which protects
+ the instance against zonal failures by provisioning it
+ across two zones. If provided, it must be a different zone
+ from the one provided in
+ [location_id][google.cloud.redis.v1.Instance.location_id].
+ redis_version (str):
+ Optional. The version of Redis software. If not provided,
+ latest supported version will be used. Currently, the
+ supported values are:
+
+ - ``REDIS_3_2`` for Redis 3.2 compatibility
+ - ``REDIS_4_0`` for Redis 4.0 compatibility (default)
+ - ``REDIS_5_0`` for Redis 5.0 compatibility
+ reserved_ip_range (str):
+ Optional. The CIDR range of internal
+ addresses that are reserved for this instance.
+ If not provided, the service will choose an
+ unused /29 block, for example, 10.0.0.0/29 or
+ 192.168.0.0/29. Ranges must be unique and non-
+ overlapping with existing subnets in an
+ authorized network.
+ host (str):
+ Output only. Hostname or IP address of the
+ exposed Redis endpoint used by clients to
+ connect to the service.
+ port (int):
+ Output only. The port number of the exposed
+ Redis endpoint.
+ current_location_id (str):
+ Output only. The current zone where the Redis endpoint is
+ placed. For Basic Tier instances, this will always be the
+ same as the
+ [location_id][google.cloud.redis.v1.Instance.location_id]
+ provided by the user at creation time. For Standard Tier
+ instances, this can be either
+ [location_id][google.cloud.redis.v1.Instance.location_id] or
+ [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id]
+ and can change after a failover event.
+ create_time (~.timestamp.Timestamp):
+ Output only. The time the instance was
+ created.
+ state (~.cloud_redis.Instance.State):
+ Output only. The current state of this
+ instance.
+ status_message (str):
+ Output only. Additional information about the
+ current status of this instance, if available.
+ redis_configs (Sequence[~.cloud_redis.Instance.RedisConfigsEntry]):
+ Optional. Redis configuration parameters, according to
+ http://redis.io/topics/config. Currently, the only supported
+ parameters are:
+
+ Redis version 3.2 and newer:
+
+ - maxmemory-policy
+ - notify-keyspace-events
+
+ Redis version 4.0 and newer:
+
+ - activedefrag
+ - lfu-decay-time
+ - lfu-log-factor
+ - maxmemory-gb
+
+ Redis version 5.0 and newer:
+
+ - stream-node-max-bytes
+ - stream-node-max-entries
+ tier (~.cloud_redis.Instance.Tier):
+ Required. The service tier of the instance.
+ memory_size_gb (int):
+ Required. Redis memory size in GiB.
+ authorized_network (str):
+ Optional. The full name of the Google Compute Engine
+            `network <https://cloud.google.com/vpc/docs/vpc>`__ to which
+ the instance is connected. If left unspecified, the
+ ``default`` network will be used.
+ persistence_iam_identity (str):
+ Output only. Cloud IAM identity used by import / export
+ operations to transfer data to/from Cloud Storage. Format is
+            "serviceAccount:<service_account_email>". The value may
+ change over time for a given instance so should be checked
+ before each import/export operation.
+ connect_mode (~.cloud_redis.Instance.ConnectMode):
+ Optional. The network connect mode of the Redis instance. If
+ not provided, the connect mode defaults to DIRECT_PEERING.
+ """
+
+ class State(proto.Enum):
+ r"""Represents the different states of a Redis instance."""
+ STATE_UNSPECIFIED = 0
+ CREATING = 1
+ READY = 2
+ UPDATING = 3
+ DELETING = 4
+ REPAIRING = 5
+ MAINTENANCE = 6
+ IMPORTING = 8
+ FAILING_OVER = 9
+
+ class Tier(proto.Enum):
+ r"""Available service tiers to choose from"""
+ TIER_UNSPECIFIED = 0
+ BASIC = 1
+ STANDARD_HA = 3
+
+ class ConnectMode(proto.Enum):
+ r"""Available connection modes."""
+ CONNECT_MODE_UNSPECIFIED = 0
+ DIRECT_PEERING = 1
+ PRIVATE_SERVICE_ACCESS = 2
+
+ name = proto.Field(proto.STRING, number=1)
+
+ display_name = proto.Field(proto.STRING, number=2)
+
+ labels = proto.MapField(proto.STRING, proto.STRING, number=3)
+
+ location_id = proto.Field(proto.STRING, number=4)
+
+ alternative_location_id = proto.Field(proto.STRING, number=5)
+
+ redis_version = proto.Field(proto.STRING, number=7)
+
+ reserved_ip_range = proto.Field(proto.STRING, number=9)
+
+ host = proto.Field(proto.STRING, number=10)
+
+ port = proto.Field(proto.INT32, number=11)
+
+ current_location_id = proto.Field(proto.STRING, number=12)
+
+ create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+
+ state = proto.Field(proto.ENUM, number=14, enum=State,)
+
+ status_message = proto.Field(proto.STRING, number=15)
+
+ redis_configs = proto.MapField(proto.STRING, proto.STRING, number=16)
+
+ tier = proto.Field(proto.ENUM, number=17, enum=Tier,)
+
+ memory_size_gb = proto.Field(proto.INT32, number=18)
+
+ authorized_network = proto.Field(proto.STRING, number=20)
+
+ persistence_iam_identity = proto.Field(proto.STRING, number=21)
+
+ connect_mode = proto.Field(proto.ENUM, number=22, enum=ConnectMode,)
+
+
+class ListInstancesRequest(proto.Message):
+ r"""Request for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+
+ Attributes:
+ parent (str):
+ Required. The resource name of the instance location using
+ the form: ``projects/{project_id}/locations/{location_id}``
+ where ``location_id`` refers to a GCP region.
+ page_size (int):
+ The maximum number of items to return.
+
+ If not specified, a default value of 1000 will be used by
+ the service. Regardless of the page_size value, the response
+ may include a partial list and a caller should only rely on
+ response's
+ [``next_page_token``][google.cloud.redis.v1.ListInstancesResponse.next_page_token]
+ to determine if there are more instances left to be queried.
+ page_token (str):
+ The ``next_page_token`` value returned from a previous
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]
+ request, if any.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ page_size = proto.Field(proto.INT32, number=2)
+
+ page_token = proto.Field(proto.STRING, number=3)
+
+
+class ListInstancesResponse(proto.Message):
+ r"""Response for
+ [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances].
+
+ Attributes:
+ instances (Sequence[~.cloud_redis.Instance]):
+ A list of Redis instances in the project in the specified
+ location, or across all locations.
+
+ If the ``location_id`` in the parent field of the request is
+ "-", all regions available to the project are queried, and
+ the results aggregated. If in such an aggregated query a
+ location is unavailable, a dummy Redis entry is included in
+ the response with the ``name`` field set to a value of the
+ form
+ ``projects/{project_id}/locations/{location_id}/instances/``-
+ and the ``status`` field set to ERROR and ``status_message``
+ field set to "location not available for ListInstances".
+ next_page_token (str):
+ Token to retrieve the next page of results,
+ or empty if there are no more results in the
+ list.
+ unreachable (Sequence[str]):
+ Locations that could not be reached.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ instances = proto.RepeatedField(proto.MESSAGE, number=1, message=Instance,)
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+ unreachable = proto.RepeatedField(proto.STRING, number=3)
+
+
+class GetInstanceRequest(proto.Message):
+ r"""Request for
+ [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class CreateInstanceRequest(proto.Message):
+ r"""Request for
+ [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
+
+ Attributes:
+ parent (str):
+ Required. The resource name of the instance location using
+ the form: ``projects/{project_id}/locations/{location_id}``
+ where ``location_id`` refers to a GCP region.
+ instance_id (str):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ instance (~.cloud_redis.Instance):
+ Required. A Redis [Instance] resource
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ instance_id = proto.Field(proto.STRING, number=2)
+
+ instance = proto.Field(proto.MESSAGE, number=3, message=Instance,)
+
+
+class UpdateInstanceRequest(proto.Message):
+ r"""Request for
+ [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance].
+
+ Attributes:
+ update_mask (~.field_mask.FieldMask):
+ Required. Mask of fields to update. At least one path must
+ be supplied in this field. The elements of the repeated
+ paths field may only include these fields from
+ [Instance][google.cloud.redis.v1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ instance (~.cloud_redis.Instance):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ """
+
+ update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,)
+
+ instance = proto.Field(proto.MESSAGE, number=2, message=Instance,)
+
+
+class UpgradeInstanceRequest(proto.Message):
+ r"""Request for
+ [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ redis_version (str):
+ Required. Specifies the target version of
+ Redis software to upgrade to.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ redis_version = proto.Field(proto.STRING, number=2)
+
+
+class DeleteInstanceRequest(proto.Message):
+ r"""Request for
+ [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class GcsSource(proto.Message):
+ r"""The Cloud Storage location for the input content
+
+ Attributes:
+ uri (str):
+ Required. Source data URI. (e.g.
+ 'gs://my_bucket/my_object').
+ """
+
+ uri = proto.Field(proto.STRING, number=1)
+
+
+class InputConfig(proto.Message):
+ r"""The input content
+
+ Attributes:
+ gcs_source (~.cloud_redis.GcsSource):
+ Google Cloud Storage location where input
+ content is located.
+ """
+
+ gcs_source = proto.Field(
+ proto.MESSAGE, number=1, oneof="source", message=GcsSource,
+ )
+
+
+class ImportInstanceRequest(proto.Message):
+ r"""Request for
+ [Import][google.cloud.redis.v1.CloudRedis.ImportInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ input_config (~.cloud_redis.InputConfig):
+ Required. Specify data to be imported.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ input_config = proto.Field(proto.MESSAGE, number=3, message=InputConfig,)
+
+
+class GcsDestination(proto.Message):
+ r"""The Cloud Storage location for the output content
+
+ Attributes:
+ uri (str):
+ Required. Data destination URI (e.g.
+ 'gs://my_bucket/my_object'). Existing files will be
+ overwritten.
+ """
+
+ uri = proto.Field(proto.STRING, number=1)
+
+
+class OutputConfig(proto.Message):
+ r"""The output content
+
+ Attributes:
+ gcs_destination (~.cloud_redis.GcsDestination):
+ Google Cloud Storage destination for output
+ content.
+ """
+
+ gcs_destination = proto.Field(
+ proto.MESSAGE, number=1, oneof="destination", message=GcsDestination,
+ )
+
+
+class ExportInstanceRequest(proto.Message):
+ r"""Request for
+ [Export][google.cloud.redis.v1.CloudRedis.ExportInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ output_config (~.cloud_redis.OutputConfig):
+ Required. Specify data to be exported.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ output_config = proto.Field(proto.MESSAGE, number=3, message=OutputConfig,)
+
+
+class FailoverInstanceRequest(proto.Message):
+ r"""Request for
+ [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ data_protection_mode (~.cloud_redis.FailoverInstanceRequest.DataProtectionMode):
+ Optional. Available data protection modes that the user can
+ choose. If it's unspecified, data protection mode will be
+ LIMITED_DATA_LOSS by default.
+ """
+
+ class DataProtectionMode(proto.Enum):
+ r"""Specifies different modes of operation in relation to the
+ data retention.
+ """
+ DATA_PROTECTION_MODE_UNSPECIFIED = 0
+ LIMITED_DATA_LOSS = 1
+ FORCE_DATA_LOSS = 2
+
+ name = proto.Field(proto.STRING, number=1)
+
+ data_protection_mode = proto.Field(proto.ENUM, number=2, enum=DataProtectionMode,)
+
+
+class OperationMetadata(proto.Message):
+ r"""Represents the v1 metadata of the long-running operation.
+
+ Attributes:
+ create_time (~.timestamp.Timestamp):
+ Creation timestamp.
+ end_time (~.timestamp.Timestamp):
+ End timestamp.
+ target (str):
+ Operation target.
+ verb (str):
+ Operation verb.
+ status_detail (str):
+ Operation status details.
+ cancel_requested (bool):
+ Specifies if cancellation was requested for
+ the operation.
+ api_version (str):
+ API version.
+ """
+
+ create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ target = proto.Field(proto.STRING, number=3)
+
+ verb = proto.Field(proto.STRING, number=4)
+
+ status_detail = proto.Field(proto.STRING, number=5)
+
+ cancel_requested = proto.Field(proto.BOOL, number=6)
+
+ api_version = proto.Field(proto.STRING, number=7)
+
+
+class LocationMetadata(proto.Message):
+ r"""This location metadata represents additional configuration options
+ for a given location where a Redis instance may be created. All
+ fields are output only. It is returned as content of the
+ ``google.cloud.location.Location.metadata`` field.
+
+ Attributes:
+ available_zones (Sequence[~.cloud_redis.LocationMetadata.AvailableZonesEntry]):
+ Output only. The set of available zones in the location. The
+ map is keyed by the lowercase ID of each zone, as defined by
+ GCE. These keys can be specified in ``location_id`` or
+ ``alternative_location_id`` fields when creating a Redis
+ instance.
+ """
+
+ available_zones = proto.MapField(
+ proto.STRING, proto.MESSAGE, number=1, message="ZoneMetadata",
+ )
+
+
+class ZoneMetadata(proto.Message):
+ r"""Defines specific information for a particular zone. Currently
+ empty and reserved for future use only.
+ """
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/redis_v1beta1/__init__.py b/google/cloud/redis_v1beta1/__init__.py
index 6fb67cb..ab39293 100644
--- a/google/cloud/redis_v1beta1/__init__.py
+++ b/google/cloud/redis_v1beta1/__init__.py
@@ -1,41 +1,57 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-
-from __future__ import absolute_import
-import sys
-import warnings
-
-from google.cloud.redis_v1beta1 import types
-from google.cloud.redis_v1beta1.gapic import cloud_redis_client
-from google.cloud.redis_v1beta1.gapic import enums
-
-
-if sys.version_info[:2] == (2, 7):
- message = (
- "A future version of this library will drop support for Python 2.7."
- "More details about Python 2 support for Google Cloud Client Libraries"
- "can be found at https://cloud.google.com/python/docs/python2-sunset/"
- )
- warnings.warn(message, DeprecationWarning)
-
-
-class CloudRedisClient(cloud_redis_client.CloudRedisClient):
- __doc__ = cloud_redis_client.CloudRedisClient.__doc__
- enums = enums
-
-
-__all__ = ("enums", "types", "CloudRedisClient")
+from .services.cloud_redis import CloudRedisClient
+from .types.cloud_redis import CreateInstanceRequest
+from .types.cloud_redis import DeleteInstanceRequest
+from .types.cloud_redis import ExportInstanceRequest
+from .types.cloud_redis import FailoverInstanceRequest
+from .types.cloud_redis import GcsDestination
+from .types.cloud_redis import GcsSource
+from .types.cloud_redis import GetInstanceRequest
+from .types.cloud_redis import ImportInstanceRequest
+from .types.cloud_redis import InputConfig
+from .types.cloud_redis import Instance
+from .types.cloud_redis import ListInstancesRequest
+from .types.cloud_redis import ListInstancesResponse
+from .types.cloud_redis import LocationMetadata
+from .types.cloud_redis import OutputConfig
+from .types.cloud_redis import UpdateInstanceRequest
+from .types.cloud_redis import UpgradeInstanceRequest
+from .types.cloud_redis import ZoneMetadata
+
+
+__all__ = (
+ "CreateInstanceRequest",
+ "DeleteInstanceRequest",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "GcsDestination",
+ "GcsSource",
+ "GetInstanceRequest",
+ "ImportInstanceRequest",
+ "InputConfig",
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "LocationMetadata",
+ "OutputConfig",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "ZoneMetadata",
+ "CloudRedisClient",
+)
diff --git a/google/cloud/redis_v1beta1/gapic/__init__.py b/google/cloud/redis_v1beta1/gapic/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py b/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py
deleted file mode 100644
index f132abb..0000000
--- a/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py
+++ /dev/null
@@ -1,1128 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Accesses the google.cloud.redis.v1beta1 CloudRedis API."""
-
-import functools
-import pkg_resources
-import warnings
-
-from google.oauth2 import service_account
-import google.api_core.client_options
-import google.api_core.gapic_v1.client_info
-import google.api_core.gapic_v1.config
-import google.api_core.gapic_v1.method
-import google.api_core.gapic_v1.routing_header
-import google.api_core.grpc_helpers
-import google.api_core.operation
-import google.api_core.operations_v1
-import google.api_core.page_iterator
-import google.api_core.path_template
-import grpc
-
-from google.cloud.redis_v1beta1.gapic import cloud_redis_client_config
-from google.cloud.redis_v1beta1.gapic import enums
-from google.cloud.redis_v1beta1.gapic.transports import cloud_redis_grpc_transport
-from google.cloud.redis_v1beta1.proto import cloud_redis_pb2
-from google.cloud.redis_v1beta1.proto import cloud_redis_pb2_grpc
-from google.longrunning import operations_pb2
-from google.protobuf import any_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-
-
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-redis").version
-
-
-class CloudRedisClient(object):
- """
- Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1beta1
-
- The ``redis.googleapis.com`` service implements the Google Cloud
- Memorystore for Redis API and defines the following resource model for
- managing Redis instances:
-
- - The service works with a collection of cloud projects, named:
- ``/projects/*``
- - Each project has a collection of available locations, named:
- ``/locations/*``
- - Each location has a collection of Redis instances, named:
- ``/instances/*``
- - As such, Redis instances are resources of the form:
- ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
-
- Note that location\_id must be refering to a GCP ``region``; for
- example:
-
- - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
- """
-
- SERVICE_ADDRESS = "redis.googleapis.com:443"
- """The default address of the service."""
-
- # The name of the interface for this client. This is the key used to
- # find the method configuration in the client_config dictionary.
- _INTERFACE_NAME = "google.cloud.redis.v1beta1.CloudRedis"
-
- @classmethod
- def from_service_account_file(cls, filename, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- CloudRedisClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def instance_path(cls, project, location, instance):
- """Return a fully-qualified instance string."""
- return google.api_core.path_template.expand(
- "projects/{project}/locations/{location}/instances/{instance}",
- project=project,
- location=location,
- instance=instance,
- )
-
- @classmethod
- def location_path(cls, project, location):
- """Return a fully-qualified location string."""
- return google.api_core.path_template.expand(
- "projects/{project}/locations/{location}",
- project=project,
- location=location,
- )
-
- def __init__(
- self,
- transport=None,
- channel=None,
- credentials=None,
- client_config=None,
- client_info=None,
- client_options=None,
- ):
- """Constructor.
-
- Args:
- transport (Union[~.CloudRedisGrpcTransport,
- Callable[[~.Credentials, type], ~.CloudRedisGrpcTransport]): A transport
- instance, responsible for actually making the API calls.
- The default transport uses the gRPC protocol.
- This argument may also be a callable which returns a
- transport instance. Callables will be sent the credentials
- as the first argument and the default transport class as
- the second argument.
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
- through which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is mutually exclusive with providing a
- transport instance to ``transport``; doing so will raise
- an exception.
- client_config (dict): DEPRECATED. A dictionary of call options for
- each method. If not specified, the default configuration is used.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- client_options (Union[dict, google.api_core.client_options.ClientOptions]):
- Client options used to set user options on the client. API Endpoint
- should be set through client_options.
- """
- # Raise deprecation warnings for things we want to go away.
- if client_config is not None:
- warnings.warn(
- "The `client_config` argument is deprecated.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
- else:
- client_config = cloud_redis_client_config.config
-
- if channel:
- warnings.warn(
- "The `channel` argument is deprecated; use " "`transport` instead.",
- PendingDeprecationWarning,
- stacklevel=2,
- )
-
- api_endpoint = self.SERVICE_ADDRESS
- if client_options:
- if type(client_options) == dict:
- client_options = google.api_core.client_options.from_dict(
- client_options
- )
- if client_options.api_endpoint:
- api_endpoint = client_options.api_endpoint
-
- # Instantiate the transport.
- # The transport is responsible for handling serialization and
- # deserialization and actually sending data to the service.
- if transport:
- if callable(transport):
- self.transport = transport(
- credentials=credentials,
- default_class=cloud_redis_grpc_transport.CloudRedisGrpcTransport,
- address=api_endpoint,
- )
- else:
- if credentials:
- raise ValueError(
- "Received both a transport instance and "
- "credentials; these are mutually exclusive."
- )
- self.transport = transport
- else:
- self.transport = cloud_redis_grpc_transport.CloudRedisGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
- )
-
- if client_info is None:
- client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
- )
- else:
- client_info.gapic_version = _GAPIC_LIBRARY_VERSION
- self._client_info = client_info
-
- # Parse out the default settings for retry and timeout for each RPC
- # from the client configuration.
- # (Ordinarily, these are the defaults specified in the `*_config.py`
- # file next to this one.)
- self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
- )
-
- # Save a dictionary of cached API call functions.
- # These are the actual callables which invoke the proper
- # transport methods, wrapped with `wrap_method` to add retry,
- # timeout, and the like.
- self._inner_api_calls = {}
-
- # Service calls
- def list_instances(
- self,
- parent,
- page_size=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- - ``projects/{project_id}/locations/{location_id}``
-
- If ``location_id`` is specified as ``-`` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
- >>>
- >>> # Iterate over all results
- >>> for element in client.list_instances(parent):
- ... # process element
- ... pass
- >>>
- >>>
- >>> # Alternatively:
- >>>
- >>> # Iterate over results one page at a time
- >>> for page in client.list_instances(parent).pages:
- ... for element in page:
- ... # process element
- ... pass
-
- Args:
- parent (str): Required. The resource name of the instance location using the form:
- ``projects/{project_id}/locations/{location_id}`` where ``location_id``
- refers to a GCP region.
- page_size (int): The maximum number of resources contained in the
- underlying API response. If page streaming is performed per-
- resource, this parameter does not affect the return value. If page
- streaming is performed per-page, this determines the maximum number
- of resources in a page.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.redis_v1beta1.types.Instance` instances.
- You can also iterate over the pages of the response
- using its `pages` property.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "list_instances" not in self._inner_api_calls:
- self._inner_api_calls[
- "list_instances"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.list_instances,
- default_retry=self._method_configs["ListInstances"].retry,
- default_timeout=self._method_configs["ListInstances"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ListInstancesRequest(
- parent=parent, page_size=page_size
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- iterator = google.api_core.page_iterator.GRPCIterator(
- client=None,
- method=functools.partial(
- self._inner_api_calls["list_instances"],
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- ),
- request=request,
- items_field="instances",
- request_token_field="page_token",
- response_token_field="next_page_token",
- )
- return iterator
-
- def get_instance(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets the details of a specific Redis instance.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.get_instance(name)
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types.Instance` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_instance,
- default_retry=self._method_configs["GetInstance"].retry,
- default_timeout=self._method_configs["GetInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.GetInstanceRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def create_instance(
- self,
- parent,
- instance_id,
- instance,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's `default
- network `__.
-
- The creation is executed asynchronously and callers may check the
- returned operation to track its progress. Once the operation is
- completed the Redis instance will be fully functional. Completed
- longrunning.Operation will contain the new instance object in the
- response field.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>> from google.cloud.redis_v1beta1 import enums
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
- >>> instance_id = 'test_instance'
- >>> tier = enums.Instance.Tier.BASIC
- >>> memory_size_gb = 1
- >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb}
- >>>
- >>> response = client.create_instance(parent, instance_id, instance)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- parent (str): Required. The resource name of the instance location using the form:
- ``projects/{project_id}/locations/{location_id}`` where ``location_id``
- refers to a GCP region.
- instance_id (str): Required. The logical name of the Redis instance in the customer project
- with the following restrictions:
-
- - Must contain only lowercase letters, numbers, and hyphens.
- - Must start with a letter.
- - Must be between 1-40 characters.
- - Must end with a number or a letter.
- - Must be unique within the customer project / location
- instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. A Redis [Instance] resource
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1beta1.types.Instance`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_instance,
- default_retry=self._method_configs["CreateInstance"].retry,
- default_timeout=self._method_configs["CreateInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.CreateInstanceRequest(
- parent=parent, instance_id=instance_id, instance=instance
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["create_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
-
- def update_instance(
- self,
- update_mask,
- instance,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> paths_element = 'display_name'
- >>> paths_element_2 = 'memory_size_gb'
- >>> paths = [paths_element, paths_element_2]
- >>> update_mask = {'paths': paths}
- >>> display_name = 'UpdatedDisplayName'
- >>> name = 'projects//locations//instances/'
- >>> memory_size_gb = 4
- >>> instance = {'display_name': display_name, 'name': name, 'memory_size_gb': memory_size_gb}
- >>>
- >>> response = client.update_instance(update_mask, instance)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied
- in this field. The elements of the repeated paths field may only include
- these fields from ``Instance``:
-
- - ``displayName``
- - ``labels``
- - ``memorySizeGb``
- - ``redisConfig``
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1beta1.types.FieldMask`
- instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are
- updated.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1beta1.types.Instance`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "update_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "update_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_instance,
- default_retry=self._method_configs["UpdateInstance"].retry,
- default_timeout=self._method_configs["UpdateInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.UpdateInstanceRequest(
- update_mask=update_mask, instance=instance
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("instance.name", instance.name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["update_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
-
- def import_instance(
- self,
- name,
- input_config,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> # TODO: Initialize `input_config`:
- >>> input_config = {}
- >>>
- >>> response = client.import_instance(name, input_config)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- input_config (Union[dict, ~google.cloud.redis_v1beta1.types.InputConfig]): Required. Specify data to be imported.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1beta1.types.InputConfig`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "import_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "import_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.import_instance,
- default_retry=self._method_configs["ImportInstance"].retry,
- default_timeout=self._method_configs["ImportInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ImportInstanceRequest(
- name=name, input_config=input_config
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["import_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
-
- def export_instance(
- self,
- name,
- output_config,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> # TODO: Initialize `output_config`:
- >>> output_config = {}
- >>>
- >>> response = client.export_instance(name, output_config)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- output_config (Union[dict, ~google.cloud.redis_v1beta1.types.OutputConfig]): Required. Specify data to be exported.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.redis_v1beta1.types.OutputConfig`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "export_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "export_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.export_instance,
- default_retry=self._method_configs["ExportInstance"].retry,
- default_timeout=self._method_configs["ExportInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.ExportInstanceRequest(
- name=name, output_config=output_config
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["export_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
-
- def failover_instance(
- self,
- name,
- data_protection_mode=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.failover_instance(name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- data_protection_mode (~google.cloud.redis_v1beta1.types.DataProtectionMode): Optional. Available data protection modes that the user can choose. If
- it's unspecified, data protection mode will be LIMITED\_DATA\_LOSS by
- default.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "failover_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "failover_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.failover_instance,
- default_retry=self._method_configs["FailoverInstance"].retry,
- default_timeout=self._method_configs["FailoverInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.FailoverInstanceRequest(
- name=name, data_protection_mode=data_protection_mode
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["failover_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
-
- def delete_instance(
- self,
- name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> response = client.delete_instance(name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "delete_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "delete_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.delete_instance,
- default_retry=self._method_configs["DeleteInstance"].retry,
- default_timeout=self._method_configs["DeleteInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.DeleteInstanceRequest(name=name)
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["delete_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- empty_pb2.Empty,
- metadata_type=any_pb2.Any,
- )
-
- def upgrade_instance(
- self,
- name,
- redis_version,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Upgrades Redis instance to the newer Redis version specified in the
- request.
-
- Example:
- >>> from google.cloud import redis_v1beta1
- >>>
- >>> client = redis_v1beta1.CloudRedisClient()
- >>>
- >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
- >>>
- >>> # TODO: Initialize `redis_version`:
- >>> redis_version = ''
- >>>
- >>> response = client.upgrade_instance(name, redis_version)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- name (str): Required. Redis instance resource name using the form:
- ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
- where ``location_id`` refers to a GCP region.
- redis_version (str): Required. Specifies the target version of Redis software to upgrade to.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "upgrade_instance" not in self._inner_api_calls:
- self._inner_api_calls[
- "upgrade_instance"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.upgrade_instance,
- default_retry=self._method_configs["UpgradeInstance"].retry,
- default_timeout=self._method_configs["UpgradeInstance"].timeout,
- client_info=self._client_info,
- )
-
- request = cloud_redis_pb2.UpgradeInstanceRequest(
- name=name, redis_version=redis_version
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- operation = self._inner_api_calls["upgrade_instance"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- cloud_redis_pb2.Instance,
- metadata_type=any_pb2.Any,
- )
diff --git a/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py b/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py
deleted file mode 100644
index f121cc9..0000000
--- a/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py
+++ /dev/null
@@ -1,68 +0,0 @@
-config = {
- "interfaces": {
- "google.cloud.redis.v1beta1.CloudRedis": {
- "retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
- },
- "retry_params": {
- "default": {
- "initial_retry_delay_millis": 100,
- "retry_delay_multiplier": 1.3,
- "max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
- "rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
- "total_timeout_millis": 600000,
- }
- },
- "methods": {
- "ListInstances": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "GetInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "CreateInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "UpdateInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ImportInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "ExportInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "FailoverInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "DeleteInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "UpgradeInstance": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- },
- }
- }
-}
diff --git a/google/cloud/redis_v1beta1/gapic/enums.py b/google/cloud/redis_v1beta1/gapic/enums.py
deleted file mode 100644
index 085c094..0000000
--- a/google/cloud/redis_v1beta1/gapic/enums.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Wrappers for protocol buffer enum types."""
-
-import enum
-
-
-class FailoverInstanceRequest(object):
- class DataProtectionMode(enum.IntEnum):
- """
- Specifies different modes of operation in relation to the data retention.
-
- Attributes:
- DATA_PROTECTION_MODE_UNSPECIFIED (int): Defaults to LIMITED\_DATA\_LOSS if a data protection mode is not
- specified.
- LIMITED_DATA_LOSS (int): Instance failover will be protected with data loss control. More
- specifically, the failover will only be performed if the current
- replication offset diff between master and replica is under a certain
- threshold.
- FORCE_DATA_LOSS (int): Instance failover will be performed without data loss control.
- """
-
- DATA_PROTECTION_MODE_UNSPECIFIED = 0
- LIMITED_DATA_LOSS = 1
- FORCE_DATA_LOSS = 2
-
-
-class Instance(object):
- class ConnectMode(enum.IntEnum):
- """
- Available connection modes.
-
- Attributes:
- CONNECT_MODE_UNSPECIFIED (int): Not set.
- DIRECT_PEERING (int): Connect via directly peering with memorystore redis hosted service.
- PRIVATE_SERVICE_ACCESS (int): Connect with google via private service access and share connection
- across google managed services.
- """
-
- CONNECT_MODE_UNSPECIFIED = 0
- DIRECT_PEERING = 1
- PRIVATE_SERVICE_ACCESS = 2
-
- class State(enum.IntEnum):
- """
- Represents the different states of a Redis instance.
-
- Attributes:
- STATE_UNSPECIFIED (int): Not set.
- CREATING (int): Redis instance is being created.
- READY (int): Redis instance has been created and is fully usable.
- UPDATING (int): Redis instance configuration is being updated. Certain kinds of updates
- may cause the instance to become unusable while the update is in
- progress.
- DELETING (int): Redis instance is being deleted.
- REPAIRING (int): Redis instance is being repaired and may be unusable.
- MAINTENANCE (int): Maintenance is being performed on this Redis instance.
- IMPORTING (int): Redis instance is importing data (availability may be affected).
- FAILING_OVER (int): Redis instance is failing over (availability may be affected).
- """
-
- STATE_UNSPECIFIED = 0
- CREATING = 1
- READY = 2
- UPDATING = 3
- DELETING = 4
- REPAIRING = 5
- MAINTENANCE = 6
- IMPORTING = 8
- FAILING_OVER = 10
-
- class Tier(enum.IntEnum):
- """
- Available service tiers to choose from
-
- Attributes:
- TIER_UNSPECIFIED (int): Not set.
- BASIC (int): BASIC tier: standalone instance
- STANDARD_HA (int): STANDARD\_HA tier: highly available primary/replica instances
- """
-
- TIER_UNSPECIFIED = 0
- BASIC = 1
- STANDARD_HA = 3
diff --git a/google/cloud/redis_v1beta1/gapic/transports/__init__.py b/google/cloud/redis_v1beta1/gapic/transports/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py b/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py
deleted file mode 100644
index f5c24d6..0000000
--- a/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import google.api_core.grpc_helpers
-import google.api_core.operations_v1
-
-from google.cloud.redis_v1beta1.proto import cloud_redis_pb2_grpc
-
-
-class CloudRedisGrpcTransport(object):
- """gRPC transport class providing stubs for
- google.cloud.redis.v1beta1 CloudRedis API.
-
- The transport provides access to the raw gRPC stubs,
- which can be used to take advantage of advanced
- features of gRPC.
- """
-
- # The scopes needed to make gRPC calls to all of the methods defined
- # in this service.
- _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
-
- def __init__(
- self, channel=None, credentials=None, address="redis.googleapis.com:443"
- ):
- """Instantiate the transport class.
-
- Args:
- channel (grpc.Channel): A ``Channel`` instance through
- which to make calls. This argument is mutually exclusive
- with ``credentials``; providing both will raise an exception.
- credentials (google.auth.credentials.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- address (str): The address where the service is hosted.
- """
- # If both `channel` and `credentials` are specified, raise an
- # exception (channels come with credentials baked in already).
- if channel is not None and credentials is not None:
- raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
- )
-
- # Create the channel.
- if channel is None:
- channel = self.create_channel(
- address=address,
- credentials=credentials,
- options={
- "grpc.max_send_message_length": -1,
- "grpc.max_receive_message_length": -1,
- }.items(),
- )
-
- self._channel = channel
-
- # gRPC uses objects called "stubs" that are bound to the
- # channel and provide a basic method for each RPC.
- self._stubs = {"cloud_redis_stub": cloud_redis_pb2_grpc.CloudRedisStub(channel)}
-
- # Because this API includes a method that returns a
- # long-running operation (proto: google.longrunning.Operation),
- # instantiate an LRO client.
- self._operations_client = google.api_core.operations_v1.OperationsClient(
- channel
- )
-
- @classmethod
- def create_channel(
- cls, address="redis.googleapis.com:443", credentials=None, **kwargs
- ):
- """Create and return a gRPC channel object.
-
- Args:
- address (str): The host for the channel to use.
- credentials (~.Credentials): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- kwargs (dict): Keyword arguments, which are passed to the
- channel creation.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return google.api_core.grpc_helpers.create_channel(
- address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
- )
-
- @property
- def channel(self):
- """The gRPC channel used by the transport.
-
- Returns:
- grpc.Channel: A gRPC channel object.
- """
- return self._channel
-
- @property
- def list_instances(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`.
-
- Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- - ``projects/{project_id}/locations/{location_id}``
-
- If ``location_id`` is specified as ``-`` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ListInstances
-
- @property
- def get_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`.
-
- Gets the details of a specific Redis instance.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].GetInstance
-
- @property
- def create_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`.
-
- Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's `default
- network `__.
-
- The creation is executed asynchronously and callers may check the
- returned operation to track its progress. Once the operation is
- completed the Redis instance will be fully functional. Completed
- longrunning.Operation will contain the new instance object in the
- response field.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].CreateInstance
-
- @property
- def update_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`.
-
- Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].UpdateInstance
-
- @property
- def import_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.import_instance`.
-
- Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ImportInstance
-
- @property
- def export_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.export_instance`.
-
- Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].ExportInstance
-
- @property
- def failover_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.failover_instance`.
-
- Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].FailoverInstance
-
- @property
- def delete_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.delete_instance`.
-
- Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].DeleteInstance
-
- @property
- def upgrade_instance(self):
- """Return the gRPC stub for :meth:`CloudRedisClient.upgrade_instance`.
-
- Upgrades Redis instance to the newer Redis version specified in the
- request.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["cloud_redis_stub"].UpgradeInstance
diff --git a/google/cloud/redis_v1beta1/proto/__init__.py b/google/cloud/redis_v1beta1/proto/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/google/cloud/redis_v1beta1/proto/cloud_redis.proto b/google/cloud/redis_v1beta1/proto/cloud_redis.proto
index 47d9a6f..3eb99d8 100644
--- a/google/cloud/redis_v1beta1/proto/cloud_redis.proto
+++ b/google/cloud/redis_v1beta1/proto/cloud_redis.proto
@@ -75,7 +75,7 @@ service CloudRedis {
// Creates a Redis instance based on the specified tier and memory size.
//
// By default, the instance is accessible from the project's
- // [default network](/compute/docs/networks-and-firewalls#networks).
+ // [default network](https://cloud.google.com/vpc/docs/vpc).
//
// The creation is executed asynchronously and callers may check the returned
// operation to track its progress. Once the operation is completed the Redis
@@ -354,7 +354,7 @@ message Instance {
int32 memory_size_gb = 18 [(google.api.field_behavior) = REQUIRED];
// Optional. The full name of the Google Compute Engine
- // [network](/compute/docs/networks-and-firewalls#networks) to which the
+ // [network](https://cloud.google.com/vpc/docs/vpc) to which the
// instance is connected. If left unspecified, the `default` network
// will be used.
string authorized_network = 20 [(google.api.field_behavior) = OPTIONAL];
diff --git a/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py b/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py
deleted file mode 100644
index c3f7861..0000000
--- a/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py
+++ /dev/null
@@ -1,2256 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/redis_v1beta1/proto/cloud_redis.proto
-
-import sys
-
-_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.api import client_pb2 as google_dot_api_dot_client__pb2
-from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
-from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name="google/cloud/redis_v1beta1/proto/cloud_redis.proto",
- package="google.cloud.redis.v1beta1",
- syntax="proto3",
- serialized_options=_b(
- "\n\036com.google.cloud.redis.v1beta1B\032CloudRedisServiceBetaProtoP\001Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redis"
- ),
- serialized_pb=_b(
- '\n2google/cloud/redis_v1beta1/proto/cloud_redis.proto\x12\x1agoogle.cloud.redis.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x84\n\n\x08Instance\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12@\n\x06labels\x18\x03 \x03(\x0b\x32\x30.google.cloud.redis.v1beta1.Instance.LabelsEntry\x12\x18\n\x0blocation_id\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x61lternative_location_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rredis_version\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11reserved_ip_range\x18\t \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04host\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04port\x18\x0b \x01(\x05\x42\x03\xe0\x41\x03\x12 \n\x13\x63urrent_location_id\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12>\n\x05state\x18\x0e \x01(\x0e\x32*.google.cloud.redis.v1beta1.Instance.StateB\x03\xe0\x41\x03\x12\x1b\n\x0estatus_message\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12R\n\rredis_configs\x18\x10 \x03(\x0b\x32\x36.google.cloud.redis.v1beta1.Instance.RedisConfigsEntryB\x03\xe0\x41\x01\x12<\n\x04tier\x18\x11 \x01(\x0e\x32).google.cloud.redis.v1beta1.Instance.TierB\x03\xe0\x41\x02\x12\x1b\n\x0ememory_size_gb\x18\x12 \x01(\x05\x42\x03\xe0\x41\x02\x12\x1f\n\x12\x61uthorized_network\x18\x14 \x01(\tB\x03\xe0\x41\x01\x12%\n\x18persistence_iam_identity\x18\x15 \x01(\tB\x03\xe0\x41\x03\x12K\n\x0c\x63onnect_mode\x18\x16 \x01(\x0e\x32\x30.google.cloud.redis.v1beta1.Instance.ConnectModeB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\n"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03"[\n\x0b\x43onnectMode\x12\x1c\n\x18\x43ONNECT_MODE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x44IRECT_PEERING\x10\x01\x12\x1a\n\x16PRIVATE_SERVICE_ACCESS\x10\x02:`\xea\x41]\n\x1dredis.googleapis.com/Instance\x12"2/v1beta1/{parent=projects/*/locations/*}/instances:\x08instance\xda\x41\x1bparent,instance_id,instance\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\x86\x02\n\x0eUpdateInstance\x12\x31.google.cloud.redis.v1beta1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa1\x01\x82\xd3\xe4\x93\x02G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\xda\x41\x14update_mask,instance\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xfe\x01\n\x0fUpgradeInstance\x12\x32.google.cloud.redis.v1beta1.UpgradeInstanceRequest\x1a\x1d.google.longrunning.Operation"\x97\x01\x82\xd3\xe4\x93\x02?":/v1beta1/{name=projects/*/locations/*/instances/*}:upgrade:\x01*\xda\x41\x12name,redis_version\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xfa\x01\n\x0eImportInstance\x12\x31.google.cloud.redis.v1beta1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x95\x01\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\x01*\xda\x41\x11name,input_config\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xfb\x01\n\x0e\x45xportInstance\x12\x31.google.cloud.redis.v1beta1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x96\x01\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/l
ocations/*/instances/*}:export:\x01*\xda\x41\x12name,output_config\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\x88\x02\n\x10\x46\x61iloverInstance\x12\x33.google.cloud.redis.v1beta1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\x01*\xda\x41\x19name,data_protection_mode\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xd4\x01\n\x0e\x44\x65leteInstance\x12\x31.google.cloud.redis.v1beta1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation"p\x82\xd3\xe4\x93\x02\x34*2/v1beta1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\xca\x41,\n\x15google.protobuf.Empty\x12\x13google.protobuf.Any\x1aH\xca\x41\x14redis.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x7f\n\x1e\x63om.google.cloud.redis.v1beta1B\x1a\x43loudRedisServiceBetaProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redisb\x06proto3'
- ),
- dependencies=[
- google_dot_api_dot_annotations__pb2.DESCRIPTOR,
- google_dot_api_dot_client__pb2.DESCRIPTOR,
- google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
- google_dot_api_dot_resource__pb2.DESCRIPTOR,
- google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
- google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
- ],
-)
-
-
-_INSTANCE_STATE = _descriptor.EnumDescriptor(
- name="State",
- full_name="google.cloud.redis.v1beta1.Instance.State",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="STATE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="CREATING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="READY", index=2, number=2, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="UPDATING", index=3, number=3, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="DELETING", index=4, number=4, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="REPAIRING", index=5, number=5, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="MAINTENANCE", index=6, number=6, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="IMPORTING", index=7, number=8, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="FAILING_OVER", index=8, number=10, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1189,
- serialized_end=1337,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_STATE)
-
-_INSTANCE_TIER = _descriptor.EnumDescriptor(
- name="Tier",
- full_name="google.cloud.redis.v1beta1.Instance.Tier",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="TIER_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="BASIC", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="STANDARD_HA", index=2, number=3, serialized_options=None, type=None
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1339,
- serialized_end=1395,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_TIER)
-
-_INSTANCE_CONNECTMODE = _descriptor.EnumDescriptor(
- name="ConnectMode",
- full_name="google.cloud.redis.v1beta1.Instance.ConnectMode",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="CONNECT_MODE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="DIRECT_PEERING", index=1, number=1, serialized_options=None, type=None
- ),
- _descriptor.EnumValueDescriptor(
- name="PRIVATE_SERVICE_ACCESS",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=1397,
- serialized_end=1488,
-)
-_sym_db.RegisterEnumDescriptor(_INSTANCE_CONNECTMODE)
-
-_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE = _descriptor.EnumDescriptor(
- name="DataProtectionMode",
- full_name="google.cloud.redis.v1beta1.FailoverInstanceRequest.DataProtectionMode",
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name="DATA_PROTECTION_MODE_UNSPECIFIED",
- index=0,
- number=0,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="LIMITED_DATA_LOSS",
- index=1,
- number=1,
- serialized_options=None,
- type=None,
- ),
- _descriptor.EnumValueDescriptor(
- name="FORCE_DATA_LOSS",
- index=2,
- number=2,
- serialized_options=None,
- type=None,
- ),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=3080,
- serialized_end=3182,
-)
-_sym_db.RegisterEnumDescriptor(_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE)
-
-
-_INSTANCE_LABELSENTRY = _descriptor.Descriptor(
- name="LabelsEntry",
- full_name="google.cloud.redis.v1beta1.Instance.LabelsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.cloud.redis.v1beta1.Instance.LabelsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.cloud.redis.v1beta1.Instance.LabelsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1088,
- serialized_end=1133,
-)
-
-_INSTANCE_REDISCONFIGSENTRY = _descriptor.Descriptor(
- name="RedisConfigsEntry",
- full_name="google.cloud.redis.v1beta1.Instance.RedisConfigsEntry",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="key",
- full_name="google.cloud.redis.v1beta1.Instance.RedisConfigsEntry.key",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="value",
- full_name="google.cloud.redis.v1beta1.Instance.RedisConfigsEntry.value",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[],
- enum_types=[],
- serialized_options=_b("8\001"),
- is_extendable=False,
- syntax="proto3",
- extension_ranges=[],
- oneofs=[],
- serialized_start=1135,
- serialized_end=1186,
-)
-
-_INSTANCE = _descriptor.Descriptor(
- name="Instance",
- full_name="google.cloud.redis.v1beta1.Instance",
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name="name",
- full_name="google.cloud.redis.v1beta1.Instance.name",
- index=0,
- number=1,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="display_name",
- full_name="google.cloud.redis.v1beta1.Instance.display_name",
- index=1,
- number=2,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="labels",
- full_name="google.cloud.redis.v1beta1.Instance.labels",
- index=2,
- number=3,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=None,
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="location_id",
- full_name="google.cloud.redis.v1beta1.Instance.location_id",
- index=3,
- number=4,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="alternative_location_id",
- full_name="google.cloud.redis.v1beta1.Instance.alternative_location_id",
- index=4,
- number=5,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="redis_version",
- full_name="google.cloud.redis.v1beta1.Instance.redis_version",
- index=5,
- number=7,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="reserved_ip_range",
- full_name="google.cloud.redis.v1beta1.Instance.reserved_ip_range",
- index=6,
- number=9,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="host",
- full_name="google.cloud.redis.v1beta1.Instance.host",
- index=7,
- number=10,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="port",
- full_name="google.cloud.redis.v1beta1.Instance.port",
- index=8,
- number=11,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="current_location_id",
- full_name="google.cloud.redis.v1beta1.Instance.current_location_id",
- index=9,
- number=12,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="create_time",
- full_name="google.cloud.redis.v1beta1.Instance.create_time",
- index=10,
- number=13,
- type=11,
- cpp_type=10,
- label=1,
- has_default_value=False,
- default_value=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="state",
- full_name="google.cloud.redis.v1beta1.Instance.state",
- index=11,
- number=14,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="status_message",
- full_name="google.cloud.redis.v1beta1.Instance.status_message",
- index=12,
- number=15,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="redis_configs",
- full_name="google.cloud.redis.v1beta1.Instance.redis_configs",
- index=13,
- number=16,
- type=11,
- cpp_type=10,
- label=3,
- has_default_value=False,
- default_value=[],
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="tier",
- full_name="google.cloud.redis.v1beta1.Instance.tier",
- index=14,
- number=17,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="memory_size_gb",
- full_name="google.cloud.redis.v1beta1.Instance.memory_size_gb",
- index=15,
- number=18,
- type=5,
- cpp_type=1,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\002"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="authorized_network",
- full_name="google.cloud.redis.v1beta1.Instance.authorized_network",
- index=16,
- number=20,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="persistence_iam_identity",
- full_name="google.cloud.redis.v1beta1.Instance.persistence_iam_identity",
- index=17,
- number=21,
- type=9,
- cpp_type=9,
- label=1,
- has_default_value=False,
- default_value=_b("").decode("utf-8"),
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\003"),
- file=DESCRIPTOR,
- ),
- _descriptor.FieldDescriptor(
- name="connect_mode",
- full_name="google.cloud.redis.v1beta1.Instance.connect_mode",
- index=18,
- number=22,
- type=14,
- cpp_type=8,
- label=1,
- has_default_value=False,
- default_value=0,
- message_type=None,
- enum_type=None,
- containing_type=None,
- is_extension=False,
- extension_scope=None,
- serialized_options=_b("\340A\001"),
- file=DESCRIPTOR,
- ),
- ],
- extensions=[],
- nested_types=[_INSTANCE_LABELSENTRY, _INSTANCE_REDISCONFIGSENTRY],
- enum_types=[_INSTANCE_STATE, _INSTANCE_TIER, _INSTANCE_CONNECTMODE],
- serialized_options=_b(
- "\352A]\n\035redis.googleapis.com/Instance\022`__ to which
- the instance is connected. If left unspecified, the
- ``default`` network will be used.
- persistence_iam_identity:
- Output only. Cloud IAM identity used by import / export
- operations to transfer data to/from Cloud Storage. Format is
- "serviceAccount:". The value may change over time for a given
- instance so should be checked before each import/export
- operation.
- connect_mode:
- Optional. The connect mode of Redis instance. If not provided,
- default one will be used. Current default: DIRECT\_PEERING.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.Instance)
- ),
-)
-_sym_db.RegisterMessage(Instance)
-_sym_db.RegisterMessage(Instance.LabelsEntry)
-_sym_db.RegisterMessage(Instance.RedisConfigsEntry)
-
-ListInstancesRequest = _reflection.GeneratedProtocolMessageType(
- "ListInstancesRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINSTANCESREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
- Attributes:
- parent:
- Required. The resource name of the instance location using the
- form: ``projects/{project_id}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
- page_size:
- The maximum number of items to return. If not specified, a
- default value of 1000 will be used by the service. Regardless
- of the page\_size value, the response may include a partial
- list and a caller should only rely on response's [``next_page_
- token``][google.cloud.redis.v1beta1.ListInstancesResponse.next
- \_page\_token] to determine if there are more instances left
- to be queried.
- page_token:
- The ``next_page_token`` value returned from a previous [ListIn
- stances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]
- request, if any.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ListInstancesRequest)
- ),
-)
-_sym_db.RegisterMessage(ListInstancesRequest)
-
-ListInstancesResponse = _reflection.GeneratedProtocolMessageType(
- "ListInstancesResponse",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LISTINSTANCESRESPONSE,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Response for
- [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
- Attributes:
- instances:
- A list of Redis instances in the project in the specified
- location, or across all locations. If the ``location_id`` in
- the parent field of the request is "-", all regions available
- to the project are queried, and the results aggregated. If in
- such an aggregated query a location is unavailable, a dummy
- Redis entry is included in the response with the ``name``
- field set to a value of the form
- ``projects/{project_id}/locations/{location_id}/instances/``-
- and the ``status`` field set to ERROR and ``status_message``
- field set to "location not available for ListInstances".
- next_page_token:
- Token to retrieve the next page of results, or empty if there
- are no more results in the list.
- unreachable:
- Locations that could not be reached.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ListInstancesResponse)
- ),
-)
-_sym_db.RegisterMessage(ListInstancesResponse)
-
-GetInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "GetInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GETINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.GetInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(GetInstanceRequest)
-
-CreateInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "CreateInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_CREATEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.Cre
- ateInstance].
- Attributes:
- parent:
- Required. The resource name of the instance location using the
- form: ``projects/{project_id}/locations/{location_id}`` where
- ``location_id`` refers to a GCP region.
- instance_id:
- Required. The logical name of the Redis instance in the
- customer project with the following restrictions: - Must
- contain only lowercase letters, numbers, and hyphens. - Must
- start with a letter. - Must be between 1-40 characters. -
- Must end with a number or a letter. - Must be unique within
- the customer project / location
- instance:
- Required. A Redis [Instance] resource
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.CreateInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(CreateInstanceRequest)
-
-UpdateInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "UpdateInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPDATEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.Upd
- ateInstance].
- Attributes:
- update_mask:
- Required. Mask of fields to update. At least one path must be
- supplied in this field. The elements of the repeated paths
- field may only include these fields from
- [Instance][google.cloud.redis.v1beta1.Instance]: -
- ``displayName`` - ``labels`` - ``memorySizeGb`` -
- ``redisConfig``
- instance:
- Required. Update description. Only fields specified in
- update\_mask are updated.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.UpdateInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(UpdateInstanceRequest)
-
-UpgradeInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "UpgradeInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_UPGRADEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for [UpgradeInstance][google.cloud.redis.v1beta1.CloudRedis.Up
- gradeInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- redis_version:
- Required. Specifies the target version of Redis software to
- upgrade to.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.UpgradeInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(UpgradeInstanceRequest)
-
-DeleteInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "DeleteInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_DELETEINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.Del
- eteInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.DeleteInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(DeleteInstanceRequest)
-
-GcsSource = _reflection.GeneratedProtocolMessageType(
- "GcsSource",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GCSSOURCE,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""The Cloud Storage location for the input content
- Attributes:
- uri:
- Required. Source data URI. (e.g.
- 'gs://my\_bucket/my\_object').
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.GcsSource)
- ),
-)
-_sym_db.RegisterMessage(GcsSource)
-
-InputConfig = _reflection.GeneratedProtocolMessageType(
- "InputConfig",
- (_message.Message,),
- dict(
- DESCRIPTOR=_INPUTCONFIG,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""The input content
- Attributes:
- source:
- Required. Specify source location of input data
- gcs_source:
- Google Cloud Storage location where input content is located.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.InputConfig)
- ),
-)
-_sym_db.RegisterMessage(InputConfig)
-
-ImportInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "ImportInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_IMPORTINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [Import][google.cloud.redis.v1beta1.CloudRedis.ImportInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- input_config:
- Required. Specify data to be imported.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ImportInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(ImportInstanceRequest)
-
-GcsDestination = _reflection.GeneratedProtocolMessageType(
- "GcsDestination",
- (_message.Message,),
- dict(
- DESCRIPTOR=_GCSDESTINATION,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""The Cloud Storage location for the output content
- Attributes:
- uri:
- Required. Data destination URI (e.g.
- 'gs://my\_bucket/my\_object'). Existing files will be
- overwritten.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.GcsDestination)
- ),
-)
-_sym_db.RegisterMessage(GcsDestination)
-
-OutputConfig = _reflection.GeneratedProtocolMessageType(
- "OutputConfig",
- (_message.Message,),
- dict(
- DESCRIPTOR=_OUTPUTCONFIG,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""The output content
- Attributes:
- destination:
- Required. Specify destination location of output data
- gcs_destination:
- Google Cloud Storage destination for output content.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.OutputConfig)
- ),
-)
-_sym_db.RegisterMessage(OutputConfig)
-
-ExportInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "ExportInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_EXPORTINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [Export][google.cloud.redis.v1beta1.CloudRedis.ExportInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- output_config:
- Required. Specify data to be exported.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ExportInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(ExportInstanceRequest)
-
-FailoverInstanceRequest = _reflection.GeneratedProtocolMessageType(
- "FailoverInstanceRequest",
- (_message.Message,),
- dict(
- DESCRIPTOR=_FAILOVERINSTANCEREQUEST,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Request for
- [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance].
- Attributes:
- name:
- Required. Redis instance resource name using the form: ``proje
- cts/{project_id}/locations/{location_id}/instances/{instance_i
- d}`` where ``location_id`` refers to a GCP region.
- data_protection_mode:
- Optional. Available data protection modes that the user can
- choose. If it's unspecified, data protection mode will be
- LIMITED\_DATA\_LOSS by default.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.FailoverInstanceRequest)
- ),
-)
-_sym_db.RegisterMessage(FailoverInstanceRequest)
-
-LocationMetadata = _reflection.GeneratedProtocolMessageType(
- "LocationMetadata",
- (_message.Message,),
- dict(
- AvailableZonesEntry=_reflection.GeneratedProtocolMessageType(
- "AvailableZonesEntry",
- (_message.Message,),
- dict(
- DESCRIPTOR=_LOCATIONMETADATA_AVAILABLEZONESENTRY,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2"
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry)
- ),
- ),
- DESCRIPTOR=_LOCATIONMETADATA,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""This location metadata represents additional configuration options for
- a given location where a Redis instance may be created. All fields are
- output only. It is returned as content of the
- ``google.cloud.location.Location.metadata`` field.
- Attributes:
- available_zones:
- Output only. The set of available zones in the location. The
- map is keyed by the lowercase ID of each zone, as defined by
- GCE. These keys can be specified in ``location_id`` or
- ``alternative_location_id`` fields when creating a Redis
- instance.
- """,
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.LocationMetadata)
- ),
-)
-_sym_db.RegisterMessage(LocationMetadata)
-_sym_db.RegisterMessage(LocationMetadata.AvailableZonesEntry)
-
-ZoneMetadata = _reflection.GeneratedProtocolMessageType(
- "ZoneMetadata",
- (_message.Message,),
- dict(
- DESCRIPTOR=_ZONEMETADATA,
- __module__="google.cloud.redis_v1beta1.proto.cloud_redis_pb2",
- __doc__="""Defines specific information for a particular zone. Currently empty
- and reserved for future use only.""",
- # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ZoneMetadata)
- ),
-)
-_sym_db.RegisterMessage(ZoneMetadata)
-
-
-DESCRIPTOR._options = None
-_INSTANCE_LABELSENTRY._options = None
-_INSTANCE_REDISCONFIGSENTRY._options = None
-_INSTANCE.fields_by_name["name"]._options = None
-_INSTANCE.fields_by_name["location_id"]._options = None
-_INSTANCE.fields_by_name["alternative_location_id"]._options = None
-_INSTANCE.fields_by_name["redis_version"]._options = None
-_INSTANCE.fields_by_name["reserved_ip_range"]._options = None
-_INSTANCE.fields_by_name["host"]._options = None
-_INSTANCE.fields_by_name["port"]._options = None
-_INSTANCE.fields_by_name["current_location_id"]._options = None
-_INSTANCE.fields_by_name["create_time"]._options = None
-_INSTANCE.fields_by_name["state"]._options = None
-_INSTANCE.fields_by_name["status_message"]._options = None
-_INSTANCE.fields_by_name["redis_configs"]._options = None
-_INSTANCE.fields_by_name["tier"]._options = None
-_INSTANCE.fields_by_name["memory_size_gb"]._options = None
-_INSTANCE.fields_by_name["authorized_network"]._options = None
-_INSTANCE.fields_by_name["persistence_iam_identity"]._options = None
-_INSTANCE.fields_by_name["connect_mode"]._options = None
-_INSTANCE._options = None
-_LISTINSTANCESREQUEST.fields_by_name["parent"]._options = None
-_GETINSTANCEREQUEST.fields_by_name["name"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["parent"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["instance_id"]._options = None
-_CREATEINSTANCEREQUEST.fields_by_name["instance"]._options = None
-_UPDATEINSTANCEREQUEST.fields_by_name["update_mask"]._options = None
-_UPDATEINSTANCEREQUEST.fields_by_name["instance"]._options = None
-_UPGRADEINSTANCEREQUEST.fields_by_name["name"]._options = None
-_UPGRADEINSTANCEREQUEST.fields_by_name["redis_version"]._options = None
-_DELETEINSTANCEREQUEST.fields_by_name["name"]._options = None
-_GCSSOURCE.fields_by_name["uri"]._options = None
-_IMPORTINSTANCEREQUEST.fields_by_name["name"]._options = None
-_IMPORTINSTANCEREQUEST.fields_by_name["input_config"]._options = None
-_GCSDESTINATION.fields_by_name["uri"]._options = None
-_EXPORTINSTANCEREQUEST.fields_by_name["name"]._options = None
-_EXPORTINSTANCEREQUEST.fields_by_name["output_config"]._options = None
-_FAILOVERINSTANCEREQUEST.fields_by_name["name"]._options = None
-_FAILOVERINSTANCEREQUEST.fields_by_name["data_protection_mode"]._options = None
-_LOCATIONMETADATA_AVAILABLEZONESENTRY._options = None
-_LOCATIONMETADATA.fields_by_name["available_zones"]._options = None
-
-_CLOUDREDIS = _descriptor.ServiceDescriptor(
- name="CloudRedis",
- full_name="google.cloud.redis.v1beta1.CloudRedis",
- file=DESCRIPTOR,
- index=0,
- serialized_options=_b(
- "\312A\024redis.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform"
- ),
- serialized_start=3415,
- serialized_end=5632,
- methods=[
- _descriptor.MethodDescriptor(
- name="ListInstances",
- full_name="google.cloud.redis.v1beta1.CloudRedis.ListInstances",
- index=0,
- containing_service=None,
- input_type=_LISTINSTANCESREQUEST,
- output_type=_LISTINSTANCESRESPONSE,
- serialized_options=_b(
- "\202\323\344\223\0024\0222/v1beta1/{parent=projects/*/locations/*}/instances\332A\006parent"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="GetInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.GetInstance",
- index=1,
- containing_service=None,
- input_type=_GETINSTANCEREQUEST,
- output_type=_INSTANCE,
- serialized_options=_b(
- "\202\323\344\223\0024\0222/v1beta1/{name=projects/*/locations/*/instances/*}\332A\004name"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="CreateInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.CreateInstance",
- index=2,
- containing_service=None,
- input_type=_CREATEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002>"2/v1beta1/{parent=projects/*/locations/*}/instances:\010instance\332A\033parent,instance_id,instance\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="UpdateInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.UpdateInstance",
- index=3,
- containing_service=None,
- input_type=_UPDATEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\002G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\010instance\332A\024update_mask,instance\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any"
- ),
- ),
- _descriptor.MethodDescriptor(
- name="UpgradeInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.UpgradeInstance",
- index=4,
- containing_service=None,
- input_type=_UPGRADEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002?":/v1beta1/{name=projects/*/locations/*/instances/*}:upgrade:\001*\332A\022name,redis_version\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ImportInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.ImportInstance",
- index=5,
- containing_service=None,
- input_type=_IMPORTINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\001*\332A\021name,input_config\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="ExportInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.ExportInstance",
- index=6,
- containing_service=None,
- input_type=_EXPORTINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:export:\001*\332A\022name,output_config\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="FailoverInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.FailoverInstance",
- index=7,
- containing_service=None,
- input_type=_FAILOVERINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- '\202\323\344\223\002@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\001*\332A\031name,data_protection_mode\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any'
- ),
- ),
- _descriptor.MethodDescriptor(
- name="DeleteInstance",
- full_name="google.cloud.redis.v1beta1.CloudRedis.DeleteInstance",
- index=8,
- containing_service=None,
- input_type=_DELETEINSTANCEREQUEST,
- output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=_b(
- "\202\323\344\223\0024*2/v1beta1/{name=projects/*/locations/*/instances/*}\332A\004name\312A,\n\025google.protobuf.Empty\022\023google.protobuf.Any"
- ),
- ),
- ],
-)
-_sym_db.RegisterServiceDescriptor(_CLOUDREDIS)
-
-DESCRIPTOR.services_by_name["CloudRedis"] = _CLOUDREDIS
-
-# @@protoc_insertion_point(module_scope)
diff --git a/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py b/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py
deleted file mode 100644
index 3f4556b..0000000
--- a/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py
+++ /dev/null
@@ -1,254 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from google.cloud.redis_v1beta1.proto import (
- cloud_redis_pb2 as google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2,
-)
-from google.longrunning import (
- operations_pb2 as google_dot_longrunning_dot_operations__pb2,
-)
-
-
-class CloudRedisStub(object):
- """Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1beta1
-
- The `redis.googleapis.com` service implements the Google Cloud Memorystore
- for Redis API and defines the following resource model for managing Redis
- instances:
- * The service works with a collection of cloud projects, named: `/projects/*`
- * Each project has a collection of available locations, named: `/locations/*`
- * Each location has a collection of Redis instances, named: `/instances/*`
- * As such, Redis instances are resources of the form:
- `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
-
- Note that location_id must be refering to a GCP `region`; for example:
- * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.ListInstances = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/ListInstances",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.FromString,
- )
- self.GetInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/GetInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.SerializeToString,
- response_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.Instance.FromString,
- )
- self.CreateInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/CreateInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.UpdateInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.UpgradeInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/UpgradeInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpgradeInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ImportInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/ImportInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ImportInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.ExportInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/ExportInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ExportInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.FailoverInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/FailoverInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.FailoverInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
- self.DeleteInstance = channel.unary_unary(
- "/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance",
- request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
- )
-
-
-class CloudRedisServicer(object):
- """Configures and manages Cloud Memorystore for Redis instances
-
- Google Cloud Memorystore for Redis v1beta1
-
- The `redis.googleapis.com` service implements the Google Cloud Memorystore
- for Redis API and defines the following resource model for managing Redis
- instances:
- * The service works with a collection of cloud projects, named: `/projects/*`
- * Each project has a collection of available locations, named: `/locations/*`
- * Each location has a collection of Redis instances, named: `/instances/*`
- * As such, Redis instances are resources of the form:
- `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
-
- Note that location_id must be refering to a GCP `region`; for example:
- * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
- """
-
- def ListInstances(self, request, context):
- """Lists all Redis instances owned by a project in either the specified
- location (region) or all locations.
-
- The location should have the following format:
-
- * `projects/{project_id}/locations/{location_id}`
-
- If `location_id` is specified as `-` (wildcard), then all regions
- available to the project are queried, and the results are aggregated.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def GetInstance(self, request, context):
- """Gets the details of a specific Redis instance.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def CreateInstance(self, request, context):
- """Creates a Redis instance based on the specified tier and memory size.
-
- By default, the instance is accessible from the project's
- [default network](/compute/docs/networks-and-firewalls#networks).
-
- The creation is executed asynchronously and callers may check the returned
- operation to track its progress. Once the operation is completed the Redis
- instance will be fully functional. Completed longrunning.Operation will
- contain the new instance object in the response field.
-
- The returned operation is automatically deleted after a few hours, so there
- is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def UpdateInstance(self, request, context):
- """Updates the metadata and configuration of a specific Redis instance.
-
- Completed longrunning.Operation will contain the new instance object
- in the response field. The returned operation is automatically deleted
- after a few hours, so there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def UpgradeInstance(self, request, context):
- """Upgrades Redis instance to the newer Redis version specified in the
- request.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ImportInstance(self, request, context):
- """Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
-
- Redis may stop serving during this operation. Instance state will be
- IMPORTING for entire operation. When complete, the instance will contain
- only data from the imported file.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def ExportInstance(self, request, context):
- """Export Redis instance data into a Redis RDB format file in Cloud Storage.
-
- Redis will continue serving during this operation.
-
- The returned operation is automatically deleted after a few hours, so
- there is no need to call DeleteOperation.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def FailoverInstance(self, request, context):
- """Initiates a failover of the master node to current replica node for a
- specific STANDARD tier Cloud Memorystore for Redis instance.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
- def DeleteInstance(self, request, context):
- """Deletes a specific Redis instance. Instance stops serving and data is
- deleted.
- """
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details("Method not implemented!")
- raise NotImplementedError("Method not implemented!")
-
-
-def add_CloudRedisServicer_to_server(servicer, server):
- rpc_method_handlers = {
- "ListInstances": grpc.unary_unary_rpc_method_handler(
- servicer.ListInstances,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.FromString,
- response_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.SerializeToString,
- ),
- "GetInstance": grpc.unary_unary_rpc_method_handler(
- servicer.GetInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.FromString,
- response_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.Instance.SerializeToString,
- ),
- "CreateInstance": grpc.unary_unary_rpc_method_handler(
- servicer.CreateInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "UpdateInstance": grpc.unary_unary_rpc_method_handler(
- servicer.UpdateInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "UpgradeInstance": grpc.unary_unary_rpc_method_handler(
- servicer.UpgradeInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpgradeInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ImportInstance": grpc.unary_unary_rpc_method_handler(
- servicer.ImportInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ImportInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "ExportInstance": grpc.unary_unary_rpc_method_handler(
- servicer.ExportInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ExportInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "FailoverInstance": grpc.unary_unary_rpc_method_handler(
- servicer.FailoverInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.FailoverInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- "DeleteInstance": grpc.unary_unary_rpc_method_handler(
- servicer.DeleteInstance,
- request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- "google.cloud.redis.v1beta1.CloudRedis", rpc_method_handlers
- )
- server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/redis_v1beta1/py.typed b/google/cloud/redis_v1beta1/py.typed
new file mode 100644
index 0000000..960151e
--- /dev/null
+++ b/google/cloud/redis_v1beta1/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-redis package uses inline types.
diff --git a/google/cloud/redis_v1beta1/services/__init__.py b/google/cloud/redis_v1beta1/services/__init__.py
new file mode 100644
index 0000000..42ffdf2
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/cloud/__init__.py b/google/cloud/redis_v1beta1/services/cloud_redis/__init__.py
similarity index 71%
rename from google/cloud/__init__.py
rename to google/cloud/redis_v1beta1/services/cloud_redis/__init__.py
index 9a1b64a..e66c1aa 100644
--- a/google/cloud/__init__.py
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/__init__.py
@@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
-#
+
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
+from .client import CloudRedisClient
+from .async_client import CloudRedisAsyncClient
- __path__ = pkgutil.extend_path(__path__, __name__)
+__all__ = (
+ "CloudRedisClient",
+ "CloudRedisAsyncClient",
+)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py
new file mode 100644
index 0000000..49b248a
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/async_client.py
@@ -0,0 +1,1009 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import builtins
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation
+from google.api_core import operation_async
+from google.cloud.redis_v1beta1.services.cloud_redis import pagers
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.protobuf import any_pb2 as any # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+from .client import CloudRedisClient
+
+
+class CloudRedisAsyncClient:
+ """Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1beta1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+ """
+
+ _client: CloudRedisClient
+
+ DEFAULT_ENDPOINT = CloudRedisClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = CloudRedisClient.DEFAULT_MTLS_ENDPOINT
+
+ instance_path = staticmethod(CloudRedisClient.instance_path)
+ parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path)
+
+ from_service_account_file = CloudRedisClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ get_transport_class = functools.partial(
+ type(CloudRedisClient).get_transport_class, type(CloudRedisClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, CloudRedisTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the cloud redis client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.CloudRedisTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = CloudRedisClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def list_instances(
+ self,
+ request: cloud_redis.ListInstancesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListInstancesAsyncPager:
+ r"""Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Args:
+ request (:class:`~.cloud_redis.ListInstancesRequest`):
+ The request object. Request for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListInstancesAsyncPager:
+ Response for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([parent]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ListInstancesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_instances,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListInstancesAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_instance(
+ self,
+ request: cloud_redis.GetInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloud_redis.Instance:
+ r"""Gets the details of a specific Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.GetInstanceRequest`):
+ The request object. Request for
+ [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cloud_redis.Instance:
+ A Google Cloud Redis instance.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.GetInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def create_instance(
+ self,
+ request: cloud_redis.CreateInstanceRequest = None,
+ *,
+ parent: str = None,
+ instance_id: str = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+        `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.CreateInstanceRequest`):
+ The request object. Request for
+ [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (:class:`str`):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. A Redis [Instance] resource
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([parent, instance_id, instance]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.CreateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_instance(
+ self,
+ request: cloud_redis.UpdateInstanceRequest = None,
+ *,
+ update_mask: field_mask.FieldMask = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.UpdateInstanceRequest`):
+ The request object. Request for
+ [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance].
+ update_mask (:class:`~.field_mask.FieldMask`):
+ Required. Mask of fields to update. At least one path
+ must be supplied in this field. The elements of the
+ repeated paths field may only include these fields from
+ [Instance][google.cloud.redis.v1beta1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([update_mask, instance]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.UpdateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if update_mask is not None:
+ request.update_mask = update_mask
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def upgrade_instance(
+ self,
+ request: cloud_redis.UpgradeInstanceRequest = None,
+ *,
+ name: str = None,
+ redis_version: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Args:
+ request (:class:`~.cloud_redis.UpgradeInstanceRequest`):
+ The request object. Request for
+ [UpgradeInstance][google.cloud.redis.v1beta1.CloudRedis.UpgradeInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ redis_version (:class:`str`):
+ Required. Specifies the target
+ version of Redis software to upgrade to.
+ This corresponds to the ``redis_version`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, redis_version]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.UpgradeInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if redis_version is not None:
+ request.redis_version = redis_version
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.upgrade_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def import_instance(
+ self,
+ request: cloud_redis.ImportInstanceRequest = None,
+ *,
+ name: str = None,
+ input_config: cloud_redis.InputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ImportInstanceRequest`):
+ The request object. Request for
+ [Import][google.cloud.redis.v1beta1.CloudRedis.ImportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`~.cloud_redis.InputConfig`):
+ Required. Specify data to be
+ imported.
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, input_config]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ImportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if input_config is not None:
+ request.input_config = input_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.import_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def export_instance(
+ self,
+ request: cloud_redis.ExportInstanceRequest = None,
+ *,
+ name: str = None,
+ output_config: cloud_redis.OutputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ExportInstanceRequest`):
+ The request object. Request for
+ [Export][google.cloud.redis.v1beta1.CloudRedis.ExportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ output_config (:class:`~.cloud_redis.OutputConfig`):
+ Required. Specify data to be
+ exported.
+ This corresponds to the ``output_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, output_config]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.ExportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if output_config is not None:
+ request.output_config = output_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.export_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def failover_instance(
+ self,
+ request: cloud_redis.FailoverInstanceRequest = None,
+ *,
+ name: str = None,
+ data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.FailoverInstanceRequest`):
+ The request object. Request for
+ [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ data_protection_mode (:class:`~.cloud_redis.FailoverInstanceRequest.DataProtectionMode`):
+ Optional. Available data protection modes that the user
+ can choose. If it's unspecified, data protection mode
+ will be LIMITED_DATA_LOSS by default.
+ This corresponds to the ``data_protection_mode`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name, data_protection_mode]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.FailoverInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if data_protection_mode is not None:
+ request.data_protection_mode = data_protection_mode
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.failover_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_instance(
+ self,
+ request: cloud_redis.DeleteInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Args:
+ request (:class:`~.cloud_redis.DeleteInstanceRequest`):
+ The request object. Request for
+ [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+                :class:`~.empty.Empty`: A generic empty message that
+ you can re-use to avoid defining duplicated empty
+ messages in your APIs. A typical example is to use it as
+ the request or the response type of an API method. For
+ instance:
+
+ ::
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+
+ The JSON representation for ``Empty`` is empty JSON
+ object ``{}``.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and builtins.any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloud_redis.DeleteInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_instance,
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty.Empty,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("CloudRedisAsyncClient",)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/client.py b/google/cloud/redis_v1beta1/services/cloud_redis/client.py
new file mode 100644
index 0000000..e9ef707
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/client.py
@@ -0,0 +1,1186 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import builtins
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation
+from google.api_core import operation_async
+from google.cloud.redis_v1beta1.services.cloud_redis import pagers
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.protobuf import any_pb2 as any # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import CloudRedisGrpcTransport
+from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+
+
class CloudRedisClientMeta(type):
    """Metaclass for the CloudRedis client.

    Holds class-level helpers for building and retrieving support objects
    (e.g. transport) so that client instances stay free of that bookkeeping.
    """

    # Ordered so that the first entry ("grpc") is the default transport.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[CloudRedisTransport]]
    _transport_registry["grpc"] = CloudRedisGrpcTransport
    _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport

    def get_transport_class(cls, label: str = None,) -> Type[CloudRedisTransport]:
        """Return an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        registry = cls._transport_registry
        # An explicit label selects that exact transport; otherwise fall back
        # to the first registered entry.
        if label:
            return registry[label]
        return next(iter(registry.values()))
+
+
+class CloudRedisClient(metaclass=CloudRedisClientMeta):
+ """Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1beta1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
    # Default service hostname, and its mTLS counterpart derived from it.
    # ``__func__`` is needed because the staticmethod wrapper is not yet
    # callable during class body execution.
    DEFAULT_ENDPOINT = "redis.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ {@api.name}: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @staticmethod
+ def instance_path(project: str, location: str, instance: str,) -> str:
+ """Return a fully-qualified instance string."""
+ return "projects/{project}/locations/{location}/instances/{instance}".format(
+ project=project, location=location, instance=instance,
+ )
+
+ @staticmethod
+ def parse_instance_path(path: str) -> Dict[str, str]:
+ """Parse a instance path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
    def __init__(
        self,
        *,
        credentials: credentials.Credentials = None,
        transport: Union[str, CloudRedisTransport] = None,
        client_options: ClientOptions = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the cloud redis client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.CloudRedisTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (ClientOptions): Custom options for the client. It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Accept a plain dict in place of a ClientOptions instance; fall back
        # to a default ClientOptions when none is given.
        if isinstance(client_options, dict):
            client_options = ClientOptions.from_dict(client_options)
        if client_options is None:
            client_options = ClientOptions.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        # NOTE(review): ``strtobool`` raises ValueError for unrecognized
        # values of the env var — confirm that is the intended behavior.
        use_client_cert = bool(
            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
        )

        ssl_credentials = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # A caller-supplied certificate source takes precedence.
                import grpc  # type: ignore

                cert, key = client_options.client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
                is_mtls = True
            else:
                # Otherwise probe for a default client certificate.
                creds = SslCredentials()
                is_mtls = creds.is_mtls
                ssl_credentials = creds.ssl_credentials if is_mtls else None

        # Figure out which api endpoint to use.
        # Explicit ``api_endpoint`` wins; else consult the env var.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                # "auto": use mTLS only when a client certificate is in play.
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, CloudRedisTransport):
            # transport is a CloudRedisTransport instance; it already owns
            # its credentials/scopes, so overriding them here is an error.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its scopes directly."
                )
            self._transport = transport
        else:
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                ssl_channel_credentials=ssl_credentials,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
            )
+
+ def list_instances(
+ self,
+ request: cloud_redis.ListInstancesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListInstancesPager:
+ r"""Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Args:
+ request (:class:`~.cloud_redis.ListInstancesRequest`):
+ The request object. Request for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListInstancesPager:
+ Response for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.ListInstancesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.ListInstancesRequest):
+ request = cloud_redis.ListInstancesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstancesPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_instance(
+ self,
+ request: cloud_redis.GetInstanceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloud_redis.Instance:
+ r"""Gets the details of a specific Redis instance.
+
+ Args:
+ request (:class:`~.cloud_redis.GetInstanceRequest`):
+ The request object. Request for
+ [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.cloud_redis.Instance:
+ A Google Cloud Redis instance.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.GetInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.GetInstanceRequest):
+ request = cloud_redis.GetInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def create_instance(
+ self,
+ request: cloud_redis.CreateInstanceRequest = None,
+ *,
+ parent: str = None,
+ instance_id: str = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network `__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.CreateInstanceRequest`):
+ The request object. Request for
+ [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance].
+ parent (:class:`str`):
+ Required. The resource name of the instance location
+ using the form:
+ ``projects/{project_id}/locations/{location_id}`` where
+ ``location_id`` refers to a GCP region.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (:class:`str`):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. A Redis [Instance] resource
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([parent, instance_id, instance])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.CreateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.CreateInstanceRequest):
+ request = cloud_redis.CreateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_instance(
+ self,
+ request: cloud_redis.UpdateInstanceRequest = None,
+ *,
+ update_mask: field_mask.FieldMask = None,
+ instance: cloud_redis.Instance = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.UpdateInstanceRequest`):
+ The request object. Request for
+ [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance].
+ update_mask (:class:`~.field_mask.FieldMask`):
+ Required. Mask of fields to update. At least one path
+ must be supplied in this field. The elements of the
+ repeated paths field may only include these fields from
+ [Instance][google.cloud.redis.v1beta1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (:class:`~.cloud_redis.Instance`):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([update_mask, instance])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.UpdateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.UpdateInstanceRequest):
+ request = cloud_redis.UpdateInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if update_mask is not None:
+ request.update_mask = update_mask
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("instance.name", request.instance.name),)
+ ),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ def upgrade_instance(
+ self,
+ request: cloud_redis.UpgradeInstanceRequest = None,
+ *,
+ name: str = None,
+ redis_version: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Args:
+ request (:class:`~.cloud_redis.UpgradeInstanceRequest`):
+ The request object. Request for
+ [UpgradeInstance][google.cloud.redis.v1beta1.CloudRedis.UpgradeInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ redis_version (:class:`str`):
+ Required. Specifies the target
+ version of Redis software to upgrade to.
+ This corresponds to the ``redis_version`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, redis_version])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.UpgradeInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.UpgradeInstanceRequest):
+ request = cloud_redis.UpgradeInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if redis_version is not None:
+ request.redis_version = redis_version
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.upgrade_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+ def import_instance(
+ self,
+ request: cloud_redis.ImportInstanceRequest = None,
+ *,
+ name: str = None,
+ input_config: cloud_redis.InputConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation.Operation:
+ r"""Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Args:
+ request (:class:`~.cloud_redis.ImportInstanceRequest`):
+ The request object. Request for
+ [Import][google.cloud.redis.v1beta1.CloudRedis.ImportInstance].
+ name (:class:`str`):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`~.cloud_redis.InputConfig`):
+ Required. Specify data to be
+ imported.
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:``~.cloud_redis.Instance``: A Google Cloud Redis
+ instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = builtins.any([name, input_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cloud_redis.ImportInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cloud_redis.ImportInstanceRequest):
+ request = cloud_redis.ImportInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if input_config is not None:
+ request.input_config = input_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.import_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ cloud_redis.Instance,
+ metadata_type=any.Any,
+ )
+
+ # Done; return the response.
+ return response
+
+    def export_instance(
+        self,
+        request: cloud_redis.ExportInstanceRequest = None,
+        *,
+        name: str = None,
+        output_config: cloud_redis.OutputConfig = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Export Redis instance data into a Redis RDB format
+        file in Cloud Storage.
+        Redis will continue serving during this operation.
+        The returned operation is automatically deleted after a
+        few hours, so there is no need to call DeleteOperation.
+
+        Args:
+            request (:class:`~.cloud_redis.ExportInstanceRequest`):
+                The request object. Request for
+                [Export][google.cloud.redis.v1beta1.CloudRedis.ExportInstance].
+            name (:class:`str`):
+                Required. Redis instance resource name using the form:
+                ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+                where ``location_id`` refers to a GCP region.
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            output_config (:class:`~.cloud_redis.OutputConfig`):
+                Required. Specify data to be
+                exported.
+                This corresponds to the ``output_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+                instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = builtins.any([name, output_config])  # builtins.any: the builtin is shadowed by the protobuf ``any`` module import
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloud_redis.ExportInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloud_redis.ExportInstanceRequest):
+            request = cloud_redis.ExportInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if name is not None:
+            request.name = name
+        if output_config is not None:
+            request.output_config = output_config
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.export_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            cloud_redis.Instance,
+            metadata_type=any.Any,  # operation metadata is surfaced as a packed protobuf Any
+        )
+
+        # Done; return the response.
+        return response
+
+    def failover_instance(
+        self,
+        request: cloud_redis.FailoverInstanceRequest = None,
+        *,
+        name: str = None,
+        data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Initiates a failover of the master node to current
+        replica node for a specific STANDARD tier Cloud
+        Memorystore for Redis instance.
+
+        Args:
+            request (:class:`~.cloud_redis.FailoverInstanceRequest`):
+                The request object. Request for
+                [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance].
+            name (:class:`str`):
+                Required. Redis instance resource name using the form:
+                ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+                where ``location_id`` refers to a GCP region.
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            data_protection_mode (:class:`~.cloud_redis.FailoverInstanceRequest.DataProtectionMode`):
+                Optional. Available data protection modes that the user
+                can choose. If it's unspecified, data protection mode
+                will be LIMITED_DATA_LOSS by default.
+                This corresponds to the ``data_protection_mode`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`~.cloud_redis.Instance`: A Google Cloud Redis
+                instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = builtins.any([name, data_protection_mode])  # builtins.any: the builtin is shadowed by the protobuf ``any`` module import
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloud_redis.FailoverInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloud_redis.FailoverInstanceRequest):
+            request = cloud_redis.FailoverInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if name is not None:
+            request.name = name
+        if data_protection_mode is not None:
+            request.data_protection_mode = data_protection_mode
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.failover_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            cloud_redis.Instance,
+            metadata_type=any.Any,  # operation metadata is surfaced as a packed protobuf Any
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_instance(
+        self,
+        request: cloud_redis.DeleteInstanceRequest = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Deletes a specific Redis instance.  Instance stops
+        serving and data is deleted.
+
+        Args:
+            request (:class:`~.cloud_redis.DeleteInstanceRequest`):
+                The request object. Request for
+                [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance].
+            name (:class:`str`):
+                Required. Redis instance resource name using the form:
+                ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+                where ``location_id`` refers to a GCP region.
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`~.empty.Empty`: A generic empty message that
+                you can re-use to avoid defining duplicated empty
+                messages in your APIs. A typical example is to use it as
+                the request or the response type of an API method. For
+                instance:
+
+                ::
+
+                    service Foo {
+                      rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+                    }
+
+                The JSON representation for ``Empty`` is empty JSON
+                object ``{}``.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = builtins.any([name])  # builtins.any: the builtin is shadowed by the protobuf ``any`` module import
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloud_redis.DeleteInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloud_redis.DeleteInstanceRequest):
+            request = cloud_redis.DeleteInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            empty.Empty,  # DeleteInstance resolves to google.protobuf.Empty
+            metadata_type=any.Any,  # operation metadata is surfaced as a packed protobuf Any
+        )
+
+        # Done; return the response.
+        return response
+
+
+try:  # report the installed library version in the client's user-agent metadata
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
+    )
+except pkg_resources.DistributionNotFound:  # running generated code without pip-installing the package
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("CloudRedisClient",)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/pagers.py b/google/cloud/redis_v1beta1/services/cloud_redis/pagers.py
new file mode 100644
index 0000000..16ca1d0
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/pagers.py
@@ -0,0 +1,148 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+
+from google.cloud.redis_v1beta1.types import cloud_redis
+
+
+class ListInstancesPager:
+    """A pager for iterating through ``list_instances`` requests.
+
+    This class thinly wraps an initial
+    :class:`~.cloud_redis.ListInstancesResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``instances`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListInstances`` requests and continue to iterate
+    through the ``instances`` field on the
+    corresponding responses.
+
+    All the usual :class:`~.cloud_redis.ListInstancesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., cloud_redis.ListInstancesResponse],
+        request: cloud_redis.ListInstancesRequest,
+        response: cloud_redis.ListInstancesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (:class:`~.cloud_redis.ListInstancesRequest`):
+                The initial request object.
+            response (:class:`~.cloud_redis.ListInstancesResponse`):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = cloud_redis.ListInstancesRequest(request)  # copy so page_token mutation doesn't affect the caller's request
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate unknown attributes to the most recent response
+
+    @property
+    def pages(self) -> Iterable[cloud_redis.ListInstancesResponse]:
+        yield self._response
+        while self._response.next_page_token:  # an empty token signals the final page
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterable[cloud_redis.Instance]:
+        for page in self.pages:
+            yield from page.instances
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListInstancesAsyncPager:
+    """A pager for iterating through ``list_instances`` requests.
+
+    This class thinly wraps an initial
+    :class:`~.cloud_redis.ListInstancesResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``instances`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListInstances`` requests and continue to iterate
+    through the ``instances`` field on the
+    corresponding responses.
+
+    All the usual :class:`~.cloud_redis.ListInstancesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]],
+        request: cloud_redis.ListInstancesRequest,
+        response: cloud_redis.ListInstancesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (:class:`~.cloud_redis.ListInstancesRequest`):
+                The initial request object.
+            response (:class:`~.cloud_redis.ListInstancesResponse`):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = cloud_redis.ListInstancesRequest(request)  # copy so page_token mutation doesn't affect the caller's request
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate unknown attributes to the most recent response
+
+    @property
+    async def pages(self) -> AsyncIterable[cloud_redis.ListInstancesResponse]:
+        yield self._response
+        while self._response.next_page_token:  # an empty token signals the final page
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterable[cloud_redis.Instance]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.instances:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py
new file mode 100644
index 0000000..3b0088e
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import CloudRedisTransport
+from .grpc import CloudRedisGrpcTransport
+from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[CloudRedisTransport]]
+_transport_registry["grpc"] = CloudRedisGrpcTransport  # synchronous gRPC transport
+_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport  # asyncio gRPC transport
+
+
+__all__ = (
+    "CloudRedisTransport",
+    "CloudRedisGrpcTransport",
+    "CloudRedisGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/base.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/base.py
new file mode 100644
index 0000000..71337ab
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/base.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+
+try:  # report the installed library version in the client's user-agent metadata
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
+    )
+except pkg_resources.DistributionNotFound:  # running generated code without pip-installing the package
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class CloudRedisTransport(abc.ABC):
+    """Abstract transport class for CloudRedis."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)  # default OAuth scope requested when loading credentials
+
+    def __init__(
+        self,
+        *,
+        host: str = "redis.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Lifted into its own function so it can be stubbed out during tests.
+        self._prep_wrapped_messages(client_info)
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods. Each RPC gets a 600s default timeout.
+        self._wrapped_methods = {
+            self.list_instances: gapic_v1.method.wrap_method(
+                self.list_instances, default_timeout=600.0, client_info=client_info,
+            ),
+            self.get_instance: gapic_v1.method.wrap_method(
+                self.get_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.create_instance: gapic_v1.method.wrap_method(
+                self.create_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.update_instance: gapic_v1.method.wrap_method(
+                self.update_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.upgrade_instance: gapic_v1.method.wrap_method(
+                self.upgrade_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.import_instance: gapic_v1.method.wrap_method(
+                self.import_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.export_instance: gapic_v1.method.wrap_method(
+                self.export_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.failover_instance: gapic_v1.method.wrap_method(
+                self.failover_instance, default_timeout=600.0, client_info=client_info,
+            ),
+            self.delete_instance: gapic_v1.method.wrap_method(
+                self.delete_instance, default_timeout=600.0, client_info=client_info,
+            ),
+        }
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Return the client designed to process long-running operations."""
+        raise NotImplementedError()  # provided by the concrete transport subclasses
+
+    @property
+    def list_instances(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.ListInstancesRequest],
+        typing.Union[
+            cloud_redis.ListInstancesResponse,
+            typing.Awaitable[cloud_redis.ListInstancesResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.GetInstanceRequest],
+        typing.Union[cloud_redis.Instance, typing.Awaitable[cloud_redis.Instance]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def create_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.CreateInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def update_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.UpdateInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def upgrade_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.UpgradeInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def import_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.ImportInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def export_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.ExportInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def failover_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.FailoverInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def delete_instance(
+        self,
+    ) -> typing.Callable[
+        [cloud_redis.DeleteInstanceRequest],
+        typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+    ]:
+        raise NotImplementedError()
+
+
+__all__ = ("CloudRedisTransport",)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py
new file mode 100644
index 0000000..b450f6e
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc.py
@@ -0,0 +1,550 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+
+
+class CloudRedisGrpcTransport(CloudRedisTransport):
+ """gRPC backend transport for CloudRedis.
+
+ Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1beta1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+            host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def list_instances(
+ self,
+ ) -> Callable[
+ [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse
+ ]:
+ r"""Return a callable for the list instances method over gRPC.
+
+ Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Returns:
+ Callable[[~.ListInstancesRequest],
+ ~.ListInstancesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_instances" not in self._stubs:
+ self._stubs["list_instances"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ListInstances",
+ request_serializer=cloud_redis.ListInstancesRequest.serialize,
+ response_deserializer=cloud_redis.ListInstancesResponse.deserialize,
+ )
+ return self._stubs["list_instances"]
+
+ @property
+ def get_instance(
+ self,
+ ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]:
+ r"""Return a callable for the get instance method over gRPC.
+
+ Gets the details of a specific Redis instance.
+
+ Returns:
+ Callable[[~.GetInstanceRequest],
+ ~.Instance]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_instance" not in self._stubs:
+ self._stubs["get_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/GetInstance",
+ request_serializer=cloud_redis.GetInstanceRequest.serialize,
+ response_deserializer=cloud_redis.Instance.deserialize,
+ )
+ return self._stubs["get_instance"]
+
+ @property
+ def create_instance(
+ self,
+ ) -> Callable[[cloud_redis.CreateInstanceRequest], operations.Operation]:
+ r"""Return a callable for the create instance method over gRPC.
+
+ Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.CreateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_instance" not in self._stubs:
+ self._stubs["create_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/CreateInstance",
+ request_serializer=cloud_redis.CreateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_instance"]
+
+ @property
+ def update_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations.Operation]:
+ r"""Return a callable for the update instance method over gRPC.
+
+ Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.UpdateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_instance" not in self._stubs:
+ self._stubs["update_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance",
+ request_serializer=cloud_redis.UpdateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_instance"]
+
+ @property
+ def upgrade_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations.Operation]:
+ r"""Return a callable for the upgrade instance method over gRPC.
+
+ Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Returns:
+ Callable[[~.UpgradeInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "upgrade_instance" not in self._stubs:
+ self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/UpgradeInstance",
+ request_serializer=cloud_redis.UpgradeInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["upgrade_instance"]
+
+ @property
+ def import_instance(
+ self,
+ ) -> Callable[[cloud_redis.ImportInstanceRequest], operations.Operation]:
+ r"""Return a callable for the import instance method over gRPC.
+
+ Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ImportInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_instance" not in self._stubs:
+ self._stubs["import_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ImportInstance",
+ request_serializer=cloud_redis.ImportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_instance"]
+
+ @property
+ def export_instance(
+ self,
+ ) -> Callable[[cloud_redis.ExportInstanceRequest], operations.Operation]:
+ r"""Return a callable for the export instance method over gRPC.
+
+ Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ExportInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_instance" not in self._stubs:
+ self._stubs["export_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ExportInstance",
+ request_serializer=cloud_redis.ExportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_instance"]
+
+ @property
+ def failover_instance(
+ self,
+ ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations.Operation]:
+ r"""Return a callable for the failover instance method over gRPC.
+
+ Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Returns:
+ Callable[[~.FailoverInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "failover_instance" not in self._stubs:
+ self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/FailoverInstance",
+ request_serializer=cloud_redis.FailoverInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["failover_instance"]
+
+ @property
+ def delete_instance(
+ self,
+ ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations.Operation]:
+ r"""Return a callable for the delete instance method over gRPC.
+
+ Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Returns:
+ Callable[[~.DeleteInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_instance" not in self._stubs:
+ self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance",
+ request_serializer=cloud_redis.DeleteInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["delete_instance"]
+
+
+__all__ = ("CloudRedisGrpcTransport",)
diff --git a/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py
new file mode 100644
index 0000000..239d7d7
--- /dev/null
+++ b/google/cloud/redis_v1beta1/services/cloud_redis/transports/grpc_asyncio.py
@@ -0,0 +1,554 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO
+from .grpc import CloudRedisGrpcTransport
+
+
+class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport):
+ """gRPC AsyncIO backend transport for CloudRedis.
+
+ Configures and manages Cloud Memorystore for Redis instances
+
+ Google Cloud Memorystore for Redis v1beta1
+
+ The ``redis.googleapis.com`` service implements the Google Cloud
+ Memorystore for Redis API and defines the following resource model
+ for managing Redis instances:
+
+ - The service works with a collection of cloud projects, named:
+ ``/projects/*``
+ - Each project has a collection of available locations, named:
+ ``/locations/*``
+ - Each location has a collection of Redis instances, named:
+ ``/instances/*``
+ - As such, Redis instances are resources of the form:
+ ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note that location_id must be referring to a GCP ``region``; for
+ example:
+
+ - ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ address (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "redis.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if "operations_client" not in self.__dict__:
+ self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self.__dict__["operations_client"]
+
+ @property
+ def list_instances(
+ self,
+ ) -> Callable[
+ [cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse]
+ ]:
+ r"""Return a callable for the list instances method over gRPC.
+
+ Lists all Redis instances owned by a project in either the
+ specified location (region) or all locations.
+
+ The location should have the following format:
+
+ - ``projects/{project_id}/locations/{location_id}``
+
+ If ``location_id`` is specified as ``-`` (wildcard), then all
+ regions available to the project are queried, and the results
+ are aggregated.
+
+ Returns:
+ Callable[[~.ListInstancesRequest],
+ Awaitable[~.ListInstancesResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_instances" not in self._stubs:
+ self._stubs["list_instances"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ListInstances",
+ request_serializer=cloud_redis.ListInstancesRequest.serialize,
+ response_deserializer=cloud_redis.ListInstancesResponse.deserialize,
+ )
+ return self._stubs["list_instances"]
+
+ @property
+ def get_instance(
+ self,
+ ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]:
+ r"""Return a callable for the get instance method over gRPC.
+
+ Gets the details of a specific Redis instance.
+
+ Returns:
+ Callable[[~.GetInstanceRequest],
+ Awaitable[~.Instance]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_instance" not in self._stubs:
+ self._stubs["get_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/GetInstance",
+ request_serializer=cloud_redis.GetInstanceRequest.serialize,
+ response_deserializer=cloud_redis.Instance.deserialize,
+ )
+ return self._stubs["get_instance"]
+
+ @property
+ def create_instance(
+ self,
+ ) -> Callable[[cloud_redis.CreateInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the create instance method over gRPC.
+
+ Creates a Redis instance based on the specified tier and memory
+ size.
+
+ By default, the instance is accessible from the project's
+ `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__.
+
+ The creation is executed asynchronously and callers may check
+ the returned operation to track its progress. Once the operation
+ is completed the Redis instance will be fully functional.
+ Completed longrunning.Operation will contain the new instance
+ object in the response field.
+
+ The returned operation is automatically deleted after a few
+ hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.CreateInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_instance" not in self._stubs:
+ self._stubs["create_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/CreateInstance",
+ request_serializer=cloud_redis.CreateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["create_instance"]
+
+ @property
+ def update_instance(
+ self,
+ ) -> Callable[[cloud_redis.UpdateInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the update instance method over gRPC.
+
+ Updates the metadata and configuration of a specific
+ Redis instance.
+ Completed longrunning.Operation will contain the new
+ instance object in the response field. The returned
+ operation is automatically deleted after a few hours, so
+ there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.UpdateInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_instance" not in self._stubs:
+ self._stubs["update_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance",
+ request_serializer=cloud_redis.UpdateInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["update_instance"]
+
+ @property
+ def upgrade_instance(
+ self,
+ ) -> Callable[
+ [cloud_redis.UpgradeInstanceRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the upgrade instance method over gRPC.
+
+ Upgrades Redis instance to the newer Redis version
+ specified in the request.
+
+ Returns:
+ Callable[[~.UpgradeInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "upgrade_instance" not in self._stubs:
+ self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/UpgradeInstance",
+ request_serializer=cloud_redis.UpgradeInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["upgrade_instance"]
+
+ @property
+ def import_instance(
+ self,
+ ) -> Callable[[cloud_redis.ImportInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the import instance method over gRPC.
+
+ Import a Redis RDB snapshot file from Cloud Storage
+ into a Redis instance.
+ Redis may stop serving during this operation. Instance
+ state will be IMPORTING for entire operation. When
+ complete, the instance will contain only data from the
+ imported file.
+
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ImportInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_instance" not in self._stubs:
+ self._stubs["import_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ImportInstance",
+ request_serializer=cloud_redis.ImportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_instance"]
+
+ @property
+ def export_instance(
+ self,
+ ) -> Callable[[cloud_redis.ExportInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the export instance method over gRPC.
+
+ Export Redis instance data into a Redis RDB format
+ file in Cloud Storage.
+ Redis will continue serving during this operation.
+ The returned operation is automatically deleted after a
+ few hours, so there is no need to call DeleteOperation.
+
+ Returns:
+ Callable[[~.ExportInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "export_instance" not in self._stubs:
+ self._stubs["export_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/ExportInstance",
+ request_serializer=cloud_redis.ExportInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["export_instance"]
+
+ @property
+ def failover_instance(
+ self,
+ ) -> Callable[
+ [cloud_redis.FailoverInstanceRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the failover instance method over gRPC.
+
+ Initiates a failover of the master node to current
+ replica node for a specific STANDARD tier Cloud
+ Memorystore for Redis instance.
+
+ Returns:
+ Callable[[~.FailoverInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "failover_instance" not in self._stubs:
+ self._stubs["failover_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/FailoverInstance",
+ request_serializer=cloud_redis.FailoverInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["failover_instance"]
+
+ @property
+ def delete_instance(
+ self,
+ ) -> Callable[[cloud_redis.DeleteInstanceRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the delete instance method over gRPC.
+
+ Deletes a specific Redis instance. Instance stops
+ serving and data is deleted.
+
+ Returns:
+ Callable[[~.DeleteInstanceRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_instance" not in self._stubs:
+ self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
+ "/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance",
+ request_serializer=cloud_redis.DeleteInstanceRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["delete_instance"]
+
+
+__all__ = ("CloudRedisGrpcAsyncIOTransport",)
diff --git a/google/cloud/redis_v1beta1/types.py b/google/cloud/redis_v1beta1/types.py
deleted file mode 100644
index c7de16b..0000000
--- a/google/cloud/redis_v1beta1/types.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from __future__ import absolute_import
-import sys
-
-from google.api_core.protobuf_helpers import get_messages
-
-from google.cloud.redis_v1beta1.proto import cloud_redis_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import any_pb2
-from google.protobuf import field_mask_pb2
-from google.protobuf import timestamp_pb2
-from google.rpc import status_pb2
-
-
-_shared_modules = [operations_pb2, any_pb2, field_mask_pb2, timestamp_pb2, status_pb2]
-
-_local_modules = [cloud_redis_pb2]
-
-names = []
-
-for module in _shared_modules: # pragma: NO COVER
- for name, message in get_messages(module).items():
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-for module in _local_modules:
- for name, message in get_messages(module).items():
- message.__module__ = "google.cloud.redis_v1beta1.types"
- setattr(sys.modules[__name__], name, message)
- names.append(name)
-
-
-__all__ = tuple(sorted(names))
diff --git a/google/cloud/redis_v1beta1/types/__init__.py b/google/cloud/redis_v1beta1/types/__init__.py
new file mode 100644
index 0000000..3d4931d
--- /dev/null
+++ b/google/cloud/redis_v1beta1/types/__init__.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .cloud_redis import (
+ Instance,
+ ListInstancesRequest,
+ ListInstancesResponse,
+ GetInstanceRequest,
+ CreateInstanceRequest,
+ UpdateInstanceRequest,
+ UpgradeInstanceRequest,
+ DeleteInstanceRequest,
+ GcsSource,
+ InputConfig,
+ ImportInstanceRequest,
+ GcsDestination,
+ OutputConfig,
+ ExportInstanceRequest,
+ FailoverInstanceRequest,
+ LocationMetadata,
+ ZoneMetadata,
+)
+
+
+__all__ = (
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "GetInstanceRequest",
+ "CreateInstanceRequest",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "DeleteInstanceRequest",
+ "GcsSource",
+ "InputConfig",
+ "ImportInstanceRequest",
+ "GcsDestination",
+ "OutputConfig",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "LocationMetadata",
+ "ZoneMetadata",
+)
diff --git a/google/cloud/redis_v1beta1/types/cloud_redis.py b/google/cloud/redis_v1beta1/types/cloud_redis.py
new file mode 100644
index 0000000..80ec425
--- /dev/null
+++ b/google/cloud/redis_v1beta1/types/cloud_redis.py
@@ -0,0 +1,548 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.cloud.redis.v1beta1",
+ manifest={
+ "Instance",
+ "ListInstancesRequest",
+ "ListInstancesResponse",
+ "GetInstanceRequest",
+ "CreateInstanceRequest",
+ "UpdateInstanceRequest",
+ "UpgradeInstanceRequest",
+ "DeleteInstanceRequest",
+ "GcsSource",
+ "InputConfig",
+ "ImportInstanceRequest",
+ "GcsDestination",
+ "OutputConfig",
+ "ExportInstanceRequest",
+ "FailoverInstanceRequest",
+ "LocationMetadata",
+ "ZoneMetadata",
+ },
+)
+
+
+class Instance(proto.Message):
+ r"""A Google Cloud Redis instance.
+
+ Attributes:
+ name (str):
+ Required. Unique name of the resource in this scope
+ including project and location using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+ Note: Redis instances are managed and addressed at regional
+ level so location_id here refers to a GCP region; however,
+ users may choose which specific zone (or collection of zones
+ for cross-zone instances) an instance should be provisioned
+ in. Refer to
+ [location_id][google.cloud.redis.v1beta1.Instance.location_id]
+ and
+ [alternative_location_id][google.cloud.redis.v1beta1.Instance.alternative_location_id]
+ fields for more details.
+ display_name (str):
+ An arbitrary and optional user-provided name
+ for the instance.
+ labels (Sequence[~.cloud_redis.Instance.LabelsEntry]):
+ Resource labels to represent user provided
+ metadata
+ location_id (str):
+ Optional. The zone where the instance will be provisioned.
+ If not provided, the service will choose a zone for the
+ instance. For STANDARD_HA tier, instances will be created
+ across two zones for protection against zonal failures. If
+ [alternative_location_id][google.cloud.redis.v1beta1.Instance.alternative_location_id]
+ is also provided, it must be different from
+ [location_id][google.cloud.redis.v1beta1.Instance.location_id].
+ alternative_location_id (str):
+ Optional. Only applicable to STANDARD_HA tier which protects
+ the instance against zonal failures by provisioning it
+ across two zones. If provided, it must be a different zone
+ from the one provided in
+ [location_id][google.cloud.redis.v1beta1.Instance.location_id].
+ redis_version (str):
+ Optional. The version of Redis software. If not provided,
+ latest supported version will be used. Currently, the
+ supported values are:
+
+ - ``REDIS_3_2`` for Redis 3.2 compatibility
+ - ``REDIS_4_0`` for Redis 4.0 compatibility (default)
+ - ``REDIS_5_0`` for Redis 5.0 compatibility
+ reserved_ip_range (str):
+ Optional. The CIDR range of internal
+ addresses that are reserved for this instance.
+ If not provided, the service will choose an
+ unused /29 block, for example, 10.0.0.0/29 or
+ 192.168.0.0/29. Ranges must be unique and non-
+ overlapping with existing subnets in an
+ authorized network.
+ host (str):
+ Output only. Hostname or IP address of the
+ exposed Redis endpoint used by clients to
+ connect to the service.
+ port (int):
+ Output only. The port number of the exposed
+ Redis endpoint.
+ current_location_id (str):
+ Output only. The current zone where the Redis endpoint is
+ placed. For Basic Tier instances, this will always be the
+ same as the
+ [location_id][google.cloud.redis.v1beta1.Instance.location_id]
+ provided by the user at creation time. For Standard Tier
+ instances, this can be either
+ [location_id][google.cloud.redis.v1beta1.Instance.location_id]
+ or
+ [alternative_location_id][google.cloud.redis.v1beta1.Instance.alternative_location_id]
+ and can change after a failover event.
+ create_time (~.timestamp.Timestamp):
+ Output only. The time the instance was
+ created.
+ state (~.cloud_redis.Instance.State):
+ Output only. The current state of this
+ instance.
+ status_message (str):
+ Output only. Additional information about the
+ current status of this instance, if available.
+ redis_configs (Sequence[~.cloud_redis.Instance.RedisConfigsEntry]):
+ Optional. Redis configuration parameters, according to
+ http://redis.io/topics/config. Currently, the only supported
+ parameters are:
+
+ Redis version 3.2 and newer:
+
+ - maxmemory-policy
+ - notify-keyspace-events
+
+ Redis version 4.0 and newer:
+
+ - activedefrag
+ - lfu-decay-time
+ - lfu-log-factor
+ - maxmemory-gb
+
+ Redis version 5.0 and newer:
+
+ - stream-node-max-bytes
+ - stream-node-max-entries
+ tier (~.cloud_redis.Instance.Tier):
+ Required. The service tier of the instance.
+ memory_size_gb (int):
+ Required. Redis memory size in GiB.
+ authorized_network (str):
+ Optional. The full name of the Google Compute Engine
+ `network <https://cloud.google.com/vpc/docs/vpc>`__ to which
+ the instance is connected. If left unspecified, the
+ ``default`` network will be used.
+ persistence_iam_identity (str):
+ Output only. Cloud IAM identity used by import / export
+ operations to transfer data to/from Cloud Storage. Format is
+ "serviceAccount:<service_account_email>". The value may
+ change over time for a given instance so should be checked
+ before each import/export operation.
+ connect_mode (~.cloud_redis.Instance.ConnectMode):
+ Optional. The connect mode of Redis instance. If not
+ provided, default one will be used. Current default:
+ DIRECT_PEERING.
+ """
+
+ class State(proto.Enum):
+ r"""Represents the different states of a Redis instance."""
+ STATE_UNSPECIFIED = 0
+ CREATING = 1
+ READY = 2
+ UPDATING = 3
+ DELETING = 4
+ REPAIRING = 5
+ MAINTENANCE = 6
+ IMPORTING = 8
+ FAILING_OVER = 10
+
+ class Tier(proto.Enum):
+ r"""Available service tiers to choose from"""
+ TIER_UNSPECIFIED = 0
+ BASIC = 1
+ STANDARD_HA = 3
+
+ class ConnectMode(proto.Enum):
+ r"""Available connection modes."""
+ CONNECT_MODE_UNSPECIFIED = 0
+ DIRECT_PEERING = 1
+ PRIVATE_SERVICE_ACCESS = 2
+
+ name = proto.Field(proto.STRING, number=1)
+
+ display_name = proto.Field(proto.STRING, number=2)
+
+ labels = proto.MapField(proto.STRING, proto.STRING, number=3)
+
+ location_id = proto.Field(proto.STRING, number=4)
+
+ alternative_location_id = proto.Field(proto.STRING, number=5)
+
+ redis_version = proto.Field(proto.STRING, number=7)
+
+ reserved_ip_range = proto.Field(proto.STRING, number=9)
+
+ host = proto.Field(proto.STRING, number=10)
+
+ port = proto.Field(proto.INT32, number=11)
+
+ current_location_id = proto.Field(proto.STRING, number=12)
+
+ create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+
+ state = proto.Field(proto.ENUM, number=14, enum=State,)
+
+ status_message = proto.Field(proto.STRING, number=15)
+
+ redis_configs = proto.MapField(proto.STRING, proto.STRING, number=16)
+
+ tier = proto.Field(proto.ENUM, number=17, enum=Tier,)
+
+ memory_size_gb = proto.Field(proto.INT32, number=18)
+
+ authorized_network = proto.Field(proto.STRING, number=20)
+
+ persistence_iam_identity = proto.Field(proto.STRING, number=21)
+
+ connect_mode = proto.Field(proto.ENUM, number=22, enum=ConnectMode,)
+
+
+class ListInstancesRequest(proto.Message):
+ r"""Request for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+
+ Attributes:
+ parent (str):
+ Required. The resource name of the instance location using
+ the form: ``projects/{project_id}/locations/{location_id}``
+ where ``location_id`` refers to a GCP region.
+ page_size (int):
+ The maximum number of items to return.
+
+ If not specified, a default value of 1000 will be used by
+ the service. Regardless of the page_size value, the response
+ may include a partial list and a caller should only rely on
+ response's
+ [``next_page_token``][google.cloud.redis.v1beta1.ListInstancesResponse.next_page_token]
+ to determine if there are more instances left to be queried.
+ page_token (str):
+ The ``next_page_token`` value returned from a previous
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]
+ request, if any.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ page_size = proto.Field(proto.INT32, number=2)
+
+ page_token = proto.Field(proto.STRING, number=3)
+
+
+class ListInstancesResponse(proto.Message):
+ r"""Response for
+ [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances].
+
+ Attributes:
+ instances (Sequence[~.cloud_redis.Instance]):
+ A list of Redis instances in the project in the specified
+ location, or across all locations.
+
+ If the ``location_id`` in the parent field of the request is
+ "-", all regions available to the project are queried, and
+ the results aggregated. If in such an aggregated query a
+ location is unavailable, a dummy Redis entry is included in
+ the response with the ``name`` field set to a value of the
+ form
+ ``projects/{project_id}/locations/{location_id}/instances/``-
+ and the ``status`` field set to ERROR and ``status_message``
+ field set to "location not available for ListInstances".
+ next_page_token (str):
+ Token to retrieve the next page of results,
+ or empty if there are no more results in the
+ list.
+ unreachable (Sequence[str]):
+ Locations that could not be reached.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ instances = proto.RepeatedField(proto.MESSAGE, number=1, message=Instance,)
+
+ next_page_token = proto.Field(proto.STRING, number=2)
+
+ unreachable = proto.RepeatedField(proto.STRING, number=3)
+
+
+class GetInstanceRequest(proto.Message):
+ r"""Request for
+ [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class CreateInstanceRequest(proto.Message):
+ r"""Request for
+ [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance].
+
+ Attributes:
+ parent (str):
+ Required. The resource name of the instance location using
+ the form: ``projects/{project_id}/locations/{location_id}``
+ where ``location_id`` refers to a GCP region.
+ instance_id (str):
+ Required. The logical name of the Redis instance in the
+ customer project with the following restrictions:
+
+ - Must contain only lowercase letters, numbers, and
+ hyphens.
+ - Must start with a letter.
+ - Must be between 1-40 characters.
+ - Must end with a number or a letter.
+ - Must be unique within the customer project / location
+ instance (~.cloud_redis.Instance):
+ Required. A Redis [Instance] resource
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ instance_id = proto.Field(proto.STRING, number=2)
+
+ instance = proto.Field(proto.MESSAGE, number=3, message=Instance,)
+
+
+class UpdateInstanceRequest(proto.Message):
+ r"""Request for
+ [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance].
+
+ Attributes:
+ update_mask (~.field_mask.FieldMask):
+ Required. Mask of fields to update. At least one path must
+ be supplied in this field. The elements of the repeated
+ paths field may only include these fields from
+ [Instance][google.cloud.redis.v1beta1.Instance]:
+
+ - ``displayName``
+ - ``labels``
+ - ``memorySizeGb``
+ - ``redisConfig``
+ instance (~.cloud_redis.Instance):
+ Required. Update description. Only fields specified in
+ update_mask are updated.
+ """
+
+ update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,)
+
+ instance = proto.Field(proto.MESSAGE, number=2, message=Instance,)
+
+
+class UpgradeInstanceRequest(proto.Message):
+ r"""Request for
+ [UpgradeInstance][google.cloud.redis.v1beta1.CloudRedis.UpgradeInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ redis_version (str):
+ Required. Specifies the target version of
+ Redis software to upgrade to.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ redis_version = proto.Field(proto.STRING, number=2)
+
+
+class DeleteInstanceRequest(proto.Message):
+ r"""Request for
+ [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class GcsSource(proto.Message):
+ r"""The Cloud Storage location for the input content
+
+ Attributes:
+ uri (str):
+ Required. Source data URI. (e.g.
+ 'gs://my_bucket/my_object').
+ """
+
+ uri = proto.Field(proto.STRING, number=1)
+
+
+class InputConfig(proto.Message):
+ r"""The input content
+
+ Attributes:
+ gcs_source (~.cloud_redis.GcsSource):
+ Google Cloud Storage location where input
+ content is located.
+ """
+
+ gcs_source = proto.Field(
+ proto.MESSAGE, number=1, oneof="source", message=GcsSource,
+ )
+
+
+class ImportInstanceRequest(proto.Message):
+ r"""Request for
+ [Import][google.cloud.redis.v1beta1.CloudRedis.ImportInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ input_config (~.cloud_redis.InputConfig):
+ Required. Specify data to be imported.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ input_config = proto.Field(proto.MESSAGE, number=3, message=InputConfig,)
+
+
+class GcsDestination(proto.Message):
+ r"""The Cloud Storage location for the output content
+
+ Attributes:
+ uri (str):
+ Required. Data destination URI (e.g.
+ 'gs://my_bucket/my_object'). Existing files will be
+ overwritten.
+ """
+
+ uri = proto.Field(proto.STRING, number=1)
+
+
+class OutputConfig(proto.Message):
+ r"""The output content
+
+ Attributes:
+ gcs_destination (~.cloud_redis.GcsDestination):
+ Google Cloud Storage destination for output
+ content.
+ """
+
+ gcs_destination = proto.Field(
+ proto.MESSAGE, number=1, oneof="destination", message=GcsDestination,
+ )
+
+
+class ExportInstanceRequest(proto.Message):
+ r"""Request for
+ [Export][google.cloud.redis.v1beta1.CloudRedis.ExportInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ output_config (~.cloud_redis.OutputConfig):
+ Required. Specify data to be exported.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ output_config = proto.Field(proto.MESSAGE, number=3, message=OutputConfig,)
+
+
+class FailoverInstanceRequest(proto.Message):
+ r"""Request for
+ [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance].
+
+ Attributes:
+ name (str):
+ Required. Redis instance resource name using the form:
+ ``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+ where ``location_id`` refers to a GCP region.
+ data_protection_mode (~.cloud_redis.FailoverInstanceRequest.DataProtectionMode):
+ Optional. Available data protection modes that the user can
+ choose. If it's unspecified, data protection mode will be
+ LIMITED_DATA_LOSS by default.
+ """
+
+ class DataProtectionMode(proto.Enum):
+ r"""Specifies different modes of operation in relation to the
+ data retention.
+ """
+ DATA_PROTECTION_MODE_UNSPECIFIED = 0
+ LIMITED_DATA_LOSS = 1
+ FORCE_DATA_LOSS = 2
+
+ name = proto.Field(proto.STRING, number=1)
+
+ data_protection_mode = proto.Field(proto.ENUM, number=2, enum=DataProtectionMode,)
+
+
+class LocationMetadata(proto.Message):
+ r"""This location metadata represents additional configuration options
+ for a given location where a Redis instance may be created. All
+ fields are output only. It is returned as content of the
+ ``google.cloud.location.Location.metadata`` field.
+
+ Attributes:
+ available_zones (Sequence[~.cloud_redis.LocationMetadata.AvailableZonesEntry]):
+ Output only. The set of available zones in the location. The
+ map is keyed by the lowercase ID of each zone, as defined by
+ GCE. These keys can be specified in ``location_id`` or
+ ``alternative_location_id`` fields when creating a Redis
+ instance.
+ """
+
+ available_zones = proto.MapField(
+ proto.STRING, proto.MESSAGE, number=1, message="ZoneMetadata",
+ )
+
+
+class ZoneMetadata(proto.Message):
+ r"""Defines specific information for a particular zone. Currently
+ empty and reserved for future use only.
+ """
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..4505b48
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/noxfile.py b/noxfile.py
index c454782..f04f906 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,14 +23,15 @@
import nox
-BLACK_VERSION = "black==19.3b0"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-if os.path.exists("samples"):
- BLACK_PATHS.append("samples")
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
"""Run linters.
@@ -38,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -53,10 +56,12 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
@@ -65,6 +70,8 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
+ session.install("asyncmock", "pytest-asyncio")
+
session.install("mock", "pytest", "pytest-cov")
session.install("-e", ".")
@@ -84,17 +91,21 @@ def default(session):
)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.7"])
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
@@ -110,7 +121,9 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest", "google-cloud-testutils")
+ session.install(
+ "mock", "pytest", "google-cloud-testutils",
+ )
session.install("-e", ".")
# Run py.test against the system tests.
@@ -120,7 +133,7 @@ def system(session):
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -128,24 +141,59 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=87")
+ session.run("coverage", "report", "--show-missing", "--fail-under=100")
session.run("coverage", "erase")
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
- session.install("sphinx<3.0.0", "alabaster", "recommonmark")
+ session.install("sphinx", "alabaster", "recommonmark")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ # "-W", # warnings as errors
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
- "-W", # warnings as errors
"-T", # show full traceback on exception
"-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
"-b",
"html",
"-d",
diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md
new file mode 100644
index 0000000..55c97b3
--- /dev/null
+++ b/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md
new file mode 100644
index 0000000..34c882b
--- /dev/null
+++ b/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 0000000..21f6d2a
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/fixup_redis_v1_keywords.py b/scripts/fixup_redis_v1_keywords.py
new file mode 100644
index 0000000..5d1dd0c
--- /dev/null
+++ b/scripts/fixup_redis_v1_keywords.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+ predicate: Callable[[Any], bool],
+ iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+ """A stable, out-of-place partition."""
+ results = ([], [])
+
+ for i in iterator:
+ results[int(predicate(i))].append(i)
+
+ # Returns trueList, falseList
+ return results[1], results[0]
+
+
+class redisCallTransformer(cst.CSTTransformer):
+ CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+ METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'create_instance': ('parent', 'instance_id', 'instance', ),
+ 'delete_instance': ('name', ),
+ 'export_instance': ('name', 'output_config', ),
+ 'failover_instance': ('name', 'data_protection_mode', ),
+ 'get_instance': ('name', ),
+ 'import_instance': ('name', 'input_config', ),
+ 'list_instances': ('parent', 'page_size', 'page_token', ),
+ 'update_instance': ('update_mask', 'instance', ),
+ 'upgrade_instance': ('name', 'redis_version', ),
+
+ }
+
+ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+ try:
+ key = original.func.attr.value
+ kword_params = self.METHOD_TO_PARAMS[key]
+ except (AttributeError, KeyError):
+ # Either not a method from the API or too convoluted to be sure.
+ return updated
+
+ # If the existing code is valid, keyword args come after positional args.
+ # Therefore, all positional args must map to the first parameters.
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+ if any(k.keyword.value == "request" for k in kwargs):
+ # We've already fixed this file, don't fix it again.
+ return updated
+
+ kwargs, ctrl_kwargs = partition(
+ lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ kwargs
+ )
+
+ args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+ ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+ for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+ request_arg = cst.Arg(
+ value=cst.Dict([
+ cst.DictElement(
+ cst.SimpleString("'{}'".format(name)),
+ cst.Element(value=arg.value)
+ )
+ # Note: the args + kwargs looks silly, but keep in mind that
+ # the control parameters had to be stripped out, and that
+ # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]),
+ keyword=cst.Name("request")
+ )
+
+ return updated.with_changes(
+ args=[request_arg] + ctrl_kwargs
+ )
+
+
+def fix_files(
+ in_dir: pathlib.Path,
+ out_dir: pathlib.Path,
+ *,
+ transformer=redisCallTransformer(),
+):
+ """Duplicate the input dir to the output dir, fixing file method calls.
+
+ Preconditions:
+ * in_dir is a real directory
+ * out_dir is a real, empty directory
+ """
+ pyfile_gen = (
+ pathlib.Path(os.path.join(root, f))
+ for root, _, files in os.walk(in_dir)
+ for f in files if os.path.splitext(f)[1] == ".py"
+ )
+
+ for fpath in pyfile_gen:
+ with open(fpath, 'r') as f:
+ src = f.read()
+
+ # Parse the code and insert method call fixes.
+ tree = cst.parse_module(src)
+ updated = tree.visit(transformer)
+
+ # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+ updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate the updated source file at the corresponding path.
+ with open(updated_path, 'w') as f:
+ f.write(updated.code)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="""Fix up source that uses the redis client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+ parameters in client method calls to keyword based parameters.
+ Cases where it WILL FAIL include
+ A) * or ** expansion in a method call.
+ B) Calls via function or method alias (includes free function calls)
+ C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+ These all constitute false negatives. The tool will also detect false
+ positives when an API method shares a name with another method.
+""")
+ parser.add_argument(
+ '-d',
+ '--input-directory',
+ required=True,
+ dest='input_dir',
+ help='the input directory to walk for python files to fix up',
+ )
+ parser.add_argument(
+ '-o',
+ '--output-directory',
+ required=True,
+ dest='output_dir',
+ help='the directory to output files fixed via un-flattening',
+ )
+ args = parser.parse_args()
+ input_dir = pathlib.Path(args.input_dir)
+ output_dir = pathlib.Path(args.output_dir)
+ if not input_dir.is_dir():
+ print(
+ f"input directory '{input_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if not output_dir.is_dir():
+ print(
+ f"output directory '{output_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if os.listdir(output_dir):
+ print(
+ f"output directory '{output_dir}' is not empty",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ fix_files(input_dir, output_dir)
diff --git a/scripts/fixup_redis_v1beta1_keywords.py b/scripts/fixup_redis_v1beta1_keywords.py
new file mode 100644
index 0000000..5d1dd0c
--- /dev/null
+++ b/scripts/fixup_redis_v1beta1_keywords.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """Stably split *iterator* into (matches, non-matches).

    The input is not mutated; relative order within each output list is
    preserved.
    """
    matches: List[Any] = []
    rest: List[Any] = []
    for item in iterator:
        (matches if predicate(item) else rest).append(item)
    return matches, rest
+
+
class redisCallTransformer(cst.CSTTransformer):
    """libcst transformer that un-flattens redis API method calls.

    The microgenerator surface takes a single ``request`` dict instead of
    flattened positional/keyword fields; this visitor rewrites matching
    calls in place. Calls that do not look like API methods pass through
    unchanged.
    """

    # Control-plane parameters that remain real keyword arguments rather
    # than being folded into the request dict. Order matters: trailing
    # positional args are mapped onto these names in this order.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # API method name -> ordered request-message field names. The ordering
    # mirrors the old flattened signature so positional args map correctly.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_instance': ('parent', 'instance_id', 'instance', ),
        'delete_instance': ('name', ),
        'export_instance': ('name', 'output_config', ),
        'failover_instance': ('name', 'data_protection_mode', ),
        'get_instance': ('name', ),
        'import_instance': ('name', 'input_config', ),
        'list_instances': ('parent', 'page_size', 'page_token', ),
        'update_instance': ('update_mask', 'instance', ),
        'upgrade_instance': ('name', 'redis_version', ),

    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite ``obj.method(a, b=x)`` into ``obj.method(request={...})``.

        Returns the (possibly unchanged) ``updated`` node; ``original`` is
        only consulted for the attribute name being called.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate retry/timeout/metadata keywords; they stay top-level.
        kwargs, ctrl_kwargs = partition(
            lambda a: not a.keyword.value in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the request fields must be control params
        # passed positionally; rebind them to their keyword names.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
+
+
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=redisCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    # Walk the tree, handling only Python source files.
    for root, _, files in os.walk(in_dir):
        for name in files:
            if os.path.splitext(name)[1] != ".py":
                continue
            fpath = pathlib.Path(os.path.join(root, name))
            src = fpath.read_text()

            # Parse the code and insert method call fixes.
            updated = cst.parse_module(src).visit(transformer)

            # Mirror the relative path under out_dir, then write the
            # rewritten source at the corresponding location.
            updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
            updated_path.parent.mkdir(parents=True, exist_ok=True)
            updated_path.write_text(updated.code)
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        # RawDescriptionHelpFormatter preserves the hand-wrapped caveat list
        # below; the default HelpFormatter would re-flow it into a single
        # unreadable paragraph in the --help output.
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="""Fix up source that uses the redis client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
  parameters in client method calls to keyword based parameters.
  Cases where it WILL FAIL include
  A) * or ** expansion in a method call.
  B) Calls via function or method alias (includes free function calls)
  C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

  These all constitute false negatives. The tool will also detect false
  positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Enforce fix_files' documented preconditions: both directories exist
    # and the output directory is empty, so no files are ever clobbered.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 0000000..d309d6e
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
# Shared Jinja2 environment. trim_blocks drops the newline after a block
# tag so rendered READMEs don't accumulate blank lines. Templates are
# resolved from the 'templates' directory that sits next to this script.
jinja_env = jinja2.Environment(
    trim_blocks=True,
    loader=jinja2.FileSystemLoader(
        os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))

# Top-level README template; section templates are included from within it.
README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
def get_help(file):
    """Return the ``--help`` output of the given sample script as text."""
    help_bytes = subprocess.check_output(['python', file, '--help'])
    return help_bytes.decode()
+
+
def main():
    """Render a sample directory's README from its yaml configuration.

    Takes the path to the yaml config as a positional argument; writes the
    rendered template next to it (default name: README.rst).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('source')
    parser.add_argument('--destination', default='README.rst')

    args = parser.parse_args()

    source = os.path.abspath(args.source)
    root = os.path.dirname(source)
    destination = os.path.join(root, args.destination)

    # Expose get_help so templates can embed a sample's --help output.
    jinja_env.globals['get_help'] = get_help

    with io.open(source, 'r') as f:
        # safe_load: the config is plain data. Bare yaml.load without an
        # explicit Loader is deprecated (PyYAML >= 5.1) and can construct
        # arbitrary Python objects from untrusted input.
        config = yaml.safe_load(f)

    # This allows get_help to execute in the right directory.
    os.chdir(root)

    output = README_TMPL.render(config)

    with io.open(destination, 'w') as f:
        f.write(output)


if __name__ == '__main__':
    main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 0000000..4fd2397
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 0000000..1446b94
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 0000000..11957ce
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 0000000..a0406db
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 0000000..5ea33d1
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.py b/setup.py
index 9b93ab1..e5c5c23 100644
--- a/setup.py
+++ b/setup.py
@@ -22,15 +22,16 @@
name = "google-cloud-redis"
description = "Google Cloud Memorystore for Redis API client library"
-version = "1.0.0"
+version = "2.0.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.14.0, < 2.0.0dev",
- 'enum34; python_version < "3.4"',
+ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
+ "proto-plus >= 1.4.0",
+ "libcst >= 0.2.5",
]
extras = {}
@@ -45,7 +46,9 @@
# Only include packages under the 'google' namespace. Do not include tests,
# benchmarks, etc.
packages = [
- package for package in setuptools.find_packages() if package.startswith("google")
+ package
+ for package in setuptools.PEP420PackageFinder.find()
+ if package.startswith("google")
]
# Determine which namespaces are needed.
@@ -68,12 +71,10 @@
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
"Topic :: Internet",
],
@@ -82,7 +83,11 @@
namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
- python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
+ python_requires=">=3.6",
+ scripts=[
+ "scripts/fixup_redis_v1_keywords.py",
+ "scripts/fixup_redis_v1beta1_keywords.py",
+ ],
include_package_data=True,
zip_safe=False,
)
diff --git a/synth.metadata b/synth.metadata
index 2b6877c..92c1159 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,32 +1,17 @@
{
"sources": [
- {
- "generator": {
- "name": "artman",
- "version": "2.0.0",
- "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098"
- }
- },
{
"git": {
"name": ".",
- "remote": "git@github.com:googleapis/python-redis",
- "sha": "276103ea988bff390f6835843c9665b97e671e11"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "756b174de4a122461993c1c583345533d819936d",
- "internalRef": "308824110"
+ "remote": "https://github.com/googleapis/python-redis.git",
+ "sha": "0b3f2c075728a6ec4d5d503d010de229ed1ef725"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "01b6f23d24b27878b48667ce597876d66b59780e"
+ "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6"
}
}
],
@@ -37,8 +22,7 @@
"apiName": "redis",
"apiVersion": "v1beta1",
"language": "python",
- "generator": "gapic",
- "config": "google/cloud/redis/artman_redis_v1beta1.yaml"
+ "generator": "bazel"
}
},
{
@@ -47,8 +31,7 @@
"apiName": "redis",
"apiVersion": "v1",
"language": "python",
- "generator": "gapic",
- "config": "google/cloud/redis/artman_redis_v1.yaml"
+ "generator": "bazel"
}
}
]
diff --git a/synth.py b/synth.py
index ab8f163..4e31d57 100644
--- a/synth.py
+++ b/synth.py
@@ -37,68 +37,27 @@
s.copy(library, excludes=excludes)
-# Fix docstrings
s.replace(
- "google/cloud/**/cloud_redis_client.py",
- r"resources of the form:\n ``",
- r"resources of the form:\n\n ``",
+ "google/cloud/**/*client.py",
+ "from collections import OrderedDict",
+ r"""import builtins
+from collections import OrderedDict"""
)
-
s.replace(
- "google/cloud/**/cloud_redis_client.py",
- r"""
- parent \(str\): Required. The resource name of the instance location using the form:
- ::
-
- `projects/{project_id}/locations/{location_id}`
- where ``location_id`` refers to a GCP region""",
- r"""
- parent (str): Required. The resource name of the instance location using the form ``projects/{project_id}/locations/{location_id}``
- where ``location_id`` refers to a GCP region""",
-)
-
-
-s.replace(
- "google/cloud/**/cloud_redis_client.py",
- r"""
- with the following restrictions:
-
- \* Must contain only lowercase letters, numbers, and hyphens\.""",
- r"""
- with the following restrictions:
- * Must contain only lowercase letters, numbers, and hyphens.""",
-)
-
-s.replace(
- "google/cloud/**/cloud_redis_client.py",
- r"""
- name \(str\): Required. Redis instance resource name using the form:
- ::
-
- `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
- where ``location_id`` refers to a GCP region""",
- r"""
- name (str): Required. Redis instance resource name using the form ``projects/{project_id}/locations/{location_id}/instances/{instance_id}```
- where ``location_id`` refers to a GCP region""",
-)
-
-s.replace(
- "google/cloud/**/cloud_redis_client.py",
- r"""
- fields from ``Instance``:
-
- \* ``displayName``
- \* ``labels``
- \* ``memorySizeGb``
- \* ``redisConfig``""",
- r"""
- fields from ``Instance``: ``displayName``, ``labels``, ``memorySizeGb``, and ``redisConfig``.""",
+ "google/cloud/**/*client.py",
+ "any\(",
+ "builtins.any(",
)
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(unit_cov_level=97, cov_level=87)
-s.move(templated_files)
+templated_files = common.py_library(
+ samples=False,
+ microgenerator=True,
+)
+s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file
+
+s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 0000000..b05fbd6
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/tests/unit/gapic/redis_v1/__init__.py b/tests/unit/gapic/redis_v1/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/tests/unit/gapic/redis_v1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/unit/gapic/redis_v1/test_cloud_redis.py
new file mode 100644
index 0000000..ad8595e
--- /dev/null
+++ b/tests/unit/gapic/redis_v1/test_cloud_redis.py
@@ -0,0 +1,2848 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation_async
+from google.api_core import operations_v1
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient
+from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient
+from google.cloud.redis_v1.services.cloud_redis import pagers
+from google.cloud.redis_v1.services.cloud_redis import transports
+from google.cloud.redis_v1.types import cloud_redis
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
def client_cert_source_callback():
    """Stand-in client certificate provider used by the mTLS tests."""
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-local stand-in endpoint for localhost-defaulting clients.

    If the client's DEFAULT_ENDPOINT contains "localhost", substitute a fake
    googleapis hostname so the mtls-endpoint derivation tests have something
    to transform; otherwise keep the real default.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
+
+
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mtls form
    and leaves None / non-googleapis hosts untouched."""
    non_googleapi = "api.example.com"

    # (input endpoint, expected mtls endpoint) pairs.
    cases = [
        (None, None),
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        (non_googleapi, non_googleapi),
    ]
    for endpoint, expected in cases:
        assert CloudRedisClient._get_default_mtls_endpoint(endpoint) == expected
+
+
@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient])
def test_cloud_redis_client_from_service_account_file(client_class):
    """Both file-based factories should install the mocked credentials and
    point the transport at the default endpoint."""
    creds = credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client._transport._credentials == creds

        # from_service_account_json is an alias for the same code path.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client._transport._credentials == creds

        assert client._transport._host == "redis.googleapis.com:443"
+
+
def test_cloud_redis_client_get_transport_class():
    """get_transport_class returns the gRPC transport both with no argument
    (the default) and when requested by name."""
    transport = CloudRedisClient.get_transport_class()
    assert transport == transports.CloudRedisGrpcTransport

    transport = CloudRedisClient.get_transport_class("grpc")
    assert transport == transports.CloudRedisGrpcTransport
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
)
@mock.patch.object(
    CloudRedisAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(CloudRedisAsyncClient),
)
def test_cloud_redis_client_client_options(
    client_class, transport_class, transport_name
):
    """Exercise client_options handling plus the GOOGLE_API_USE_MTLS_ENDPOINT
    and GOOGLE_API_USE_CLIENT_CERTIFICATE environment switches, for both the
    sync and async client classes."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                ssl_channel_credentials=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                ssl_channel_credentials=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
)
@mock.patch.object(
    CloudRedisAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(CloudRedisAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_redis_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify the endpoint/credential autoswitch under MTLS_ENDPOINT=auto:
    the mtls endpoint and client cert are used only when
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a cert source exists."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            ssl_channel_creds = mock.Mock()
            with mock.patch(
                "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
            ):
                patched.return_value = None
                client = client_class(client_options=options)

                if use_client_cert_env == "false":
                    expected_ssl_channel_creds = None
                    expected_host = client.DEFAULT_ENDPOINT
                else:
                    expected_ssl_channel_creds = ssl_channel_creds
                    expected_host = client.DEFAULT_MTLS_ENDPOINT

                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=expected_host,
                    scopes=None,
                    ssl_channel_credentials=expected_ssl_channel_creds,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    with mock.patch(
                        "google.auth.transport.grpc.SslCredentials.ssl_credentials",
                        new_callable=mock.PropertyMock,
                    ) as ssl_credentials_mock:
                        if use_client_cert_env == "false":
                            is_mtls_mock.return_value = False
                            ssl_credentials_mock.return_value = None
                            # NOTE(review): 'client' here is still the instance
                            # from the previous section; this works only because
                            # DEFAULT_ENDPOINT is a class-level attribute.
                            expected_host = client.DEFAULT_ENDPOINT
                            expected_ssl_channel_creds = None
                        else:
                            is_mtls_mock.return_value = True
                            ssl_credentials_mock.return_value = mock.Mock()
                            expected_host = client.DEFAULT_MTLS_ENDPOINT
                            expected_ssl_channel_creds = (
                                ssl_credentials_mock.return_value
                            )

                        patched.return_value = None
                        client = client_class()
                        patched.assert_called_once_with(
                            credentials=None,
                            credentials_file=None,
                            host=expected_host,
                            scopes=None,
                            ssl_channel_credentials=expected_ssl_channel_creds,
                            quota_project_id=None,
                            client_info=transports.base.DEFAULT_CLIENT_INFO,
                        )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    is_mtls_mock.return_value = False
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=client.DEFAULT_ENDPOINT,
                        scopes=None,
                        ssl_channel_credentials=None,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_cloud_redis_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """User-supplied OAuth scopes in client_options are forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_cloud_redis_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials_file given via client_options is passed through to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_cloud_redis_client_client_options_from_dict():
    """A plain dict is accepted for client_options and its api_endpoint is honored."""
    with mock.patch(
        "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = CloudRedisClient(client_options={"api_endpoint": "squid.clam.whelk"})
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_list_instances(
    transport: str = "grpc", request_type=cloud_redis.ListInstancesRequest
):
    """list_instances sends an equivalent request to the stub and returns a pager.

    Also run with request_type=dict (see test_list_instances_from_dict) to
    cover dict-to-proto coercion.
    """
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_redis.ListInstancesResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )

        response = client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.ListInstancesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstancesPager)

    assert response.next_page_token == "next_page_token_value"

    assert response.unreachable == ["unreachable_value"]
+
+
def test_list_instances_from_dict():
    """Re-run test_list_instances with a dict request to cover dict coercion."""
    test_list_instances(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_list_instances_async(transport: str = "grpc_asyncio"):
    """Async list_instances forwards the request and yields an async pager."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ListInstancesRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall wraps the response so it can be awaited.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )

        response = await client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstancesAsyncPager)

    assert response.next_page_token == "next_page_token_value"

    assert response.unreachable == ["unreachable_value"]
+
+
def test_list_instances_field_headers():
    """The request's ``parent`` is echoed in x-goog-request-params routing metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ListInstancesRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        call.return_value = cloud_redis.ListInstancesResponse()

        client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_list_instances_field_headers_async():
    """Async variant: routing header carries the request's ``parent`` field."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ListInstancesRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse()
        )

        await client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_list_instances_flattened():
    """Flattened kwargs (parent=...) are folded into the request object."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_redis.ListInstancesResponse()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_instances(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
def test_list_instances_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_instances(
            cloud_redis.ListInstancesRequest(), parent="parent_value",
        )
+
+
@pytest.mark.asyncio
async def test_list_instances_flattened_async():
    """Async variant: flattened kwargs (parent=...) are folded into the request.

    Patches the transport callable, invokes the method with only a flattened
    ``parent`` argument, and verifies the request the stub received carries it.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        # Designate an appropriate return value for the call. (A bare
        # ListInstancesResponse assignment that was immediately overwritten
        # here was dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_instances(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
@pytest.mark.asyncio
async def test_list_instances_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_instances(
            cloud_redis.ListInstancesRequest(), parent="parent_value",
        )
+
+
def test_list_instances_pager():
    """The sync pager flattens all pages and carries default routing metadata."""
    # Instantiate the credentials; every other test in this module passes an
    # instance, and the bare class only happened to work because the transport
    # is mocked out.
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Set the response to a series of pages: 3, 0, 1, and 2 instances.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )

        # The pager must hold the default routing-header metadata.
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_instances(request={})

        assert pager._metadata == metadata

        # Iterating the pager yields every instance across pages: 3+0+1+2 == 6.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, cloud_redis.Instance) for i in results)
+
+
def test_list_instances_pages():
    """The pager's ``pages`` view exposes raw pages with their page tokens."""
    # Instantiate the credentials for consistency with the rest of the module.
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_instances(request={}).pages)
        # The final page has an empty next_page_token, terminating iteration
        # before the RuntimeError sentinel is reached.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
@pytest.mark.asyncio
async def test_list_instances_async_pager():
    """The async pager lazily yields every Instance across paged responses."""
    # Instantiate the credentials for consistency with the rest of the module.
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages: 3, 0, 1, and 2 instances.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_instances(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        # 3 + 0 + 1 + 2 instances across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, cloud_redis.Instance) for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_instances_async_pages():
    """The async pager's ``pages`` view exposes raw pages with their tokens."""
    # Instantiate the credentials for consistency with the rest of the module.
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_instances(request={})).pages:
            pages.append(page_)
        # The final page's empty token terminates iteration before the
        # RuntimeError sentinel is reached.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
def test_get_instance(
    transport: str = "grpc", request_type=cloud_redis.GetInstanceRequest
):
    """get_instance surfaces every scalar field of the stubbed Instance."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_redis.Instance(
            name="name_value",
            display_name="display_name_value",
            location_id="location_id_value",
            alternative_location_id="alternative_location_id_value",
            redis_version="redis_version_value",
            reserved_ip_range="reserved_ip_range_value",
            host="host_value",
            port=453,
            current_location_id="current_location_id_value",
            state=cloud_redis.Instance.State.CREATING,
            status_message="status_message_value",
            tier=cloud_redis.Instance.Tier.BASIC,
            memory_size_gb=1499,
            authorized_network="authorized_network_value",
            persistence_iam_identity="persistence_iam_identity_value",
            connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
        )

        response = client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.GetInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, cloud_redis.Instance)

    assert response.name == "name_value"

    assert response.display_name == "display_name_value"

    assert response.location_id == "location_id_value"

    assert response.alternative_location_id == "alternative_location_id_value"

    assert response.redis_version == "redis_version_value"

    assert response.reserved_ip_range == "reserved_ip_range_value"

    assert response.host == "host_value"

    assert response.port == 453

    assert response.current_location_id == "current_location_id_value"

    assert response.state == cloud_redis.Instance.State.CREATING

    assert response.status_message == "status_message_value"

    assert response.tier == cloud_redis.Instance.Tier.BASIC

    assert response.memory_size_gb == 1499

    assert response.authorized_network == "authorized_network_value"

    assert response.persistence_iam_identity == "persistence_iam_identity_value"

    assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
+
+
def test_get_instance_from_dict():
    """Re-run test_get_instance with a dict request to cover dict coercion."""
    test_get_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_get_instance_async(transport: str = "grpc_asyncio"):
    """Async get_instance surfaces every scalar field of the stubbed Instance."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.GetInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall wraps the response so it can be awaited.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance(
                name="name_value",
                display_name="display_name_value",
                location_id="location_id_value",
                alternative_location_id="alternative_location_id_value",
                redis_version="redis_version_value",
                reserved_ip_range="reserved_ip_range_value",
                host="host_value",
                port=453,
                current_location_id="current_location_id_value",
                state=cloud_redis.Instance.State.CREATING,
                status_message="status_message_value",
                tier=cloud_redis.Instance.Tier.BASIC,
                memory_size_gb=1499,
                authorized_network="authorized_network_value",
                persistence_iam_identity="persistence_iam_identity_value",
                connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
            )
        )

        response = await client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, cloud_redis.Instance)

    assert response.name == "name_value"

    assert response.display_name == "display_name_value"

    assert response.location_id == "location_id_value"

    assert response.alternative_location_id == "alternative_location_id_value"

    assert response.redis_version == "redis_version_value"

    assert response.reserved_ip_range == "reserved_ip_range_value"

    assert response.host == "host_value"

    assert response.port == 453

    assert response.current_location_id == "current_location_id_value"

    assert response.state == cloud_redis.Instance.State.CREATING

    assert response.status_message == "status_message_value"

    assert response.tier == cloud_redis.Instance.Tier.BASIC

    assert response.memory_size_gb == 1499

    assert response.authorized_network == "authorized_network_value"

    assert response.persistence_iam_identity == "persistence_iam_identity_value"

    assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
+
+
def test_get_instance_field_headers():
    """The request's ``name`` is echoed in x-goog-request-params routing metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.GetInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        call.return_value = cloud_redis.Instance()

        client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_get_instance_field_headers_async():
    """Async variant: routing header carries the request's ``name`` field."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.GetInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance()
        )

        await client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_get_instance_flattened():
    """Flattened kwargs (name=...) are folded into the request object."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_redis.Instance()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_instance(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
def test_get_instance_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_instance(
            cloud_redis.GetInstanceRequest(), name="name_value",
        )
+
+
@pytest.mark.asyncio
async def test_get_instance_flattened_async():
    """Async variant: flattened kwargs (name=...) are folded into the request.

    Patches the transport callable, invokes the method with only a flattened
    ``name`` argument, and verifies the request the stub received carries it.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call. (A bare
        # Instance assignment that was immediately overwritten here was
        # dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_instance(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_get_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_instance(
            cloud_redis.GetInstanceRequest(), name="name_value",
        )
+
+
def test_create_instance(
    transport: str = "grpc", request_type=cloud_redis.CreateInstanceRequest
):
    """create_instance forwards the request and returns a long-running-operation future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.CreateInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_instance_from_dict():
    """Re-run test_create_instance with a dict request to cover dict coercion."""
    test_create_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_create_instance_async(transport: str = "grpc_asyncio"):
    """Async create_instance forwards the request and returns an operation future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.CreateInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_instance_field_headers():
    """The request's ``parent`` is echoed in x-goog-request-params routing metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.CreateInstanceRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_instance_field_headers_async():
    """Async variant: routing header carries the request's ``parent`` field."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.CreateInstanceRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_create_instance_flattened():
    """Flattened kwargs (parent, instance_id, instance) are folded into the request."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_instance(
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].instance_id == "instance_id_value"

        assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
def test_create_instance_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_instance(
            cloud_redis.CreateInstanceRequest(),
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
@pytest.mark.asyncio
async def test_create_instance_flattened_async():
    """Async variant: flattened kwargs are folded into the CreateInstanceRequest.

    Patches the transport callable, invokes the method with flattened
    ``parent``, ``instance_id`` and ``instance`` arguments, and verifies the
    request the stub received carries all three values.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call. (A bare
        # Operation assignment that was immediately overwritten here was
        # dead code and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_instance(
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].instance_id == "instance_id_value"

        assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
@pytest.mark.asyncio
async def test_create_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_instance(
            cloud_redis.CreateInstanceRequest(),
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
def test_update_instance(
    transport: str = "grpc", request_type=cloud_redis.UpdateInstanceRequest
):
    """update_instance forwards the request and returns a long-running-operation future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.update_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.UpdateInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
+def test_update_instance_from_dict():
+ test_update_instance(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_update_instance_async(transport: str = "grpc_asyncio"):
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = cloud_redis.UpdateInstanceRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.update_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_instance_field_headers():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.UpdateInstanceRequest()
+ request.instance.name = "instance.name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.update_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
+ "metadata"
+ ]
+
+
+@pytest.mark.asyncio
+async def test_update_instance_field_headers_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.UpdateInstanceRequest()
+ request.instance.name = "instance.name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_instance), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.update_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
+ "metadata"
+ ]
+
+
+def test_update_instance_flattened():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_instance(
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ instance=cloud_redis.Instance(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+ assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
+def test_update_instance_flattened_error():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_instance(
+ cloud_redis.UpdateInstanceRequest(),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ instance=cloud_redis.Instance(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.update_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_instance(
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ instance=cloud_redis.Instance(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+ assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_error_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_instance(
+ cloud_redis.UpdateInstanceRequest(),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ instance=cloud_redis.Instance(name="name_value"),
+ )
+
+
def test_upgrade_instance(
    transport: str = "grpc", request_type=cloud_redis.UpgradeInstanceRequest
):
    """``upgrade_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called once with
        # the request coerced to the expected proto type.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_redis.UpgradeInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_upgrade_instance_from_dict():
    """A plain dict must be accepted in place of the request proto."""
    test_upgrade_instance(request_type=dict)


@pytest.mark.asyncio
async def test_upgrade_instance_async(transport: str = "grpc_asyncio"):
    """Async ``upgrade_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.UpgradeInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called exactly once.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_upgrade_instance_field_headers():
    """Routing params from ``name`` are sent as request metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpgradeInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_upgrade_instance_field_headers_async():
    """Async variant: routing params are sent as request metadata."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpgradeInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_upgrade_instance_flattened():
    """Flattened kwargs to ``upgrade_instance`` populate the request proto."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.upgrade_instance(
            name="name_value", redis_version="redis_version_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].redis_version == "redis_version_value"


def test_upgrade_instance_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.upgrade_instance(
            cloud_redis.UpgradeInstanceRequest(),
            name="name_value",
            redis_version="redis_version_value",
        )


@pytest.mark.asyncio
async def test_upgrade_instance_flattened_async():
    """Flattened kwargs to async ``upgrade_instance`` populate the request proto."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.upgrade_instance(
            name="name_value", redis_version="redis_version_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].redis_version == "redis_version_value"


@pytest.mark.asyncio
async def test_upgrade_instance_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.upgrade_instance(
            cloud_redis.UpgradeInstanceRequest(),
            name="name_value",
            redis_version="redis_version_value",
        )
+
+
def test_import_instance(
    transport: str = "grpc", request_type=cloud_redis.ImportInstanceRequest
):
    """``import_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.import_instance(request)

        # Establish that the underlying gRPC stub method was called once with
        # the request coerced to the expected proto type.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_redis.ImportInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_import_instance_from_dict():
    """A plain dict must be accepted in place of the request proto."""
    test_import_instance(request_type=dict)


@pytest.mark.asyncio
async def test_import_instance_async(transport: str = "grpc_asyncio"):
    """Async ``import_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ImportInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.import_instance(request)

        # Establish that the underlying gRPC stub method was called exactly once.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_import_instance_field_headers():
    """Routing params from ``name`` are sent as request metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ImportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_import_instance_field_headers_async():
    """Async variant: routing params are sent as request metadata."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ImportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_import_instance_flattened():
    """Flattened kwargs to ``import_instance`` populate the request proto."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.import_instance(
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].input_config == cloud_redis.InputConfig(
            gcs_source=cloud_redis.GcsSource(uri="uri_value")
        )


def test_import_instance_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.import_instance(
            cloud_redis.ImportInstanceRequest(),
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )


@pytest.mark.asyncio
async def test_import_instance_flattened_async():
    """Flattened kwargs to async ``import_instance`` populate the request proto."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.import_instance(
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].input_config == cloud_redis.InputConfig(
            gcs_source=cloud_redis.GcsSource(uri="uri_value")
        )


@pytest.mark.asyncio
async def test_import_instance_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.import_instance(
            cloud_redis.ImportInstanceRequest(),
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )
+
+
def test_export_instance(
    transport: str = "grpc", request_type=cloud_redis.ExportInstanceRequest
):
    """``export_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.export_instance(request)

        # Establish that the underlying gRPC stub method was called once with
        # the request coerced to the expected proto type.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_redis.ExportInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_export_instance_from_dict():
    """A plain dict must be accepted in place of the request proto."""
    test_export_instance(request_type=dict)


@pytest.mark.asyncio
async def test_export_instance_async(transport: str = "grpc_asyncio"):
    """Async ``export_instance`` forwards the request and returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ExportInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.export_instance(request)

        # Establish that the underlying gRPC stub method was called exactly once.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_export_instance_field_headers():
    """Routing params from ``name`` are sent as request metadata."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ExportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_export_instance_field_headers_async():
    """Async variant: routing params are sent as request metadata."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ExportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_export_instance_flattened():
    """Flattened kwargs to ``export_instance`` populate the request proto."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.export_instance(
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].output_config == cloud_redis.OutputConfig(
            gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
        )


def test_export_instance_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.export_instance(
            cloud_redis.ExportInstanceRequest(),
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )


@pytest.mark.asyncio
async def test_export_instance_flattened_async():
    """Flattened kwargs to async ``export_instance`` populate the request proto."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.export_instance(
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].output_config == cloud_redis.OutputConfig(
            gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
        )


@pytest.mark.asyncio
async def test_export_instance_flattened_error_async():
    """Async: mixing a request object with flattened fields must raise."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.export_instance(
            cloud_redis.ExportInstanceRequest(),
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )
+
+
+def test_failover_instance(
+ transport: str = "grpc", request_type=cloud_redis.FailoverInstanceRequest
+):
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.failover_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.failover_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == cloud_redis.FailoverInstanceRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_failover_instance_from_dict():
+ test_failover_instance(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_async(transport: str = "grpc_asyncio"):
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = cloud_redis.FailoverInstanceRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.failover_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.failover_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_failover_instance_field_headers():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.FailoverInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.failover_instance), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.failover_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_field_headers_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.FailoverInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.failover_instance), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.failover_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_failover_instance_flattened():
    """Verify the flattened (keyword-argument) call path copies each
    keyword into the corresponding field of the request proto.
    """
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.failover_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.failover_instance(
            name="name_value",
            data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert (
            args[0].data_protection_mode
            == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
        )
+
+
def test_failover_instance_flattened_error():
    """Mixing a request object with flattened keyword fields must raise."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Supplying a fully-formed request object *and* flattened keywords is
    # ambiguous, so the client rejects the call before any RPC is made.
    request = cloud_redis.FailoverInstanceRequest()
    mode = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
    with pytest.raises(ValueError):
        redis_client.failover_instance(
            request, name="name_value", data_protection_mode=mode,
        )
+
+
@pytest.mark.asyncio
async def test_failover_instance_flattened_async():
    """Verify the flattened (keyword-argument) call path copies each
    keyword into the request proto for the async client.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.failover_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (The original
        # first assigned a plain Operation and immediately overwrote it with
        # the awaitable fake; that dead store has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.failover_instance(
            name="name_value",
            data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert (
            args[0].data_protection_mode
            == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
        )
+
+
@pytest.mark.asyncio
async def test_failover_instance_flattened_error_async():
    """Async client: request object plus flattened fields must raise."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Passing both call styles at once is ambiguous and rejected up front.
    mode = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
    with pytest.raises(ValueError):
        await redis_client.failover_instance(
            cloud_redis.FailoverInstanceRequest(),
            name="name_value",
            data_protection_mode=mode,
        )
+
+
def test_delete_instance(
    transport: str = "grpc", request_type=cloud_redis.DeleteInstanceRequest
):
    """``delete_instance`` forwards the request and returns an LRO future.

    Also reused by ``test_delete_instance_from_dict`` with
    ``request_type=dict`` to cover the dict-request coercion path.
    """
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.delete_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        # An empty request of either type normalizes to the proto default.
        assert args[0] == cloud_redis.DeleteInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_delete_instance_from_dict():
    # Re-run the sync delete test with a plain dict request to cover
    # the dict-to-proto coercion path.
    test_delete_instance(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_async(transport: str = "grpc_asyncio"):
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = cloud_redis.DeleteInstanceRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_instance_field_headers():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.DeleteInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_field_headers_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.DeleteInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_instance), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_delete_instance_flattened():
    """The flattened ``name=`` keyword is copied into the request proto."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Patch the stub's __call__ so no RPC actually goes out.
    stub_type = type(redis_client._transport.delete_instance)
    with mock.patch.object(stub_type, "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Invoke via the flattened keyword-argument signature.
        redis_client.delete_instance(name="name_value",)

        # Exactly one RPC was recorded, carrying the name we passed.
        assert len(call.mock_calls) == 1
        _, recorded_args, _ = call.mock_calls[0]
        assert recorded_args[0].name == "name_value"
+
+
def test_delete_instance_flattened_error():
    """Request object plus flattened ``name=`` must raise ValueError."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mixing the request-object and flattened call styles is ambiguous,
    # so the client refuses the call before any RPC is made.
    request = cloud_redis.DeleteInstanceRequest()
    with pytest.raises(ValueError):
        redis_client.delete_instance(request, name="name_value",)
+
+
@pytest.mark.asyncio
async def test_delete_instance_flattened_async():
    """Verify the flattened (keyword-argument) call path copies the
    ``name`` keyword into the request proto for the async client.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (The original
        # first assigned a plain Operation and immediately overwrote it with
        # the awaitable fake; that dead store has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_instance(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_delete_instance_flattened_error_async():
    """Async client: request object plus flattened ``name=`` must raise."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mixing the two call styles is rejected before any RPC is attempted.
    request = cloud_redis.DeleteInstanceRequest()
    with pytest.raises(ValueError):
        await redis_client.delete_instance(request, name="name_value",)
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
def test_transport_instance():
    """A client accepts a pre-built transport and uses it unchanged."""
    grpc_transport = transports.CloudRedisGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    redis_client = CloudRedisClient(transport=grpc_transport)
    # Identity, not equality: the very same object must be installed.
    assert redis_client._transport is grpc_transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.CloudRedisGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
    """With no transport argument, the sync client defaults to gRPC."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
    assert isinstance(redis_client._transport, transports.CloudRedisGrpcTransport)
+
+
def test_cloud_redis_base_transport_error():
    """Supplying both explicit credentials and a credentials file is rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(exceptions.DuplicateCredentialArgs):
        transport = transports.CloudRedisTransport(
            credentials=credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
+
+
def test_cloud_redis_base_transport():
    """Every RPC method on the abstract base transport raises
    NotImplementedError, as does the ``operations_client`` LRO property.
    """
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.CloudRedisTransport(
            credentials=credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "list_instances",
        "get_instance",
        "create_instance",
        "update_instance",
        "upgrade_instance",
        "import_instance",
        "export_instance",
        "failover_instance",
        "delete_instance",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
+
+
+def test_cloud_redis_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CloudRedisTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+def test_cloud_redis_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CloudRedisTransport()
+ adc.assert_called_once()
+
+
def test_cloud_redis_auth_adc():
    """With no explicit credentials the client falls back to Application
    Default Credentials with the cloud-platform scope.
    """
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(auth, "default") as adc:
        adc.return_value = (credentials.AnonymousCredentials(), None)
        CloudRedisClient()
        adc.assert_called_once_with(
            scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
+
+
+def test_cloud_redis_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.CloudRedisGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
def test_cloud_redis_host_no_port():
    """An endpoint given without a port gets the default ``:443`` appended."""
    options = client_options.ClientOptions(api_endpoint="redis.googleapis.com")
    redis_client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), client_options=options,
    )
    assert redis_client._transport._host == "redis.googleapis.com:443"
+
+
def test_cloud_redis_host_with_port():
    """An endpoint with an explicit port is used verbatim."""
    options = client_options.ClientOptions(api_endpoint="redis.googleapis.com:8000")
    redis_client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), client_options=options,
    )
    assert redis_client._transport._host == "redis.googleapis.com:8000"
+
+
+def test_cloud_redis_grpc_transport_channel():
+ channel = grpc.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.CloudRedisGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+def test_cloud_redis_grpc_asyncio_transport_channel():
+ channel = aio.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.CloudRedisGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_cloud_redis_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_cloud_redis_grpc_lro_client():
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+ transport = client._transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_cloud_redis_grpc_lro_async_client():
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ transport = client._client._transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
def test_instance_path():
    """``instance_path`` renders the canonical resource-name template."""
    # projects/{project}/locations/{location}/instances/{instance}
    expected = "projects/squid/locations/clam/instances/whelk"
    assert CloudRedisClient.instance_path("squid", "clam", "whelk") == expected
+
+
def test_parse_instance_path():
    """``parse_instance_path`` is the inverse of ``instance_path``."""
    parts = {
        "project": "octopus",
        "location": "oyster",
        "instance": "nudibranch",
    }

    # Round-trip: build a path from the parts, parse it back, and confirm
    # the original components are recovered.
    round_tripped = CloudRedisClient.parse_instance_path(
        CloudRedisClient.instance_path(**parts)
    )
    assert round_tripped == parts
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.CloudRedisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.CloudRedisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = CloudRedisClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/redis_v1beta1/__init__.py b/tests/unit/gapic/redis_v1beta1/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/tests/unit/gapic/redis_v1beta1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py
new file mode 100644
index 0000000..b391a5e
--- /dev/null
+++ b/tests/unit/gapic/redis_v1beta1/test_cloud_redis.py
@@ -0,0 +1,2849 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation_async
+from google.api_core import operations_v1
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.redis_v1beta1.services.cloud_redis import CloudRedisAsyncClient
+from google.cloud.redis_v1beta1.services.cloud_redis import CloudRedisClient
+from google.cloud.redis_v1beta1.services.cloud_redis import pagers
+from google.cloud.redis_v1beta1.services.cloud_redis import transports
+from google.cloud.redis_v1beta1.types import cloud_redis
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import any_pb2 as any # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
def client_cert_source_callback():
    """Dummy mTLS callback returning a fixed ``(cert, key)`` byte pair."""
    return (b"cert bytes", b"key bytes")
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in for *client*'s default endpoint."""
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
+
+
def test__get_default_mtls_endpoint():
    """``_get_default_mtls_endpoint`` inserts ``.mtls`` into googleapis
    hostnames and leaves None, already-mtls, and non-Google endpoints alone.
    """
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through unchanged.
    assert CloudRedisClient._get_default_mtls_endpoint(None) is None
    assert (
        CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    )
    # Idempotent: an endpoint already in mtls form is returned as-is.
    assert (
        CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint)
        == api_mtls_endpoint
    )
    assert (
        CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint)
        == sandbox_mtls_endpoint
    )
    assert (
        CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
        == sandbox_mtls_endpoint
    )
    # Non-Google hostnames are never rewritten.
    assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient])
+def test_cloud_redis_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ assert client._transport._host == "redis.googleapis.com:443"
+
+
def test_cloud_redis_client_get_transport_class():
    """Both the implicit default and the explicit "grpc" label resolve to
    the synchronous gRPC transport class.
    """
    assert (
        CloudRedisClient.get_transport_class() == transports.CloudRedisGrpcTransport
    )
    assert (
        CloudRedisClient.get_transport_class("grpc")
        == transports.CloudRedisGrpcTransport
    )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
+ (
+ CloudRedisAsyncClient,
+ transports.CloudRedisGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
+)
+@mock.patch.object(
+ CloudRedisAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CloudRedisAsyncClient),
+)
+def test_cloud_redis_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
)
@mock.patch.object(
    CloudRedisAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(CloudRedisAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_redis_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify endpoint/cert autoswitching under GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Three scenarios are exercised, each with GOOGLE_API_USE_CLIENT_CERTIFICATE
    set to "true" and "false" (via parametrization):
      1. an explicit client_cert_source in client_options,
      2. a client cert available from ADC (SslCredentials.is_mtls is True),
      3. no client cert at all.
    The mTLS endpoint and SSL channel credentials must be used only when the
    env var is "true" AND a cert is actually available.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            ssl_channel_creds = mock.Mock()
            with mock.patch(
                "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
            ):
                patched.return_value = None
                client = client_class(client_options=options)

                # The explicit cert is honored only when the env var is "true".
                if use_client_cert_env == "false":
                    expected_ssl_channel_creds = None
                    expected_host = client.DEFAULT_ENDPOINT
                else:
                    expected_ssl_channel_creds = ssl_channel_creds
                    expected_host = client.DEFAULT_MTLS_ENDPOINT

                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=expected_host,
                    scopes=None,
                    ssl_channel_credentials=expected_ssl_channel_creds,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    with mock.patch(
                        "google.auth.transport.grpc.SslCredentials.ssl_credentials",
                        new_callable=mock.PropertyMock,
                    ) as ssl_credentials_mock:
                        # When the env var is "false" the ADC cert must be ignored even
                        # if it exists, so is_mtls is forced False here.
                        if use_client_cert_env == "false":
                            is_mtls_mock.return_value = False
                            ssl_credentials_mock.return_value = None
                            expected_host = client.DEFAULT_ENDPOINT
                            expected_ssl_channel_creds = None
                        else:
                            is_mtls_mock.return_value = True
                            ssl_credentials_mock.return_value = mock.Mock()
                            expected_host = client.DEFAULT_MTLS_ENDPOINT
                            expected_ssl_channel_creds = (
                                ssl_credentials_mock.return_value
                            )

                        patched.return_value = None
                        client = client_class()
                        patched.assert_called_once_with(
                            credentials=None,
                            credentials_file=None,
                            host=expected_host,
                            scopes=None,
                            ssl_channel_credentials=expected_ssl_channel_creds,
                            quota_project_id=None,
                            client_info=transports.base.DEFAULT_CLIENT_INFO,
                        )

    # Check the case client_cert_source and ADC client cert are not provided.
    # Regardless of the env var, no cert means the regular endpoint is used.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
            ):
                with mock.patch(
                    "google.auth.transport.grpc.SslCredentials.is_mtls",
                    new_callable=mock.PropertyMock,
                ) as is_mtls_mock:
                    is_mtls_mock.return_value = False
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=client.DEFAULT_ENDPOINT,
                        scopes=None,
                        ssl_channel_credentials=None,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_cloud_redis_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes given in client_options are forwarded verbatim to the transport."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=scoped_options)
        # Everything except `scopes` keeps its default value.
        expected_kwargs = dict(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
        patched.assert_called_once_with(**expected_kwargs)
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
        (
            CloudRedisAsyncClient,
            transports.CloudRedisGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_cloud_redis_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials file in client_options is forwarded to the transport."""
    creds_file_options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=creds_file_options)
        # Only `credentials_file` deviates from the defaults.
        expected_kwargs = dict(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
        patched.assert_called_once_with(**expected_kwargs)
+
+
def test_cloud_redis_client_client_options_from_dict():
    """A plain dict of client options behaves like a ClientOptions instance."""
    with mock.patch(
        "google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = CloudRedisClient(client_options={"api_endpoint": "squid.clam.whelk"})
        # The dict's api_endpoint must override the default host.
        expected_kwargs = dict(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            ssl_channel_credentials=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
        grpc_transport.assert_called_once_with(**expected_kwargs)
+
+
def test_list_instances(
    transport: str = "grpc", request_type=cloud_redis.ListInstancesRequest
):
    """list_instances sends the request to the stub and wraps the response in a pager."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_redis.ListInstancesResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )

        response = client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        # An empty request of the declared type must reach the stub unchanged.
        assert args[0] == cloud_redis.ListInstancesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstancesPager)

    # Scalar response fields must be surfaced through the pager.
    assert response.next_page_token == "next_page_token_value"

    assert response.unreachable == ["unreachable_value"]
+
+
def test_list_instances_from_dict():
    """list_instances must also accept the request as a plain dict."""
    test_list_instances(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_list_instances_async(transport: str = "grpc_asyncio"):
    """Async variant: list_instances awaits the stub and returns an async pager."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ListInstancesRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall makes the mocked response awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )

        response = await client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstancesAsyncPager)

    assert response.next_page_token == "next_page_token_value"

    assert response.unreachable == ["unreachable_value"]
+
+
def test_list_instances_field_headers():
    """The request's `parent` field must be echoed as an x-goog-request-params header."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ListInstancesRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        call.return_value = cloud_redis.ListInstancesResponse()

        client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_list_instances_field_headers_async():
    """Async variant: `parent` is propagated as an x-goog-request-params header."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ListInstancesRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        # FakeUnaryUnaryCall makes the mocked response awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse()
        )

        await client.list_instances(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_list_instances_flattened():
    """A flattened `parent` kwarg is packed into the request sent over the wire."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Fake the stub method so no network traffic occurs.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        call.return_value = cloud_redis.ListInstancesResponse()

        # Invoke with the flattened keyword argument instead of a request object.
        client.list_instances(parent="parent_value",)

        # Exactly one RPC must have been issued, carrying the flattened value.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        sent_request = args[0]
        assert sent_request.parent == "parent_value"
+
+
def test_list_instances_flattened_error():
    """Supplying both a request object and flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.list_instances(
            cloud_redis.ListInstancesRequest(), parent="parent_value",
        )
+
+
@pytest.mark.asyncio
async def test_list_instances_flattened_async():
    """Async variant: a flattened `parent` kwarg is packed into the request."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        # NOTE: the original first assigned a bare ListInstancesResponse and
        # immediately overwrote it; that dead assignment has been removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_instances(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
@pytest.mark.asyncio
async def test_list_instances_flattened_error_async():
    """Async variant: request object plus flattened fields must raise ValueError."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.list_instances(
            cloud_redis.ListInstancesRequest(), parent="parent_value",
        )
+
+
def test_list_instances_pager():
    """The sync pager walks all pages transparently and yields every Instance.

    Fixes: pass an AnonymousCredentials *instance* — the original passed the
    class object itself (missing ``()``), inconsistent with every other test.
    """
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Set the response to a series of pages; the trailing RuntimeError
        # guards against the pager requesting more pages than provided.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )

        # The pager must carry routing metadata for the (empty) parent field.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_instances(request={})

        assert pager._metadata == expected_metadata

        # Iterating the pager flattens all four pages into one stream.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, cloud_redis.Instance) for i in results)
+
+
def test_list_instances_pages():
    """Page-level iteration exposes each raw page and its next_page_token.

    Fixes: pass an AnonymousCredentials *instance* — the original passed the
    class object itself (missing ``()``), inconsistent with every other test.
    """
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_instances(request={}).pages)
        # The last page has no next_page_token (proto3 default "").
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
@pytest.mark.asyncio
async def test_list_instances_async_pager():
    """The async pager walks all pages via `async for` and yields every Instance.

    Fixes: pass an AnonymousCredentials *instance* — the original passed the
    class object itself (missing ``()``), inconsistent with every other test.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_instances(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        # All four pages flatten to six Instance objects.
        assert len(responses) == 6
        assert all(isinstance(i, cloud_redis.Instance) for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_instances_async_pages():
    """Async page-level iteration exposes each raw page and its next_page_token.

    Fixes: pass an AnonymousCredentials *instance* — the original passed the
    class object itself (missing ``()``), inconsistent with every other test.
    """
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_instances),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_redis.ListInstancesResponse(
                instances=[
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                    cloud_redis.Instance(),
                ],
                next_page_token="abc",
            ),
            cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(),], next_page_token="ghi",
            ),
            cloud_redis.ListInstancesResponse(
                instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_instances(request={})).pages:
            pages.append(page_)
        # The last page has no next_page_token (proto3 default "").
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
def test_get_instance(
    transport: str = "grpc", request_type=cloud_redis.GetInstanceRequest
):
    """get_instance forwards the request to the stub and surfaces every Instance field."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        # Designate an appropriate return value for the call, with every
        # scalar field populated so each one can be checked on the response.
        call.return_value = cloud_redis.Instance(
            name="name_value",
            display_name="display_name_value",
            location_id="location_id_value",
            alternative_location_id="alternative_location_id_value",
            redis_version="redis_version_value",
            reserved_ip_range="reserved_ip_range_value",
            host="host_value",
            port=453,
            current_location_id="current_location_id_value",
            state=cloud_redis.Instance.State.CREATING,
            status_message="status_message_value",
            tier=cloud_redis.Instance.Tier.BASIC,
            memory_size_gb=1499,
            authorized_network="authorized_network_value",
            persistence_iam_identity="persistence_iam_identity_value",
            connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
        )

        response = client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.GetInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, cloud_redis.Instance)

    # Every populated field must round-trip unchanged.
    assert response.name == "name_value"

    assert response.display_name == "display_name_value"

    assert response.location_id == "location_id_value"

    assert response.alternative_location_id == "alternative_location_id_value"

    assert response.redis_version == "redis_version_value"

    assert response.reserved_ip_range == "reserved_ip_range_value"

    assert response.host == "host_value"

    assert response.port == 453

    assert response.current_location_id == "current_location_id_value"

    assert response.state == cloud_redis.Instance.State.CREATING

    assert response.status_message == "status_message_value"

    assert response.tier == cloud_redis.Instance.Tier.BASIC

    assert response.memory_size_gb == 1499

    assert response.authorized_network == "authorized_network_value"

    assert response.persistence_iam_identity == "persistence_iam_identity_value"

    assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
+
+
def test_get_instance_from_dict():
    """get_instance must also accept the request as a plain dict."""
    test_get_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_get_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: get_instance awaits the stub and surfaces every Instance field."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.GetInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall makes the fully-populated Instance awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance(
                name="name_value",
                display_name="display_name_value",
                location_id="location_id_value",
                alternative_location_id="alternative_location_id_value",
                redis_version="redis_version_value",
                reserved_ip_range="reserved_ip_range_value",
                host="host_value",
                port=453,
                current_location_id="current_location_id_value",
                state=cloud_redis.Instance.State.CREATING,
                status_message="status_message_value",
                tier=cloud_redis.Instance.Tier.BASIC,
                memory_size_gb=1499,
                authorized_network="authorized_network_value",
                persistence_iam_identity="persistence_iam_identity_value",
                connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
            )
        )

        response = await client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, cloud_redis.Instance)

    # Every populated field must round-trip unchanged.
    assert response.name == "name_value"

    assert response.display_name == "display_name_value"

    assert response.location_id == "location_id_value"

    assert response.alternative_location_id == "alternative_location_id_value"

    assert response.redis_version == "redis_version_value"

    assert response.reserved_ip_range == "reserved_ip_range_value"

    assert response.host == "host_value"

    assert response.port == 453

    assert response.current_location_id == "current_location_id_value"

    assert response.state == cloud_redis.Instance.State.CREATING

    assert response.status_message == "status_message_value"

    assert response.tier == cloud_redis.Instance.Tier.BASIC

    assert response.memory_size_gb == 1499

    assert response.authorized_network == "authorized_network_value"

    assert response.persistence_iam_identity == "persistence_iam_identity_value"

    assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
+
+
def test_get_instance_field_headers():
    """The request's `name` field must be echoed as an x-goog-request-params header."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.GetInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        call.return_value = cloud_redis.Instance()

        client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_get_instance_field_headers_async():
    """Async variant: `name` is propagated as an x-goog-request-params header."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.GetInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        # FakeUnaryUnaryCall makes the mocked response awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance()
        )

        await client.get_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_get_instance_flattened():
    """A flattened `name` kwarg is packed into the request sent over the wire."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Fake the stub method so no network traffic occurs.
    with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
        call.return_value = cloud_redis.Instance()

        # Invoke with the flattened keyword argument instead of a request object.
        client.get_instance(name="name_value",)

        # Exactly one RPC must have been issued, carrying the flattened value.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        sent_request = args[0]
        assert sent_request.name == "name_value"
+
+
def test_get_instance_flattened_error():
    """Supplying both a request object and flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.get_instance(
            cloud_redis.GetInstanceRequest(), name="name_value",
        )
+
+
@pytest.mark.asyncio
async def test_get_instance_flattened_async():
    """Async variant: a flattened `name` kwarg is packed into the request."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_instance), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        # NOTE: the original first assigned a bare Instance and immediately
        # overwrote it; that dead assignment has been removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_instance(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_get_instance_flattened_error_async():
    """Async variant: request object plus flattened fields must raise ValueError."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.get_instance(
            cloud_redis.GetInstanceRequest(), name="name_value",
        )
+
+
def test_create_instance(
    transport: str = "grpc", request_type=cloud_redis.CreateInstanceRequest
):
    """create_instance forwards the request and returns a long-running-operation future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        # create_instance is an LRO, so the stub returns an Operation proto.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.CreateInstanceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_instance_from_dict():
    """create_instance must also accept the request as a plain dict."""
    test_create_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_create_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: create_instance returns a long-running-operation future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.CreateInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall makes the Operation proto awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_create_instance_field_headers():
    """The request's `parent` field must be echoed as an x-goog-request-params header."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.CreateInstanceRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_instance_field_headers_async():
    """Async variant: `parent` is propagated as an x-goog-request-params header."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.CreateInstanceRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        # FakeUnaryUnaryCall makes the Operation proto awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.create_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_create_instance_flattened():
    """Flattened kwargs (`parent`, `instance_id`, `instance`) are packed into the request."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Fake the stub method so no network traffic occurs.
    with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Invoke with flattened keyword arguments instead of a request object.
        client.create_instance(
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Exactly one RPC must have been issued, carrying every flattened value.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        sent_request = args[0]
        assert sent_request.parent == "parent_value"
        assert sent_request.instance_id == "instance_id_value"
        assert sent_request.instance == cloud_redis.Instance(name="name_value")
+
+
def test_create_instance_flattened_error():
    """Supplying both a request object and flattened fields must raise ValueError."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.create_instance(
            cloud_redis.CreateInstanceRequest(),
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
@pytest.mark.asyncio
async def test_create_instance_flattened_async():
    """Async variant: flattened kwargs are packed into the CreateInstanceRequest."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_instance), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        # NOTE: the original first assigned a bare Operation that was
        # immediately overwritten; that dead assignment has been removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_instance(
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].instance_id == "instance_id_value"

        assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
@pytest.mark.asyncio
async def test_create_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials())

    # Supplying both forms at once is ambiguous and must be rejected.
    explicit_request = cloud_redis.CreateInstanceRequest()
    with pytest.raises(ValueError):
        await redis_client.create_instance(
            explicit_request,
            parent="parent_value",
            instance_id="instance_id_value",
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
def test_update_instance(
    transport: str = "grpc", request_type=cloud_redis.UpdateInstanceRequest
):
    """Verify update_instance sends an empty request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.update_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.UpdateInstanceRequest()

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_update_instance_from_dict():
    # Re-run the base test with a dict-typed request to exercise coercion.
    test_update_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_update_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: verify update_instance returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.UpdateInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.update_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.update_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_update_instance_field_headers():
    """Verify the x-goog-request-params routing header for update_instance."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpdateInstanceRequest()
    request.instance.name = "instance.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.update_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
        "metadata"
    ]
+
+
@pytest.mark.asyncio
async def test_update_instance_field_headers_async():
    """Async variant: verify the routing header for update_instance."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpdateInstanceRequest()
    request.instance.name = "instance.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.update_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.update_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
        "metadata"
    ]
+
+
def test_update_instance_flattened():
    """Verify flattened kwargs are folded into the UpdateInstanceRequest."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.update_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_instance(
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])

        assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
def test_update_instance_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials())

    # Supplying both the request message and individual flattened fields is
    # ambiguous, so the client must reject the call outright.
    explicit_request = cloud_redis.UpdateInstanceRequest()
    with pytest.raises(ValueError):
        redis_client.update_instance(
            explicit_request,
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
@pytest.mark.asyncio
async def test_update_instance_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.update_instance), "__call__"
    ) as call:
        # Designate an appropriate awaitable return value for the call.
        # (A redundant plain-Operation assignment that was immediately
        # overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_instance(
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
            instance=cloud_redis.Instance(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])

        assert args[0].instance == cloud_redis.Instance(name="name_value")
+
+
@pytest.mark.asyncio
async def test_update_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials())

    # Supplying both forms at once is ambiguous and must be rejected.
    explicit_request = cloud_redis.UpdateInstanceRequest()
    with pytest.raises(ValueError):
        await redis_client.update_instance(
            explicit_request,
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
            instance=cloud_redis.Instance(name="name_value"),
        )
+
+
def test_upgrade_instance(
    transport: str = "grpc", request_type=cloud_redis.UpgradeInstanceRequest
):
    """Verify upgrade_instance sends an empty request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.UpgradeInstanceRequest()

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_upgrade_instance_from_dict():
    # Re-run the base test with a dict-typed request to exercise coercion.
    test_upgrade_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: verify upgrade_instance returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.UpgradeInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_upgrade_instance_field_headers():
    """Verify the x-goog-request-params routing header for upgrade_instance."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpgradeInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_field_headers_async():
    """Async variant: verify the routing header for upgrade_instance."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.UpgradeInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.upgrade_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_upgrade_instance_flattened():
    """Verify flattened kwargs are folded into the UpgradeInstanceRequest."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.upgrade_instance(
            name="name_value", redis_version="redis_version_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].redis_version == "redis_version_value"
+
+
def test_upgrade_instance_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials())

    # Supplying both the request message and individual flattened fields is
    # ambiguous, so the client must reject the call outright.
    explicit_request = cloud_redis.UpgradeInstanceRequest()
    with pytest.raises(ValueError):
        redis_client.upgrade_instance(
            explicit_request,
            name="name_value",
            redis_version="redis_version_value",
        )
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.upgrade_instance), "__call__"
    ) as call:
        # Designate an appropriate awaitable return value for the call.
        # (A redundant plain-Operation assignment that was immediately
        # overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.upgrade_instance(
            name="name_value", redis_version="redis_version_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].redis_version == "redis_version_value"
+
+
@pytest.mark.asyncio
async def test_upgrade_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials())

    # Supplying both forms at once is ambiguous and must be rejected.
    explicit_request = cloud_redis.UpgradeInstanceRequest()
    with pytest.raises(ValueError):
        await redis_client.upgrade_instance(
            explicit_request,
            name="name_value",
            redis_version="redis_version_value",
        )
+
+
def test_import_instance(
    transport: str = "grpc", request_type=cloud_redis.ImportInstanceRequest
):
    """Verify import_instance sends an empty request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.ImportInstanceRequest()

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_import_instance_from_dict():
    # Re-run the base test with a dict-typed request to exercise coercion.
    test_import_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_import_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: verify import_instance returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ImportInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_import_instance_field_headers():
    """Verify the x-goog-request-params routing header for import_instance."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ImportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_import_instance_field_headers_async():
    """Async variant: verify the routing header for import_instance."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ImportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.import_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_import_instance_flattened():
    """Verify flattened kwargs are folded into the ImportInstanceRequest."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.import_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.import_instance(
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].input_config == cloud_redis.InputConfig(
            gcs_source=cloud_redis.GcsSource(uri="uri_value")
        )
+
+
def test_import_instance_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials())

    # Supplying both the request message and individual flattened fields is
    # ambiguous, so the client must reject the call outright.
    explicit_request = cloud_redis.ImportInstanceRequest()
    gcs_input = cloud_redis.InputConfig(
        gcs_source=cloud_redis.GcsSource(uri="uri_value")
    )
    with pytest.raises(ValueError):
        redis_client.import_instance(
            explicit_request, name="name_value", input_config=gcs_input,
        )
+
+
@pytest.mark.asyncio
async def test_import_instance_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.import_instance), "__call__"
    ) as call:
        # Designate an appropriate awaitable return value for the call.
        # (A redundant plain-Operation assignment that was immediately
        # overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.import_instance(
            name="name_value",
            input_config=cloud_redis.InputConfig(
                gcs_source=cloud_redis.GcsSource(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].input_config == cloud_redis.InputConfig(
            gcs_source=cloud_redis.GcsSource(uri="uri_value")
        )
+
+
@pytest.mark.asyncio
async def test_import_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials())

    # Supplying both forms at once is ambiguous and must be rejected.
    explicit_request = cloud_redis.ImportInstanceRequest()
    gcs_input = cloud_redis.InputConfig(
        gcs_source=cloud_redis.GcsSource(uri="uri_value")
    )
    with pytest.raises(ValueError):
        await redis_client.import_instance(
            explicit_request, name="name_value", input_config=gcs_input,
        )
+
+
def test_export_instance(
    transport: str = "grpc", request_type=cloud_redis.ExportInstanceRequest
):
    """Verify export_instance sends an empty request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.ExportInstanceRequest()

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_export_instance_from_dict():
    # Re-run the base test with a dict-typed request to exercise coercion.
    test_export_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_export_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: verify export_instance returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.ExportInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_export_instance_field_headers():
    """Verify the x-goog-request-params routing header for export_instance."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ExportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_export_instance_field_headers_async():
    """Async variant: verify the routing header for export_instance."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.ExportInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.export_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_export_instance_flattened():
    """Verify flattened kwargs are folded into the ExportInstanceRequest."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.export_instance), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.export_instance(
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].output_config == cloud_redis.OutputConfig(
            gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
        )
+
+
def test_export_instance_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    redis_client = CloudRedisClient(credentials=credentials.AnonymousCredentials())

    # Supplying both the request message and individual flattened fields is
    # ambiguous, so the client must reject the call outright.
    explicit_request = cloud_redis.ExportInstanceRequest()
    gcs_output = cloud_redis.OutputConfig(
        gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
    )
    with pytest.raises(ValueError):
        redis_client.export_instance(
            explicit_request, name="name_value", output_config=gcs_output,
        )
+
+
@pytest.mark.asyncio
async def test_export_instance_flattened_async():
    """Async variant: flattened kwargs are folded into the request message."""
    client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.export_instance), "__call__"
    ) as call:
        # Designate an appropriate awaitable return value for the call.
        # (A redundant plain-Operation assignment that was immediately
        # overwritten here has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.export_instance(
            name="name_value",
            output_config=cloud_redis.OutputConfig(
                gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].output_config == cloud_redis.OutputConfig(
            gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
        )
+
+
@pytest.mark.asyncio
async def test_export_instance_flattened_error_async():
    """Async variant: request object plus flattened kwargs raises ValueError."""
    redis_client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials())

    # Supplying both forms at once is ambiguous and must be rejected.
    explicit_request = cloud_redis.ExportInstanceRequest()
    gcs_output = cloud_redis.OutputConfig(
        gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
    )
    with pytest.raises(ValueError):
        await redis_client.export_instance(
            explicit_request, name="name_value", output_config=gcs_output,
        )
+
+
def test_failover_instance(
    transport: str = "grpc", request_type=cloud_redis.FailoverInstanceRequest
):
    """Verify failover_instance sends an empty request and returns an LRO future."""
    client = CloudRedisClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.failover_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.failover_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == cloud_redis.FailoverInstanceRequest()

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_failover_instance_from_dict():
    # Re-run the base test with a dict-typed request to exercise coercion.
    test_failover_instance(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_failover_instance_async(transport: str = "grpc_asyncio"):
    """Async variant: verify failover_instance returns an LRO future."""
    client = CloudRedisAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = cloud_redis.FailoverInstanceRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.failover_instance), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.failover_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

        # Establish that the response is the type that we expect.
        assert isinstance(response, future.Future)
+
+
def test_failover_instance_field_headers():
    """Verify the x-goog-request-params routing header for failover_instance."""
    client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = cloud_redis.FailoverInstanceRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.failover_instance), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.failover_instance(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_field_headers_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.FailoverInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.failover_instance), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.failover_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_failover_instance_flattened():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._transport.failover_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.failover_instance(
+ name="name_value",
+ data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert (
+ args[0].data_protection_mode
+ == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
+ )
+
+
+def test_failover_instance_flattened_error():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.failover_instance(
+ cloud_redis.FailoverInstanceRequest(),
+ name="name_value",
+ data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
+ )
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_flattened_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.failover_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.failover_instance(
+ name="name_value",
+ data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert (
+ args[0].data_protection_mode
+ == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
+ )
+
+
+@pytest.mark.asyncio
+async def test_failover_instance_flattened_error_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.failover_instance(
+ cloud_redis.FailoverInstanceRequest(),
+ name="name_value",
+ data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
+ )
+
+
+def test_delete_instance(
+ transport: str = "grpc", request_type=cloud_redis.DeleteInstanceRequest
+):
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == cloud_redis.DeleteInstanceRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_instance_from_dict():
+ test_delete_instance(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_async(transport: str = "grpc_asyncio"):
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = cloud_redis.DeleteInstanceRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_delete_instance_field_headers():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.DeleteInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_field_headers_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloud_redis.DeleteInstanceRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_instance), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_instance_flattened():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_instance(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_delete_instance_flattened_error():
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_instance(
+ cloud_redis.DeleteInstanceRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client._client._transport.delete_instance), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_instance(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_error_async():
+ client = CloudRedisAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_instance(
+ cloud_redis.DeleteInstanceRequest(), name="name_value",
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CloudRedisClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = CloudRedisClient(transport=transport)
+ assert client._transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.CloudRedisGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.CloudRedisGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = CloudRedisClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client._transport, transports.CloudRedisGrpcTransport,)
+
+
+def test_cloud_redis_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.CloudRedisTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_cloud_redis_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.CloudRedisTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "list_instances",
+ "get_instance",
+ "create_instance",
+ "update_instance",
+ "upgrade_instance",
+ "import_instance",
+ "export_instance",
+ "failover_instance",
+ "delete_instance",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ # Additionally, the LRO client (a property) should
+ # also raise NotImplementedError
+ with pytest.raises(NotImplementedError):
+ transport.operations_client
+
+
+def test_cloud_redis_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CloudRedisTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+def test_cloud_redis_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CloudRedisTransport()
+ adc.assert_called_once()
+
+
+def test_cloud_redis_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ CloudRedisClient()
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
+ )
+
+
+def test_cloud_redis_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.CloudRedisGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+def test_cloud_redis_host_no_port():
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="redis.googleapis.com"
+ ),
+ )
+ assert client._transport._host == "redis.googleapis.com:443"
+
+
+def test_cloud_redis_host_with_port():
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="redis.googleapis.com:8000"
+ ),
+ )
+ assert client._transport._host == "redis.googleapis.com:8000"
+
+
+def test_cloud_redis_grpc_transport_channel():
+ channel = grpc.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.CloudRedisGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+def test_cloud_redis_grpc_asyncio_transport_channel():
+ channel = aio.insecure_channel("http://localhost/")
+
+ # Check that channel is used if provided.
+ transport = transports.CloudRedisGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
+)
+def test_cloud_redis_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_cloud_redis_grpc_lro_client():
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+ transport = client._transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_cloud_redis_grpc_lro_async_client():
+ client = CloudRedisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ transport = client._client._transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_instance_path():
+ project = "squid"
+ location = "clam"
+ instance = "whelk"
+
+ expected = "projects/{project}/locations/{location}/instances/{instance}".format(
+ project=project, location=location, instance=instance,
+ )
+ actual = CloudRedisClient.instance_path(project, location, instance)
+ assert expected == actual
+
+
+def test_parse_instance_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "instance": "nudibranch",
+ }
+ path = CloudRedisClient.instance_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CloudRedisClient.parse_instance_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.CloudRedisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = CloudRedisClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.CloudRedisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = CloudRedisClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/v1/test_cloud_redis_client_v1.py b/tests/unit/gapic/v1/test_cloud_redis_client_v1.py
deleted file mode 100644
index a3bdb81..0000000
--- a/tests/unit/gapic/v1/test_cloud_redis_client_v1.py
+++ /dev/null
@@ -1,651 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests."""
-
-import mock
-import pytest
-
-from google.rpc import status_pb2
-
-from google.cloud import redis_v1
-from google.cloud.redis_v1 import enums
-from google.cloud.redis_v1.proto import cloud_redis_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-
-
-class MultiCallableStub(object):
- """Stub for the grpc.UnaryUnaryMultiCallable interface."""
-
- def __init__(self, method, channel_stub):
- self.method = method
- self.channel_stub = channel_stub
-
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
- self.channel_stub.requests.append((self.method, request))
-
- response = None
- if self.channel_stub.responses:
- response = self.channel_stub.responses.pop()
-
- if isinstance(response, Exception):
- raise response
-
- if response:
- return response
-
-
-class ChannelStub(object):
- """Stub for the grpc.Channel interface."""
-
- def __init__(self, responses=[]):
- self.responses = responses
- self.requests = []
-
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
-
-class CustomException(Exception):
- pass
-
-
-class TestCloudRedisClient(object):
- def test_list_instances(self):
- # Setup Expected Response
- next_page_token = ""
- instances_element = {}
- instances = [instances_element]
- expected_response = {"next_page_token": next_page_token, "instances": instances}
- expected_response = cloud_redis_pb2.ListInstancesResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
-
- paged_list_response = client.list_instances(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.instances[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_instances_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
-
- paged_list_response = client.list_instances(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_get_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.get_instance(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.GetInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_instance_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- with pytest.raises(CustomException):
- client.get_instance(name)
-
- def test_create_instance(self):
- # Setup Expected Response
- name = "name3373707"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb_2 = 1493816946
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb_2,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_create_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
- instance_id = "test_instance"
- tier = enums.Instance.Tier.BASIC
- memory_size_gb = 1
- instance = {"tier": tier, "memory_size_gb": memory_size_gb}
-
- response = client.create_instance(parent, instance_id, instance)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.CreateInstanceRequest(
- parent=parent, instance_id=instance_id, instance=instance
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_create_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
- instance_id = "test_instance"
- tier = enums.Instance.Tier.BASIC
- memory_size_gb = 1
- instance = {"tier": tier, "memory_size_gb": memory_size_gb}
-
- response = client.create_instance(parent, instance_id, instance)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_update_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name_2 = "displayName21615000987"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb_2 = 1493816946
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name_2,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb_2,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_update_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- paths_element = "display_name"
- paths_element_2 = "memory_size_gb"
- paths = [paths_element, paths_element_2]
- update_mask = {"paths": paths}
- display_name = "UpdatedDisplayName"
- name = "projects//locations//instances/"
- memory_size_gb = 4
- instance = {
- "display_name": display_name,
- "name": name,
- "memory_size_gb": memory_size_gb,
- }
-
- response = client.update_instance(update_mask, instance)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.UpdateInstanceRequest(
- update_mask=update_mask, instance=instance
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_update_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_update_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- paths_element = "display_name"
- paths_element_2 = "memory_size_gb"
- paths = [paths_element, paths_element_2]
- update_mask = {"paths": paths}
- display_name = "UpdatedDisplayName"
- name = "projects//locations//instances/"
- memory_size_gb = 4
- instance = {
- "display_name": display_name,
- "name": name,
- "memory_size_gb": memory_size_gb,
- }
-
- response = client.update_instance(update_mask, instance)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_import_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_import_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- input_config = {}
-
- response = client.import_instance(name, input_config)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ImportInstanceRequest(
- name=name, input_config=input_config
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_import_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_import_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- input_config = {}
-
- response = client.import_instance(name, input_config)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_export_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_export_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- output_config = {}
-
- response = client.export_instance(name, output_config)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ExportInstanceRequest(
- name=name, output_config=output_config
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_export_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_export_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- output_config = {}
-
- response = client.export_instance(name, output_config)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_failover_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_failover_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.failover_instance(name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.FailoverInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_failover_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_failover_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.failover_instance(name)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_delete_instance(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = empty_pb2.Empty(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_delete_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.delete_instance(name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.DeleteInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_delete_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_delete_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.delete_instance(name)
- exception = response.exception()
- assert exception.errors[0] == error
diff --git a/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py
deleted file mode 100644
index 246e75a..0000000
--- a/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py
+++ /dev/null
@@ -1,732 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests."""
-
-import mock
-import pytest
-
-from google.rpc import status_pb2
-
-from google.cloud import redis_v1beta1
-from google.cloud.redis_v1beta1 import enums
-from google.cloud.redis_v1beta1.proto import cloud_redis_pb2
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2
-from google.protobuf import field_mask_pb2
-
-
-class MultiCallableStub(object):
- """Stub for the grpc.UnaryUnaryMultiCallable interface."""
-
- def __init__(self, method, channel_stub):
- self.method = method
- self.channel_stub = channel_stub
-
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
- self.channel_stub.requests.append((self.method, request))
-
- response = None
- if self.channel_stub.responses:
- response = self.channel_stub.responses.pop()
-
- if isinstance(response, Exception):
- raise response
-
- if response:
- return response
-
-
-class ChannelStub(object):
- """Stub for the grpc.Channel interface."""
-
- def __init__(self, responses=[]):
- self.responses = responses
- self.requests = []
-
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
- return MultiCallableStub(method, self)
-
-
-class CustomException(Exception):
- pass
-
-
-class TestCloudRedisClient(object):
- def test_list_instances(self):
- # Setup Expected Response
- next_page_token = ""
- instances_element = {}
- instances = [instances_element]
- expected_response = {"next_page_token": next_page_token, "instances": instances}
- expected_response = cloud_redis_pb2.ListInstancesResponse(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
-
- paged_list_response = client.list_instances(parent)
- resources = list(paged_list_response)
- assert len(resources) == 1
-
- assert expected_response.instances[0] == resources[0]
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_list_instances_exception(self):
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
-
- paged_list_response = client.list_instances(parent)
- with pytest.raises(CustomException):
- list(paged_list_response)
-
- def test_get_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.get_instance(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.GetInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_instance_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- with pytest.raises(CustomException):
- client.get_instance(name)
-
- def test_create_instance(self):
- # Setup Expected Response
- name = "name3373707"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb_2 = 1493816946
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb_2,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_create_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
- instance_id = "test_instance"
- tier = enums.Instance.Tier.BASIC
- memory_size_gb = 1
- instance = {"tier": tier, "memory_size_gb": memory_size_gb}
-
- response = client.create_instance(parent, instance_id, instance)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.CreateInstanceRequest(
- parent=parent, instance_id=instance_id, instance=instance
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_create_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- parent = client.location_path("[PROJECT]", "[LOCATION]")
- instance_id = "test_instance"
- tier = enums.Instance.Tier.BASIC
- memory_size_gb = 1
- instance = {"tier": tier, "memory_size_gb": memory_size_gb}
-
- response = client.create_instance(parent, instance_id, instance)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_update_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name_2 = "displayName21615000987"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb_2 = 1493816946
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name_2,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb_2,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_update_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- paths_element = "display_name"
- paths_element_2 = "memory_size_gb"
- paths = [paths_element, paths_element_2]
- update_mask = {"paths": paths}
- display_name = "UpdatedDisplayName"
- name = "projects//locations//instances/"
- memory_size_gb = 4
- instance = {
- "display_name": display_name,
- "name": name,
- "memory_size_gb": memory_size_gb,
- }
-
- response = client.update_instance(update_mask, instance)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.UpdateInstanceRequest(
- update_mask=update_mask, instance=instance
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_update_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_update_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- paths_element = "display_name"
- paths_element_2 = "memory_size_gb"
- paths = [paths_element, paths_element_2]
- update_mask = {"paths": paths}
- display_name = "UpdatedDisplayName"
- name = "projects//locations//instances/"
- memory_size_gb = 4
- instance = {
- "display_name": display_name,
- "name": name,
- "memory_size_gb": memory_size_gb,
- }
-
- response = client.update_instance(update_mask, instance)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_import_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_import_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- input_config = {}
-
- response = client.import_instance(name, input_config)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ImportInstanceRequest(
- name=name, input_config=input_config
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_import_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_import_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- input_config = {}
-
- response = client.import_instance(name, input_config)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_export_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_export_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- output_config = {}
-
- response = client.export_instance(name, output_config)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.ExportInstanceRequest(
- name=name, output_config=output_config
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_export_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_export_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = "name3373707"
- output_config = {}
-
- response = client.export_instance(name, output_config)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_failover_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version = "redisVersion-685310444"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_failover_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.failover_instance(name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.FailoverInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_failover_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_failover_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.failover_instance(name)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_delete_instance(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = empty_pb2.Empty(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_delete_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.delete_instance(name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.DeleteInstanceRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_delete_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_delete_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
-
- response = client.delete_instance(name)
- exception = response.exception()
- assert exception.errors[0] == error
-
- def test_upgrade_instance(self):
- # Setup Expected Response
- name_2 = "name2-1052831874"
- display_name = "displayName1615086568"
- location_id = "locationId552319461"
- alternative_location_id = "alternativeLocationId-718920621"
- redis_version_2 = "redisVersion2-1453337401"
- reserved_ip_range = "reservedIpRange-1082940580"
- host = "host3208616"
- port = 3446913
- current_location_id = "currentLocationId1312712735"
- status_message = "statusMessage-239442758"
- memory_size_gb = 34199707
- authorized_network = "authorizedNetwork-1733809270"
- persistence_iam_identity = "persistenceIamIdentity1061944584"
- expected_response = {
- "name": name_2,
- "display_name": display_name,
- "location_id": location_id,
- "alternative_location_id": alternative_location_id,
- "redis_version": redis_version_2,
- "reserved_ip_range": reserved_ip_range,
- "host": host,
- "port": port,
- "current_location_id": current_location_id,
- "status_message": status_message,
- "memory_size_gb": memory_size_gb,
- "authorized_network": authorized_network,
- "persistence_iam_identity": persistence_iam_identity,
- }
- expected_response = cloud_redis_pb2.Instance(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_upgrade_instance", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
- redis_version = "redisVersion-685310444"
-
- response = client.upgrade_instance(name, redis_version)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = cloud_redis_pb2.UpgradeInstanceRequest(
- name=name, redis_version=redis_version
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_upgrade_instance_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_upgrade_instance_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = redis_v1beta1.CloudRedisClient()
-
- # Setup Request
- name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]")
- redis_version = "redisVersion-685310444"
-
- response = client.upgrade_instance(name, redis_version)
- exception = response.exception()
- assert exception.errors[0] == error