feat: add test proxy #836

Merged (34 commits) on Aug 18, 2023
Commits
aa1e40e
support emulator in data client
daniel-sanche Jul 17, 2023
b059139
added test proxy files
daniel-sanche Jul 17, 2023
3ed2168
cleaned up noxfile
daniel-sanche Jul 17, 2023
5199839
moved protos to subdir
daniel-sanche Jul 17, 2023
4e14bf3
close client
daniel-sanche Jul 17, 2023
cbb95c9
moved handlers into subdir
daniel-sanche Jul 17, 2023
f43aac1
reverted close
daniel-sanche Jul 18, 2023
91fc1e6
removed user agent
daniel-sanche Jul 18, 2023
06e5276
removed go submodule
daniel-sanche Jul 18, 2023
62b8e48
fixied typo
daniel-sanche Jul 20, 2023
237e051
removed unneeded files
daniel-sanche Jul 20, 2023
868ff2e
removed duplicate client handler legacy
daniel-sanche Jul 20, 2023
21a5077
Merge branch 'v3' into test_proxy2
daniel-sanche Aug 16, 2023
02f0c09
addressed PR comments
daniel-sanche Aug 16, 2023
456caba
ran blacken
daniel-sanche Aug 16, 2023
f3627c1
Merge branch 'v3' into test_proxy2
daniel-sanche Aug 16, 2023
bcc02d7
fix method name
daniel-sanche Aug 16, 2023
5e7c156
added missing import
daniel-sanche Aug 17, 2023
604d3d8
added conformance tests to kokoro
daniel-sanche Aug 17, 2023
14f359d
added conformance to nox sessions
daniel-sanche Aug 17, 2023
858c57d
Revert unwwanted noxfile changes
daniel-sanche Aug 17, 2023
36a3153
added missing run_tests file
daniel-sanche Aug 17, 2023
07b39b1
changed conformance test kokoro configs
daniel-sanche Aug 17, 2023
5d90478
ran blacken
daniel-sanche Aug 17, 2023
b69da5a
install golang for conformance tests
daniel-sanche Aug 17, 2023
df3ea47
update before attempting install
daniel-sanche Aug 17, 2023
8dcd444
changed go install method
daniel-sanche Aug 17, 2023
94a8684
moved go installation to run_tests
daniel-sanche Aug 17, 2023
72b8d1b
fixed failing conformance tests
daniel-sanche Aug 17, 2023
8496211
Merge branch 'v3' into test_proxy2
daniel-sanche Aug 17, 2023
71ba0ea
fixed read rows test error
daniel-sanche Aug 17, 2023
94e98db
fixed conformance test errors
daniel-sanche Aug 17, 2023
320d157
download go locally instead of installing to system
daniel-sanche Aug 18, 2023
5064870
fixed lint issue
daniel-sanche Aug 18, 2023
34 changes: 27 additions & 7 deletions google/cloud/bigtable/data/_async/client.py
@@ -29,18 +29,20 @@
 import warnings
 import sys
 import random
+import os
 
 from collections import namedtuple
 
 from google.cloud.bigtable_v2.services.bigtable.client import BigtableClientMeta
 from google.cloud.bigtable_v2.services.bigtable.async_client import BigtableAsyncClient
 from google.cloud.bigtable_v2.services.bigtable.async_client import DEFAULT_CLIENT_INFO
 from google.cloud.bigtable_v2.services.bigtable.transports.pooled_grpc_asyncio import (
-    PooledBigtableGrpcAsyncIOTransport,
+    PooledBigtableGrpcAsyncIOTransport, PooledChannel
 )
 from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest
 from google.cloud.client import ClientWithProject
 from google.api_core.exceptions import GoogleAPICallError
+from google.cloud.environment_vars import BIGTABLE_EMULATOR  # type: ignore
 from google.api_core import retry_async as retries
 from google.api_core import exceptions as core_exceptions
 from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
@@ -152,23 +154,41 @@ def __init__(
         # attempt to start background tasks
         self._channel_init_time = time.monotonic()
         self._channel_refresh_tasks: list[asyncio.Task[None]] = []
-        try:
-            self.start_background_channel_refresh()
-        except RuntimeError:
-            warnings.warn(
-                f"{self.__class__.__name__} should be started in an "
-                "asyncio event loop. Channel refresh will not be started",
-                RuntimeWarning,
-                stacklevel=2,
-            )
+        self._emulator_host = os.getenv(BIGTABLE_EMULATOR)
+        if self._emulator_host is not None:
+            # connect to an emulator host
+            warnings.warn(
+                "Connecting to Bigtable emulator at {}".format(self._emulator_host),
+                RuntimeWarning,
+                stacklevel=2,
+            )
+            self.transport._grpc_channel = PooledChannel(
+                pool_size=pool_size,
+                host=self._emulator_host,
+                insecure=True,
+            )
+            # refresh cached stubs to use emulator pool
+            self.transport._stubs = {}
+            self.transport._prep_wrapped_messages(client_info)
+        else:
+            # attempt to start background channel refresh tasks
+            try:
+                self.start_background_channel_refresh()
+            except RuntimeError:
+                warnings.warn(
+                    f"{self.__class__.__name__} should be started in an "
+                    "asyncio event loop. Channel refresh will not be started",
+                    RuntimeWarning,
+                    stacklevel=2,
+                )
 
     def start_background_channel_refresh(self) -> None:
         """
         Starts a background task to ping and warm each channel in the pool
         Raises:
         - RuntimeError if not called in an asyncio event loop
         """
-        if not self._channel_refresh_tasks:
+        if not self._channel_refresh_tasks and not self._emulator_host:
             # raise RuntimeError if there is no event loop
             asyncio.get_running_loop()
             for channel_idx in range(self.transport.pool_size):
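
In practical terms, the change above makes the async data client route traffic to a local emulator whenever the emulator environment variable (`google.cloud.environment_vars.BIGTABLE_EMULATOR`, i.e. `BIGTABLE_EMULATOR_HOST`) is set before the client is constructed, replacing the channel pool with an insecure `PooledChannel`. The sketch below is not part of the PR; the client class name (`BigtableDataClientAsync`), its import path, and the emulator address, project, instance, and table IDs are assumptions for illustration.

```python
import asyncio
import os

from google.cloud.bigtable.data import BigtableDataClientAsync  # assumed export path


async def main():
    # The constructor reads the emulator variable at construction time
    # (os.getenv(BIGTABLE_EMULATOR) in the diff above), so set it first.
    os.environ["BIGTABLE_EMULATOR_HOST"] = "localhost:8086"  # hypothetical emulator address
    client = BigtableDataClientAsync(project="my-project")
    table = client.get_table("my-instance", "my-table")
    row = await table.read_row(b"row-key")  # request is served by the emulator
    print(row)
    await client.close()


asyncio.run(main())
```

Note that when the emulator variable is set, `start_background_channel_refresh` becomes a no-op, which matches the new `and not self._emulator_host` guard in the diff.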
105 changes: 105 additions & 0 deletions test_proxy/README.md
@@ -0,0 +1,105 @@
# CBT Python Test Proxy

The CBT test proxy is intended for running conformance tests for the Cloud Bigtable Python client.

## Start test proxy

#### running the proxy with nox

You can launch the test proxy directly using `nox`, which will handle dependency management:

```
cd python-bigtable/test_proxy
nox -s test_proxy
```

The port can be configured with the `PROXY_SERVER_PORT` environment variable:

```
cd python-bigtable/test_proxy
PROXY_SERVER_PORT=8080 nox -s test_proxy
```

#### running the proxy script manually

You can also run the `test_proxy.py` file directly:

```
cd python-bigtable/test_proxy
python test_proxy.py
```

The port can be set by passing the `--port` flag:

```
cd python-bigtable/test_proxy
python test_proxy.py --port 8080
```

## Run the test cases

Prerequisites:
- If you have not already done so, [install golang](https://go.dev/doc/install).
- Before running tests, [launch an instance of the test proxy](#start-test-proxy)
  in a separate shell session, and make note of the port it is listening on.

#### running the test cases with nox

You can trigger the tests directly using `nox`, which will clone the test repo locally if it doesn't already exist:

```
cd python-bigtable/test_proxy
nox -s conformance_tests
```

The port can be configured with the `PROXY_SERVER_PORT` environment variable; it should match the port the proxy was started on:

```
cd python-bigtable/test_proxy
PROXY_SERVER_PORT=8080 nox -s conformance_tests
```

#### running the test cases manually

Clone and navigate to the Go test library:

```
git clone https://github.com/googleapis/cloud-bigtable-clients-test.git
cd cloud-bigtable-clients-test/tests
```


Launch the tests:

```
go test -v -proxy_addr=:50055
```

## Test a released client

You can run the test proxy against a released version of the library with `nox`
by setting the `PROXY_CLIENT_VERSION` environment variable:

```
PROXY_CLIENT_VERSION=3.0.0
nox -s test_proxy
```

If unset, the proxy will default to installing the library from source.

## Test the legacy client

By default, tests are run against the new data client. You can run the test proxy against the
legacy client instead by passing the `--legacy-client` flag:

```
nox -s test_proxy -- --legacy-client
```

or

```
python test_proxy.py --legacy-client
```
132 changes: 132 additions & 0 deletions test_proxy/client_handler_legacy.py

Reviewer comment:

> How much do you want to support the legacy client? I only see 3 APIs implemented; what's the future plan (including deprecation)?

daniel-sanche (Contributor, Author) replied:

> We will support them side-by-side for the medium term, with plans to eventually deprecate the old client. For now, my attention has been focused on the new data client, but it would be good to get test parity at some point.

@@ -0,0 +1,132 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains the client handler process for proxy_server.py.
"""
import os

from google.cloud.environment_vars import BIGTABLE_EMULATOR
from google.cloud.bigtable.client import Client

import client_handler_data as client_handler

import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)


class LegacyTestProxyClientHandler(client_handler.TestProxyClientHandler):

def __init__(
self,
data_target=None,
project_id=None,
instance_id=None,
app_profile_id=None,
per_operation_timeout=None,
**kwargs,
):
self.closed = False
# use emulator
os.environ[BIGTABLE_EMULATOR] = data_target
self.client = Client(project=project_id)
self.instance_id = instance_id
self.app_profile_id = app_profile_id
self.per_operation_timeout = per_operation_timeout

async def close(self):
self.closed = True

@client_handler.error_safe
async def ReadRows(self, request, **kwargs):
table_id = request["table_name"].split("/")[-1]
# app_profile_id = self.app_profile_id or request.get("app_profile_id", None)
instance = self.client.instance(self.instance_id)
table = instance.table(table_id)

limit = request.get("rows_limit", None)
start_key = request.get("rows", {}).get("row_keys", [None])[0]
end_key = request.get("rows", {}).get("row_keys", [None])[-1]
end_inclusive = request.get("rows", {}).get("row_ranges", [{}])[-1].get("end_key_closed", True)

row_list = []
for row in table.read_rows(start_key=start_key, end_key=end_key, limit=limit, end_inclusive=end_inclusive):
# parse results into proto formatted dict
dict_val = {"row_key": row.row_key}
for family, family_cells in row.cells.items():
family_dict = {"name": family}
for qualifier, qualifier_cells in family_cells.items():
column_dict = {"qualifier": qualifier}
for cell in qualifier_cells:
cell_dict = {
"value": cell.value,
"timestamp_micros": cell.timestamp.timestamp() * 1000000,
"labels": cell.labels,
}
column_dict.setdefault("cells", []).append(cell_dict)
family_dict.setdefault("columns", []).append(column_dict)
dict_val.setdefault("families", []).append(family_dict)
row_list.append(dict_val)
return row_list

@client_handler.error_safe
async def MutateRow(self, request, **kwargs):
from google.cloud.bigtable.row import DirectRow
table_id = request["table_name"].split("/")[-1]
instance = self.client.instance(self.instance_id)
table = instance.table(table_id)
row_key = request["row_key"]
new_row = DirectRow(row_key, table)
for m_dict in request.get("mutations", []):
if m_dict.get("set_cell"):
details = m_dict["set_cell"]
new_row.set_cell(details["family_name"], details["column_qualifier"], details["value"], timestamp=details["timestamp_micros"])
elif m_dict.get("delete_from_column"):
details = m_dict["delete_from_column"]
new_row.delete_cell(details["family_name"], details["column_qualifier"], timestamp=details["timestamp_micros"])
elif m_dict.get("delete_from_family"):
details = m_dict["delete_from_family"]
new_row.delete_cells(details["family_name"], timestamp=details["timestamp_micros"])
elif m_dict.get("delete_from_row"):
new_row.delete()
async with self.measure_call():
table.mutate_rows([new_row])
return "OK"

@client_handler.error_safe
async def BulkMutateRows(self, request, **kwargs):
from google.cloud.bigtable.row import DirectRow
table_id = request["table_name"].split("/")[-1]
instance = self.client.instance(self.instance_id)
table = instance.table(table_id)
rows = []
for entry in request.get("entries", []):
row_key = entry["row_key"]
new_row = DirectRow(row_key, table)
for m_dict in entry.get("mutations", {}):
if m_dict.get("set_cell"):
details = m_dict["set_cell"]
new_row.set_cell(details["family_name"], details["column_qualifier"], details["value"], timestamp=details.get("timestamp_micros",None))
elif m_dict.get("delete_from_column"):
details = m_dict["delete_from_column"]
new_row.delete_cell(details["family_name"], details["column_qualifier"], timestamp=details["timestamp_micros"])
elif m_dict.get("delete_from_family"):
details = m_dict["delete_from_family"]
new_row.delete_cells(details["family_name"], timestamp=details["timestamp_micros"])
elif m_dict.get("delete_from_row"):
new_row.delete()
rows.append(new_row)
async with self.measure_call():
table.mutate_rows(rows)
return "OK"
