From f6f6080463582ddd88b0cf9e394d20e6d8ef45dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Thu, 24 Oct 2024 12:47:40 +0200 Subject: [PATCH 01/15] test: add mock server tests --- google/spanner/v1/README.md | 14 + google/spanner/v1/__init__.py | 0 google/spanner/v1/spanner_pb2_grpc.py | 906 +++++++++++++++++++++ tests/unit/mockserver/__init__.py | 15 + tests/unit/mockserver/mock_spanner.py | 135 +++ tests/unit/mockserver_tests/__init__.py | 0 tests/unit/mockserver_tests/test_basics.py | 97 +++ 7 files changed, 1167 insertions(+) create mode 100644 google/spanner/v1/README.md create mode 100644 google/spanner/v1/__init__.py create mode 100644 google/spanner/v1/spanner_pb2_grpc.py create mode 100644 tests/unit/mockserver/__init__.py create mode 100644 tests/unit/mockserver/mock_spanner.py create mode 100644 tests/unit/mockserver_tests/__init__.py create mode 100644 tests/unit/mockserver_tests/test_basics.py diff --git a/google/spanner/v1/README.md b/google/spanner/v1/README.md new file mode 100644 index 0000000000..9f4914f55a --- /dev/null +++ b/google/spanner/v1/README.md @@ -0,0 +1,14 @@ +# Spanner Servicer + +The Spanner server definition files were generated with these commands: + +```shell +pip install grpcio-tools +git clone git@github.com:googleapis/googleapis.git +cd googleapis +python -m grpc_tools.protoc \ + -I . \ + --python_out=. --pyi_out=. --grpc_python_out=. \ + ./google/spanner/v1/*.proto +``` + diff --git a/google/spanner/v1/__init__.py b/google/spanner/v1/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/google/spanner/v1/spanner_pb2_grpc.py b/google/spanner/v1/spanner_pb2_grpc.py new file mode 100644 index 0000000000..8a2bad63a5 --- /dev/null +++ b/google/spanner/v1/spanner_pb2_grpc.py @@ -0,0 +1,906 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.spanner_v1.types import commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2 +from google.cloud.spanner_v1.types import result_set as google_dot_spanner_dot_v1_dot_result__set__pb2 +from google.cloud.spanner_v1.types import spanner as google_dot_spanner_dot_v1_dot_spanner__pb2 +from google.cloud.spanner_v1.types import transaction as google_dot_spanner_dot_v1_dot_transaction__pb2 + +GRPC_GENERATED_VERSION = '1.67.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class SpannerStub(object): + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSession = channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + _registered_method=True) + self.BatchCreateSessions = channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, + _registered_method=True) + self.GetSession = channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + _registered_method=True) + self.ListSessions = channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, + _registered_method=True) + self.DeleteSession = channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, + _registered_method=True) + self.ExecuteSql = channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + _registered_method=True) + self.ExecuteStreamingSql = channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + 
request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + _registered_method=True) + self.ExecuteBatchDml = channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, + _registered_method=True) + self.Read = channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + _registered_method=True) + self.StreamingRead = channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + _registered_method=True) + self.BeginTransaction = channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, + _registered_method=True) + self.Commit = channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, + _registered_method=True) + self.Rollback = channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, + _registered_method=True) + 
self.PartitionQuery = channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + _registered_method=True) + self.PartitionRead = channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + _registered_method=True) + self.BatchWrite = channel.unary_stream( + '/google.spanner.v1.Spanner/BatchWrite', + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, + _registered_method=True) + + +class SpannerServicer(object): + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + def CreateSession(self, request, context): + """Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner database. + Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it is a good idea to + delete idle and unneeded sessions. + Aside from explicit deletes, Cloud Spanner may delete sessions for which no + operations are sent for more than an hour. If a session is deleted, + requests to it return `NOT_FOUND`. 
+ + Idle sessions can be kept alive by sending a trivial SQL query + periodically, e.g., `"SELECT 1"`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BatchCreateSessions(self, request, context): + """Creates multiple new sessions. + + This API can be used to initialize a session cache on the clients. + See https://goo.gl/TgSFN2 for best practices on session cache management. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSession(self, request, context): + """Gets a session. Returns `NOT_FOUND` if the session does not exist. + This is mainly useful for determining whether a session is still + alive. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSessions(self, request, context): + """Lists all sessions in a given database. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSession(self, request, context): + """Ends a session, releasing server resources associated with it. This will + asynchronously trigger cancellation of any operations that are running with + this session. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExecuteSql(self, request, context): + """Executes an SQL statement, returning all results in a single reply. This + method cannot be used to return a result set larger than 10 MiB; + if the query yields more data than that, the query fails with + a `FAILED_PRECONDITION` error. + + Operations inside read-write transactions might return `ABORTED`. 
If + this occurs, the application should restart the transaction from + the beginning. See [Transaction][google.spanner.v1.Transaction] for more + details. + + Larger result sets can be fetched in streaming fashion by calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExecuteStreamingSql(self, request, context): + """Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except returns the + result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no limit on + the size of the returned result set. However, no individual row in the + result set can exceed 100 MiB, and no column value can exceed 10 MiB. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExecuteBatchDml(self, request, context): + """Executes a batch of SQL DML statements. This method allows many statements + to be run with lower latency than submitting them sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can succeed even if + a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement that failed. + Clients must inspect this field to determine whether an error occurred. + + Execution stops after the first failed statement; the remaining statements + are not executed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Read(self, request, context): + """Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method cannot be + used to return a result set larger than 10 MiB; if the read matches more + data than that, the read fails with a `FAILED_PRECONDITION` + error. + + Reads inside read-write transactions might return `ABORTED`. If + this occurs, the application should restart the transaction from + the beginning. See [Transaction][google.spanner.v1.Transaction] for more + details. + + Larger result sets can be yielded in streaming fashion by calling + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] instead. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingRead(self, request, context): + """Like [Read][google.spanner.v1.Spanner.Read], except returns the result set + as a stream. Unlike [Read][google.spanner.v1.Spanner.Read], there is no + limit on the size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can exceed + 10 MiB. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BeginTransaction(self, request, context): + """Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new transaction as a + side-effect. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + `Commit` might return an `ABORTED` error. This can occur at any time; + commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If `Commit` returns `ABORTED`, the caller should re-attempt + the transaction from the beginning, re-using the same session. + + On very rare occasions, `Commit` might return `UNKNOWN`. This can happen, + for example, if the client job experiences a 1+ hour networking failure. + At that point, Cloud Spanner has lost track of the transaction outcome and + we recommend that you perform another read from the database to see the + state of things as they are now. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Rollback(self, request, context): + """Rolls back a transaction, releasing any locks it holds. It is a good + idea to call this for any transaction that includes one or more + [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and ultimately + decides not to commit. + + `Rollback` returns `OK` if it successfully aborts the transaction, the + transaction was already aborted, or the transaction is not + found. `Rollback` never returns `ABORTED`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PartitionQuery(self, request, context): + """Creates a set of partition tokens that can be used to execute a query + operation in parallel. 
Each of the returned partition tokens can be used + by [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] to + specify a subset of the query result to read. The same session and + read-only transaction must be used by the PartitionQueryRequest used to + create the partition tokens and the ExecuteSqlRequests that use the + partition tokens. + + Partition tokens become invalid when the session used to create them + is deleted, is idle for too long, begins a new transaction, or becomes too + old. When any of these happen, it is not possible to resume the query, and + the whole operation must be restarted from the beginning. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PartitionRead(self, request, context): + """Creates a set of partition tokens that can be used to execute a read + operation in parallel. Each of the returned partition tokens can be used + by [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to specify a + subset of the read result to read. The same session and read-only + transaction must be used by the PartitionReadRequest used to create the + partition tokens and the ReadRequests that use the partition tokens. There + are no ordering guarantees on rows returned among the returned partition + tokens, or even within each individual StreamingRead call issued with a + partition_token. + + Partition tokens become invalid when the session used to create them + is deleted, is idle for too long, begins a new transaction, or becomes too + old. When any of these happen, it is not possible to resume the read, and + the whole operation must be restarted from the beginning. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BatchWrite(self, request, context): + """Batches the supplied mutation groups in a collection of efficient + transactions. All mutations in a group are committed atomically. However, + mutations across groups can be committed non-atomically in an unspecified + order and thus, they must be independent of each other. Partial failure is + possible, i.e., some groups may have been committed successfully, while + some may have failed. The results of individual batches are streamed into + the response as the batches are applied. + + BatchWrite requests are not replay protected, meaning that each mutation + group may be applied more than once. Replays of non-idempotent mutations + may have undesirable effects. For example, replays of an insert mutation + may produce an already exists error or if you use generated or commit + timestamp-based keys, it may result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups to be + idempotent to avoid this issue. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SpannerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSession': grpc.unary_unary_rpc_method_handler( + servicer.CreateSession, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, + ), + 'BatchCreateSessions': grpc.unary_unary_rpc_method_handler( + servicer.BatchCreateSessions, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.serialize, + ), + 'GetSession': grpc.unary_unary_rpc_method_handler( + servicer.GetSession, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, + ), + 'ListSessions': grpc.unary_unary_rpc_method_handler( + servicer.ListSessions, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.serialize, + ), + 'DeleteSession': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSession, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ExecuteSql': grpc.unary_unary_rpc_method_handler( + servicer.ExecuteSql, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, + ), + 'ExecuteStreamingSql': grpc.unary_stream_rpc_method_handler( + servicer.ExecuteStreamingSql, 
+ request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, + ), + 'ExecuteBatchDml': grpc.unary_unary_rpc_method_handler( + servicer.ExecuteBatchDml, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.serialize, + ), + 'Read': grpc.unary_unary_rpc_method_handler( + servicer.Read, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, + ), + 'StreamingRead': grpc.unary_stream_rpc_method_handler( + servicer.StreamingRead, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, + ), + 'BeginTransaction': grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.serialize, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.serialize, + ), + 'Rollback': grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'PartitionQuery': grpc.unary_unary_rpc_method_handler( + servicer.PartitionQuery, + 
request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, + ), + 'PartitionRead': grpc.unary_unary_rpc_method_handler( + servicer.PartitionRead, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, + ), + 'BatchWrite': grpc.unary_stream_rpc_method_handler( + servicer.BatchWrite, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.serialize, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.spanner.v1.Spanner', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('google.spanner.v1.Spanner', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class Spanner(object): + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. 
+ """ + + @staticmethod + def CreateSession(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/CreateSession', + google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def BatchCreateSessions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/BatchCreateSessions', + google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetSession(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/GetSession', + google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListSessions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, 
+ insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/ListSessions', + google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteSession(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/DeleteSession', + google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, + google_dot_protobuf_dot_empty__pb2.Empty.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ExecuteSql(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/ExecuteSql', + google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ExecuteStreamingSql(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + 
target, + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ExecuteBatchDml(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/ExecuteBatchDml', + google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Read(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/Read', + google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def StreamingRead(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/google.spanner.v1.Spanner/StreamingRead', + google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + 
google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def BeginTransaction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/BeginTransaction', + google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, + google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Commit(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/Commit', + google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, + google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Rollback(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/Rollback', + google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, + google_dot_protobuf_dot_empty__pb2.Empty.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + 
wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def PartitionQuery(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/PartitionQuery', + google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def PartitionRead(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.v1.Spanner/PartitionRead', + google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def BatchWrite(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/google.spanner.v1.Spanner/BatchWrite', + google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, + google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/tests/unit/mockserver/__init__.py 
b/tests/unit/mockserver/__init__.py new file mode 100644 index 0000000000..8f6cf06824 --- /dev/null +++ b/tests/unit/mockserver/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/mockserver/mock_spanner.py b/tests/unit/mockserver/mock_spanner.py new file mode 100644 index 0000000000..b3ef6eade9 --- /dev/null +++ b/tests/unit/mockserver/mock_spanner.py @@ -0,0 +1,135 @@ +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +import google.spanner.v1.spanner_pb2_grpc as spanner_grpc +import google.cloud.spanner_v1.types.result_set as result_set +import google.cloud.spanner_v1.types.transaction as transaction +import google.cloud.spanner_v1.types.commit_response as commit +import google.cloud.spanner_v1.types.spanner as spanner +from concurrent import futures +import grpc + +class MockSpanner: + def __init__(self): + self.results = {} + + def add_result(self, sql: str, result: result_set.ResultSet): + self.results[sql] = result + + def get_result_as_partial_result_sets(self, sql: str) -> [result_set.PartialResultSet]: + result: result_set.ResultSet = self.results.get(sql) + if result is None: + return [] + partials = [] + first = True + for row in result.rows: + partial = result_set.PartialResultSet() + if first: + partial.metadata=result.metadata + partial.values.extend(row) + partials.append(partial) + return partials + +class SpannerServicer(spanner_grpc.SpannerServicer): + def __init__(self): + self.requests = [] + self.session_counter = 0 + self.sessions = {} + self._mock_spanner = MockSpanner() + + @property + def mock_spanner(self): + return self._mock_spanner + + def CreateSession(self, request, context): + self.requests.append(request) + return self.__create_session(request.database, request.session) + + def BatchCreateSessions(self, request, context): + self.requests.append(request) + sessions = [] + for i in range(request.session_count): + sessions.append(self.__create_session(request.database, request.session_template)) + return spanner.BatchCreateSessionsResponse(dict(session=sessions)) + + def __create_session(self, database: str, session_template: spanner.Session): + self.session_counter += 1 + session = spanner.Session() + session.name = database + "/sessions/" + str(self.session_counter) + session.multiplexed = session_template.multiplexed + 
session.labels.MergeFrom(session_template.labels) + session.creator_role = session_template.creator_role + self.sessions[session.name] = session + return session + + def GetSession(self, request, context): + return spanner.Session() + + def ListSessions(self, request, context): + return [spanner.Session()] + + def DeleteSession(self, request, context): + return empty_pb2.Empty() + + def ExecuteSql(self, request, context): + return result_set.ResultSet() + + def ExecuteStreamingSql(self, request, context): + self.requests.append(request) + partials = self.mock_spanner.get_result_as_partial_result_sets(request.sql) + for result in partials: + yield result + + def ExecuteBatchDml(self, request, context): + return spanner.ExecuteBatchDmlResponse() + + def Read(self, request, context): + return result_set.ResultSet() + + def StreamingRead(self, request, context): + for result in [result_set.PartialResultSet(), result_set.PartialResultSet()]: + yield result + + def BeginTransaction(self, request, context): + return transaction.Transaction() + + def Commit(self, request, context): + return commit.CommitResponse() + + def Rollback(self, request, context): + return empty_pb2.Empty() + + def PartitionQuery(self, request, context): + return spanner.PartitionResponse() + + def PartitionRead(self, request, context): + return spanner.PartitionResponse() + + def BatchWrite(self, request, context): + for result in [spanner.BatchWriteResponse(), spanner.BatchWriteResponse()]: + yield result + +def start_mock_server() -> (grpc.Server, SpannerServicer, int): + spanner_servicer = SpannerServicer() + spanner_server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + spanner_grpc.add_SpannerServicer_to_server(spanner_servicer, spanner_server) + port = spanner_server.add_insecure_port("[::]:0") + spanner_server.start() + return spanner_server, spanner_servicer, port + +if __name__ == "__main__": + server, _ = start_mock_server() + server.wait_for_termination() diff --git 
a/tests/unit/mockserver_tests/__init__.py b/tests/unit/mockserver_tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/mockserver_tests/test_basics.py b/tests/unit/mockserver_tests/test_basics.py new file mode 100644 index 0000000000..7621f7e9c4 --- /dev/null +++ b/tests/unit/mockserver_tests/test_basics.py @@ -0,0 +1,97 @@ +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +from tests.unit.mockserver.mock_spanner import start_mock_server, \ + SpannerServicer +import google.cloud.spanner_v1.types.type as spanner_type +import google.cloud.spanner_v1.types.result_set as result_set +from google.api_core.client_options import ClientOptions +from google.auth.credentials import AnonymousCredentials +from google.cloud.spanner_v1 import Client, FixedSizePool +from google.cloud.spanner_v1.database import Database +from google.cloud.spanner_v1.instance import Instance +import grpc + +class TestBasics(unittest.TestCase): + server: grpc.Server = None + service: SpannerServicer = None + port: int = None + + def __init__(self, *args, **kwargs): + super(TestBasics, self).__init__(*args, **kwargs) + self._client = None + self._instance = None + self._database = None + + @classmethod + def setUpClass(cls): + TestBasics.server, TestBasics.service, TestBasics.port = start_mock_server() + + @classmethod + def tearDownClass(cls): + if TestBasics.server is not None: + 
TestBasics.server.stop(grace=None) + TestBasics.server = None + + @property + def client(self) -> Client: + if self._client is None: + self._client = Client( + project="test-project", + credentials=AnonymousCredentials(), + client_options=ClientOptions( + api_endpoint="localhost:" + str(TestBasics.port), + ) + ) + return self._client + + @property + def instance(self) -> Instance: + if self._instance is None: + self._instance = self.client.instance("test-instance") + return self._instance + + @property + def database(self) -> Database: + if self._database is None: + self._database = self.instance.database( + "test-database", + pool=FixedSizePool(size=10) + ) + return self._database + + def test_select1(self): + result = result_set.ResultSet(dict( + metadata=result_set.ResultSetMetadata(dict( + row_type=spanner_type.StructType(dict( + fields=[spanner_type.StructType.Field(dict( + name="c", + type=spanner_type.Type(dict( + code=spanner_type.TypeCode.INT64) + )) + )] + ))) + ), + )) + result.rows.extend(["1"]) + TestBasics.service.mock_spanner.add_result("select 1", result) + with self.database.snapshot() as snapshot: + results = snapshot.execute_sql("select 1") + result_list = [] + for row in results: + result_list.append(row) + self.assertEqual(1, row[0]) + self.assertEqual(1, len(result_list)) + From 10bff97c2f2d6cd74b2f3d6099e0e992144e0e90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Fri, 25 Oct 2024 09:25:27 +0200 Subject: [PATCH 02/15] chore: move to testing folder + fix formatting --- .../spanner_v1/testing}/__init__.py | 0 .../cloud/spanner_v1/testing}/mock_spanner.py | 29 +- .../spanner_v1/testing}/spanner_pb2_grpc.py | 858 ++++++++++-------- google/spanner/v1/README.md | 14 - tests/unit/mockserver/__init__.py | 15 - tests/unit/mockserver_tests/test_basics.py | 68 +- 6 files changed, 540 insertions(+), 444 deletions(-) rename google/{spanner/v1 => cloud/spanner_v1/testing}/__init__.py (100%) rename {tests/unit/mockserver => 
google/cloud/spanner_v1/testing}/mock_spanner.py (87%) rename google/{spanner/v1 => cloud/spanner_v1/testing}/spanner_pb2_grpc.py (50%) delete mode 100644 google/spanner/v1/README.md delete mode 100644 tests/unit/mockserver/__init__.py diff --git a/google/spanner/v1/__init__.py b/google/cloud/spanner_v1/testing/__init__.py similarity index 100% rename from google/spanner/v1/__init__.py rename to google/cloud/spanner_v1/testing/__init__.py diff --git a/tests/unit/mockserver/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py similarity index 87% rename from tests/unit/mockserver/mock_spanner.py rename to google/cloud/spanner_v1/testing/mock_spanner.py index b3ef6eade9..6251e26883 100644 --- a/tests/unit/mockserver/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -14,7 +14,7 @@ from google.protobuf import empty_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore -import google.spanner.v1.spanner_pb2_grpc as spanner_grpc +import google.cloud.spanner_v1.testing.spanner_pb2_grpc as spanner_grpc import google.cloud.spanner_v1.types.result_set as result_set import google.cloud.spanner_v1.types.transaction as transaction import google.cloud.spanner_v1.types.commit_response as commit @@ -22,6 +22,7 @@ from concurrent import futures import grpc + class MockSpanner: def __init__(self): self.results = {} @@ -29,7 +30,9 @@ def __init__(self): def add_result(self, sql: str, result: result_set.ResultSet): self.results[sql] = result - def get_result_as_partial_result_sets(self, sql: str) -> [result_set.PartialResultSet]: + def get_result_as_partial_result_sets( + self, sql: str + ) -> [result_set.PartialResultSet]: result: result_set.ResultSet = self.results.get(sql) if result is None: return [] @@ -38,14 +41,16 @@ def get_result_as_partial_result_sets(self, sql: str) -> [result_set.PartialResu for row in result.rows: partial = result_set.PartialResultSet() if first: - partial.metadata=result.metadata + partial.metadata = 
result.metadata partial.values.extend(row) partials.append(partial) return partials + +# An in-memory mock Spanner server that can be used for testing. class SpannerServicer(spanner_grpc.SpannerServicer): def __init__(self): - self.requests = [] + self._requests = [] self.session_counter = 0 self.sessions = {} self._mock_spanner = MockSpanner() @@ -54,15 +59,21 @@ def __init__(self): def mock_spanner(self): return self._mock_spanner + @property + def requests(self): + return self._requests + def CreateSession(self, request, context): - self.requests.append(request) + self._requests.append(request) return self.__create_session(request.database, request.session) def BatchCreateSessions(self, request, context): - self.requests.append(request) + self._requests.append(request) sessions = [] for i in range(request.session_count): - sessions.append(self.__create_session(request.database, request.session_template)) + sessions.append( + self.__create_session(request.database, request.session_template) + ) return spanner.BatchCreateSessionsResponse(dict(session=sessions)) def __create_session(self, database: str, session_template: spanner.Session): @@ -88,7 +99,7 @@ def ExecuteSql(self, request, context): return result_set.ResultSet() def ExecuteStreamingSql(self, request, context): - self.requests.append(request) + self._requests.append(request) partials = self.mock_spanner.get_result_as_partial_result_sets(request.sql) for result in partials: yield result @@ -122,6 +133,7 @@ def BatchWrite(self, request, context): for result in [spanner.BatchWriteResponse(), spanner.BatchWriteResponse()]: yield result + def start_mock_server() -> (grpc.Server, SpannerServicer, int): spanner_servicer = SpannerServicer() spanner_server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) @@ -130,6 +142,7 @@ def start_mock_server() -> (grpc.Server, SpannerServicer, int): spanner_server.start() return spanner_server, spanner_servicer, port + if __name__ == "__main__": server, _ = 
start_mock_server() server.wait_for_termination() diff --git a/google/spanner/v1/spanner_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py similarity index 50% rename from google/spanner/v1/spanner_pb2_grpc.py rename to google/cloud/spanner_v1/testing/spanner_pb2_grpc.py index 8a2bad63a5..a9106764ae 100644 --- a/google/spanner/v1/spanner_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py @@ -1,31 +1,53 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! + +# Generated with the following commands: +# +# pip install grpcio-tools +# git clone git@github.com:googleapis/googleapis.git +# cd googleapis +# python -m grpc_tools.protoc \ +# -I . \ +# --python_out=. --pyi_out=. --grpc_python_out=. \ +# ./google/spanner/v1/*.proto + """Client and server classes corresponding to protobuf-defined services.""" import grpc import warnings from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.cloud.spanner_v1.types import commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2 -from google.cloud.spanner_v1.types import result_set as google_dot_spanner_dot_v1_dot_result__set__pb2 -from google.cloud.spanner_v1.types import spanner as google_dot_spanner_dot_v1_dot_spanner__pb2 -from google.cloud.spanner_v1.types import transaction as google_dot_spanner_dot_v1_dot_transaction__pb2 - -GRPC_GENERATED_VERSION = '1.67.0' +from google.cloud.spanner_v1.types import ( + commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2, +) +from google.cloud.spanner_v1.types import ( + result_set as google_dot_spanner_dot_v1_dot_result__set__pb2, +) +from google.cloud.spanner_v1.types import ( + spanner as google_dot_spanner_dot_v1_dot_spanner__pb2, +) +from google.cloud.spanner_v1.types import ( + transaction as google_dot_spanner_dot_v1_dot_transaction__pb2, +) + +GRPC_GENERATED_VERSION = "1.67.0" GRPC_VERSION = grpc.__version__ _version_not_supported = False try: from 
grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) except ImportError: _version_not_supported = True if _version_not_supported: raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." ) @@ -43,85 +65,101 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.CreateSession = channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/CreateSession", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + _registered_method=True, + ) self.BatchCreateSessions = channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/BatchCreateSessions", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, + _registered_method=True, + ) self.GetSession = channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/GetSession", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, + _registered_method=True, + ) self.ListSessions = channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, - 
_registered_method=True) + "/google.spanner.v1.Spanner/ListSessions", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, + _registered_method=True, + ) self.DeleteSession = channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/DeleteSession", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, + _registered_method=True, + ) self.ExecuteSql = channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/ExecuteSql", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + _registered_method=True, + ) self.ExecuteStreamingSql = channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/ExecuteStreamingSql", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + _registered_method=True, + ) self.ExecuteBatchDml = 
channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/ExecuteBatchDml", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, + _registered_method=True, + ) self.Read = channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/Read", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, + _registered_method=True, + ) self.StreamingRead = channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/StreamingRead", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, + _registered_method=True, + ) self.BeginTransaction = channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, - _registered_method=True) + 
"/google.spanner.v1.Spanner/BeginTransaction", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, + _registered_method=True, + ) self.Commit = channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/Commit", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, + _registered_method=True, + ) self.Rollback = channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/Rollback", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, + _registered_method=True, + ) self.PartitionQuery = channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/PartitionQuery", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + _registered_method=True, + ) self.PartitionRead = channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - 
request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/PartitionRead", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, + _registered_method=True, + ) self.BatchWrite = channel.unary_stream( - '/google.spanner.v1.Spanner/BatchWrite', - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, - _registered_method=True) + "/google.spanner.v1.Spanner/BatchWrite", + request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, + response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, + _registered_method=True, + ) class SpannerServicer(object): @@ -153,8 +191,8 @@ def CreateSession(self, request, context): periodically, e.g., `"SELECT 1"`. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def BatchCreateSessions(self, request, context): """Creates multiple new sessions. @@ -163,8 +201,8 @@ def BatchCreateSessions(self, request, context): See https://goo.gl/TgSFN2 for best practices on session cache management. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetSession(self, request, context): """Gets a session. 
Returns `NOT_FOUND` if the session does not exist. @@ -172,15 +210,14 @@ def GetSession(self, request, context): alive. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListSessions(self, request, context): - """Lists all sessions in a given database. - """ + """Lists all sessions in a given database.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def DeleteSession(self, request, context): """Ends a session, releasing server resources associated with it. This will @@ -188,8 +225,8 @@ def DeleteSession(self, request, context): this session. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ExecuteSql(self, request, context): """Executes an SQL statement, returning all results in a single reply. This @@ -207,8 +244,8 @@ def ExecuteSql(self, request, context): instead. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ExecuteStreamingSql(self, request, context): """Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except returns the @@ -218,8 +255,8 @@ def ExecuteStreamingSql(self, request, context): result set can exceed 100 MiB, and no column value can exceed 10 MiB. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ExecuteBatchDml(self, request, context): """Executes a batch of SQL DML statements. This method allows many statements @@ -236,8 +273,8 @@ def ExecuteBatchDml(self, request, context): are not executed. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Read(self, request, context): """Reads rows from the database using key lookups and scans, as a @@ -256,8 +293,8 @@ def Read(self, request, context): [StreamingRead][google.spanner.v1.Spanner.StreamingRead] instead. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def StreamingRead(self, request, context): """Like [Read][google.spanner.v1.Spanner.Read], except returns the result set @@ -267,8 +304,8 @@ def StreamingRead(self, request, context): 10 MiB. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def BeginTransaction(self, request, context): """Begins a new transaction. This step can often be skipped: @@ -278,8 +315,8 @@ def BeginTransaction(self, request, context): side-effect. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Commit(self, request, context): """Commits a transaction. The request includes the mutations to be @@ -298,8 +335,8 @@ def Commit(self, request, context): state of things as they are now. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Rollback(self, request, context): """Rolls back a transaction, releasing any locks it holds. It is a good @@ -313,8 +350,8 @@ def Rollback(self, request, context): found. `Rollback` never returns `ABORTED`. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def PartitionQuery(self, request, context): """Creates a set of partition tokens that can be used to execute a query @@ -331,8 +368,8 @@ def PartitionQuery(self, request, context): the whole operation must be restarted from the beginning. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def PartitionRead(self, request, context): """Creates a set of partition tokens that can be used to execute a read @@ -351,8 +388,8 @@ def PartitionRead(self, request, context): the whole operation must be restarted from the beginning. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def BatchWrite(self, request, context): """Batches the supplied mutation groups in a collection of efficient @@ -372,100 +409,103 @@ def BatchWrite(self, request, context): idempotent to avoid this issue. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_SpannerServicer_to_server(servicer, server): rpc_method_handlers = { - 'CreateSession': grpc.unary_unary_rpc_method_handler( - servicer.CreateSession, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, - ), - 'BatchCreateSessions': grpc.unary_unary_rpc_method_handler( - servicer.BatchCreateSessions, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.serialize, - ), - 'GetSession': grpc.unary_unary_rpc_method_handler( - servicer.GetSession, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, - ), - 'ListSessions': grpc.unary_unary_rpc_method_handler( - servicer.ListSessions, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.serialize, - ), - 'DeleteSession': grpc.unary_unary_rpc_method_handler( - 
servicer.DeleteSession, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.deserialize, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ExecuteSql': grpc.unary_unary_rpc_method_handler( - servicer.ExecuteSql, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, - ), - 'ExecuteStreamingSql': grpc.unary_stream_rpc_method_handler( - servicer.ExecuteStreamingSql, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, - ), - 'ExecuteBatchDml': grpc.unary_unary_rpc_method_handler( - servicer.ExecuteBatchDml, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.serialize, - ), - 'Read': grpc.unary_unary_rpc_method_handler( - servicer.Read, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, - ), - 'StreamingRead': grpc.unary_stream_rpc_method_handler( - servicer.StreamingRead, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, - ), - 'BeginTransaction': grpc.unary_unary_rpc_method_handler( - servicer.BeginTransaction, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.serialize, - ), - 'Commit': grpc.unary_unary_rpc_method_handler( - servicer.Commit, - 
request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.serialize, - ), - 'Rollback': grpc.unary_unary_rpc_method_handler( - servicer.Rollback, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.deserialize, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'PartitionQuery': grpc.unary_unary_rpc_method_handler( - servicer.PartitionQuery, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, - ), - 'PartitionRead': grpc.unary_unary_rpc_method_handler( - servicer.PartitionRead, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, - ), - 'BatchWrite': grpc.unary_stream_rpc_method_handler( - servicer.BatchWrite, - request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.deserialize, - response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.serialize, - ), + "CreateSession": grpc.unary_unary_rpc_method_handler( + servicer.CreateSession, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, + ), + "BatchCreateSessions": grpc.unary_unary_rpc_method_handler( + servicer.BatchCreateSessions, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.serialize, + ), + "GetSession": grpc.unary_unary_rpc_method_handler( + servicer.GetSession, + 
request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.serialize, + ), + "ListSessions": grpc.unary_unary_rpc_method_handler( + servicer.ListSessions, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.serialize, + ), + "DeleteSession": grpc.unary_unary_rpc_method_handler( + servicer.DeleteSession, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "ExecuteSql": grpc.unary_unary_rpc_method_handler( + servicer.ExecuteSql, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, + ), + "ExecuteStreamingSql": grpc.unary_stream_rpc_method_handler( + servicer.ExecuteStreamingSql, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, + ), + "ExecuteBatchDml": grpc.unary_unary_rpc_method_handler( + servicer.ExecuteBatchDml, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.serialize, + ), + "Read": grpc.unary_unary_rpc_method_handler( + servicer.Read, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.serialize, + ), + "StreamingRead": grpc.unary_stream_rpc_method_handler( + servicer.StreamingRead, + 
request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.serialize, + ), + "BeginTransaction": grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.serialize, + ), + "Commit": grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.serialize, + ), + "Rollback": grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "PartitionQuery": grpc.unary_unary_rpc_method_handler( + servicer.PartitionQuery, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, + ), + "PartitionRead": grpc.unary_unary_rpc_method_handler( + servicer.PartitionRead, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.serialize, + ), + "BatchWrite": grpc.unary_stream_rpc_method_handler( + servicer.BatchWrite, + request_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.deserialize, + response_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.serialize, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'google.spanner.v1.Spanner', rpc_method_handlers) + "google.spanner.v1.Spanner", rpc_method_handlers 
+ ) server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('google.spanner.v1.Spanner', rpc_method_handlers) + server.add_registered_method_handlers( + "google.spanner.v1.Spanner", rpc_method_handlers + ) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. class Spanner(object): """Cloud Spanner API @@ -474,20 +514,22 @@ class Spanner(object): """ @staticmethod - def CreateSession(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def CreateSession( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/CreateSession', + "/google.spanner.v1.Spanner/CreateSession", google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, options, @@ -498,23 +540,26 @@ def CreateSession(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def BatchCreateSessions(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def BatchCreateSessions( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/BatchCreateSessions', + "/google.spanner.v1.Spanner/BatchCreateSessions", google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, 
google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, options, @@ -525,23 +570,26 @@ def BatchCreateSessions(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetSession(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetSession( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/GetSession', + "/google.spanner.v1.Spanner/GetSession", google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, options, @@ -552,23 +600,26 @@ def GetSession(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListSessions(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListSessions( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/ListSessions', + "/google.spanner.v1.Spanner/ListSessions", google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, options, @@ -579,23 +630,26 @@ def ListSessions(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def DeleteSession(request, - target, - 
options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def DeleteSession( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/DeleteSession', + "/google.spanner.v1.Spanner/DeleteSession", google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, google_dot_protobuf_dot_empty__pb2.Empty.from_json, options, @@ -606,23 +660,26 @@ def DeleteSession(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ExecuteSql(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ExecuteSql( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/ExecuteSql', + "/google.spanner.v1.Spanner/ExecuteSql", google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, options, @@ -633,23 +690,26 @@ def ExecuteSql(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ExecuteStreamingSql(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ExecuteStreamingSql( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + 
compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_stream( request, target, - '/google.spanner.v1.Spanner/ExecuteStreamingSql', + "/google.spanner.v1.Spanner/ExecuteStreamingSql", google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, options, @@ -660,23 +720,26 @@ def ExecuteStreamingSql(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ExecuteBatchDml(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ExecuteBatchDml( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/ExecuteBatchDml', + "/google.spanner.v1.Spanner/ExecuteBatchDml", google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, options, @@ -687,23 +750,26 @@ def ExecuteBatchDml(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def Read(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def Read( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/Read', + "/google.spanner.v1.Spanner/Read", 
google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, options, @@ -714,23 +780,26 @@ def Read(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def StreamingRead(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def StreamingRead( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_stream( request, target, - '/google.spanner.v1.Spanner/StreamingRead', + "/google.spanner.v1.Spanner/StreamingRead", google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, options, @@ -741,23 +810,26 @@ def StreamingRead(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def BeginTransaction(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def BeginTransaction( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/BeginTransaction', + "/google.spanner.v1.Spanner/BeginTransaction", google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, options, @@ -768,23 +840,26 @@ def BeginTransaction(request, wait_for_ready, timeout, metadata, - _registered_method=True) + 
_registered_method=True, + ) @staticmethod - def Commit(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def Commit( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/Commit', + "/google.spanner.v1.Spanner/Commit", google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, options, @@ -795,23 +870,26 @@ def Commit(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def Rollback(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def Rollback( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/Rollback', + "/google.spanner.v1.Spanner/Rollback", google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, google_dot_protobuf_dot_empty__pb2.Empty.from_json, options, @@ -822,23 +900,26 @@ def Rollback(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def PartitionQuery(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def PartitionQuery( + request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/PartitionQuery', + "/google.spanner.v1.Spanner/PartitionQuery", google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, options, @@ -849,23 +930,26 @@ def PartitionQuery(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def PartitionRead(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def PartitionRead( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.v1.Spanner/PartitionRead', + "/google.spanner.v1.Spanner/PartitionRead", google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, options, @@ -876,23 +960,26 @@ def PartitionRead(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def BatchWrite(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def BatchWrite( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_stream( request, target, - '/google.spanner.v1.Spanner/BatchWrite', + "/google.spanner.v1.Spanner/BatchWrite", 
google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, options, @@ -903,4 +990,5 @@ def BatchWrite(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) diff --git a/google/spanner/v1/README.md b/google/spanner/v1/README.md deleted file mode 100644 index 9f4914f55a..0000000000 --- a/google/spanner/v1/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Spanner Servicer - -The Spanner server definition files were generated with these commands: - -```shell -pip install grpcio-tools -git clone git@github.com:googleapis/googleapis.git -cd googleapis -python -m grpc_tools.protoc \ - -I . \ - --python_out=. --pyi_out=. --grpc_python_out=. \ - ./google/spanner/v1/*.proto -``` - diff --git a/tests/unit/mockserver/__init__.py b/tests/unit/mockserver/__init__.py deleted file mode 100644 index 8f6cf06824..0000000000 --- a/tests/unit/mockserver/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/mockserver_tests/test_basics.py b/tests/unit/mockserver_tests/test_basics.py index 7621f7e9c4..407bbc88cb 100644 --- a/tests/unit/mockserver_tests/test_basics.py +++ b/tests/unit/mockserver_tests/test_basics.py @@ -13,20 +13,28 @@ # limitations under the License. 
import unittest -from tests.unit.mockserver.mock_spanner import start_mock_server, \ - SpannerServicer +from google.cloud.spanner_v1.testing.mock_spanner import ( + start_mock_server, + SpannerServicer, +) import google.cloud.spanner_v1.types.type as spanner_type import google.cloud.spanner_v1.types.result_set as result_set from google.api_core.client_options import ClientOptions from google.auth.credentials import AnonymousCredentials -from google.cloud.spanner_v1 import Client, FixedSizePool +from google.cloud.spanner_v1 import ( + Client, + FixedSizePool, + BatchCreateSessionsRequest, + ExecuteSqlRequest, +) from google.cloud.spanner_v1.database import Database from google.cloud.spanner_v1.instance import Instance import grpc + class TestBasics(unittest.TestCase): server: grpc.Server = None - service: SpannerServicer = None + spanner_service: SpannerServicer = None port: int = None def __init__(self, *args, **kwargs): @@ -37,7 +45,11 @@ def __init__(self, *args, **kwargs): @classmethod def setUpClass(cls): - TestBasics.server, TestBasics.service, TestBasics.port = start_mock_server() + ( + TestBasics.server, + TestBasics.spanner_service, + TestBasics.port, + ) = start_mock_server() @classmethod def tearDownClass(cls): @@ -53,7 +65,7 @@ def client(self) -> Client: credentials=AnonymousCredentials(), client_options=ClientOptions( api_endpoint="localhost:" + str(TestBasics.port), - ) + ), ) return self._client @@ -67,26 +79,35 @@ def instance(self) -> Instance: def database(self) -> Database: if self._database is None: self._database = self.instance.database( - "test-database", - pool=FixedSizePool(size=10) + "test-database", pool=FixedSizePool(size=10) ) return self._database def test_select1(self): - result = result_set.ResultSet(dict( - metadata=result_set.ResultSetMetadata(dict( - row_type=spanner_type.StructType(dict( - fields=[spanner_type.StructType.Field(dict( - name="c", - type=spanner_type.Type(dict( - code=spanner_type.TypeCode.INT64) - )) - )] - ))) - 
), - )) + result = result_set.ResultSet( + dict( + metadata=result_set.ResultSetMetadata( + dict( + row_type=spanner_type.StructType( + dict( + fields=[ + spanner_type.StructType.Field( + dict( + name="c", + type=spanner_type.Type( + dict(code=spanner_type.TypeCode.INT64) + ), + ) + ) + ] + ) + ) + ) + ), + ) + ) result.rows.extend(["1"]) - TestBasics.service.mock_spanner.add_result("select 1", result) + TestBasics.spanner_service.mock_spanner.add_result("select 1", result) with self.database.snapshot() as snapshot: results = snapshot.execute_sql("select 1") result_list = [] @@ -94,4 +115,7 @@ def test_select1(self): result_list.append(row) self.assertEqual(1, row[0]) self.assertEqual(1, len(result_list)) - + requests = self.spanner_service.requests + self.assertEqual(2, len(requests)) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) From e73620106fa90ef945df3f20373783e6c5d9c630 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Fri, 25 Oct 2024 12:21:01 +0200 Subject: [PATCH 03/15] refactor: move mock server tests to separate directory --- .github/workflows/mock_server_tests.yaml | 21 ++++++++++++++ noxfile.py | 29 +++++++++++++++++++ tests/{unit => }/mockserver_tests/__init__.py | 0 .../mockserver_tests/test_basics.py | 0 4 files changed, 50 insertions(+) create mode 100644 .github/workflows/mock_server_tests.yaml rename tests/{unit => }/mockserver_tests/__init__.py (100%) rename tests/{unit => }/mockserver_tests/test_basics.py (100%) diff --git a/.github/workflows/mock_server_tests.yaml b/.github/workflows/mock_server_tests.yaml new file mode 100644 index 0000000000..2da5320071 --- /dev/null +++ b/.github/workflows/mock_server_tests.yaml @@ -0,0 +1,21 @@ +on: + push: + branches: + - main + pull_request: +name: Run Spanner tests against an in-mem mock server +jobs: + system-tests: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: 
actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: 3.12 + - name: Install nox + run: python -m pip install nox + - name: Run mock server tests + run: nox -s mockserver diff --git a/noxfile.py b/noxfile.py index 3b656a758c..3d3a4c9335 100644 --- a/noxfile.py +++ b/noxfile.py @@ -33,6 +33,7 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12" UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ @@ -226,6 +227,34 @@ def unit(session, protobuf_implementation): ) +@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION) +def mockserver(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + # install_unittest_dependencies(session, "-c", constraints_path) + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) + + # Run py.test against the mockserver tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "mockserver_tests"), + *session.posargs, + ) + + def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
diff --git a/tests/unit/mockserver_tests/__init__.py b/tests/mockserver_tests/__init__.py similarity index 100% rename from tests/unit/mockserver_tests/__init__.py rename to tests/mockserver_tests/__init__.py diff --git a/tests/unit/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py similarity index 100% rename from tests/unit/mockserver_tests/test_basics.py rename to tests/mockserver_tests/test_basics.py From d51b098f78483781bf72bf027104a37e4af1a21e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Fri, 25 Oct 2024 14:23:13 +0200 Subject: [PATCH 04/15] feat: add database admin service Adds a DatabaseAdminService to the mock server and sets up a basic test case for this. Also removes the generated stubs in the grpc files, as these are not needed. --- .../spanner_v1/testing/mock_database_admin.py | 37 + .../cloud/spanner_v1/testing/mock_spanner.py | 21 +- .../spanner_database_admin_pb2_grpc.py | 1187 +++++++++++++++++ .../spanner_v1/testing/spanner_pb2_grpc.py | 111 -- tests/mockserver_tests/test_basics.py | 26 + 5 files changed, 1263 insertions(+), 119 deletions(-) create mode 100644 google/cloud/spanner_v1/testing/mock_database_admin.py create mode 100644 google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py diff --git a/google/cloud/spanner_v1/testing/mock_database_admin.py b/google/cloud/spanner_v1/testing/mock_database_admin.py new file mode 100644 index 0000000000..73f8df92e3 --- /dev/null +++ b/google/cloud/spanner_v1/testing/mock_database_admin.py @@ -0,0 +1,37 @@ +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc +from google.longrunning import operations_pb2 as operations_pb2 + + +# An in-memory mock DatabaseAdmin server that can be used for testing. +class DatabaseAdminServicer(database_admin_grpc.DatabaseAdminServicer): + def __init__(self): + self._requests = [] + + @property + def requests(self): + return self._requests + + def UpdateDatabaseDdl(self, request, context): + self._requests.append(request) + operation = operations_pb2.Operation() + operation.done = True + operation.name = "projects/test-project/operations/test-operation" + operation.response.Pack(empty_pb2.Empty()) + return operation diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 6251e26883..8d5192bfc6 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -15,6 +15,9 @@ from google.protobuf import empty_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore import google.cloud.spanner_v1.testing.spanner_pb2_grpc as spanner_grpc +import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc +from google.cloud.spanner_v1.testing.mock_database_admin import \ + DatabaseAdminServicer import google.cloud.spanner_v1.types.result_set as result_set import 
google.cloud.spanner_v1.types.transaction as transaction import google.cloud.spanner_v1.types.commit_response as commit @@ -134,15 +137,17 @@ def BatchWrite(self, request, context): yield result -def start_mock_server() -> (grpc.Server, SpannerServicer, int): - spanner_servicer = SpannerServicer() +def start_mock_server() -> (grpc.Server, SpannerServicer, DatabaseAdminServicer, int): + # Create a gRPC server. spanner_server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + + # Add the Spanner services to the gRPC server. + spanner_servicer = SpannerServicer() spanner_grpc.add_SpannerServicer_to_server(spanner_servicer, spanner_server) + database_admin_servicer = DatabaseAdminServicer() + database_admin_grpc.add_DatabaseAdminServicer_to_server(database_admin_servicer, spanner_server) + + # Start the server on a random port. port = spanner_server.add_insecure_port("[::]:0") spanner_server.start() - return spanner_server, spanner_servicer, port - - -if __name__ == "__main__": - server, _ = start_mock_server() - server.wait_for_termination() + return spanner_server, spanner_servicer, database_admin_servicer, port diff --git a/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py new file mode 100644 index 0000000000..8a9c983d0d --- /dev/null +++ b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py @@ -0,0 +1,1187 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! + + +# Generated with the following commands: +# +# pip install grpcio-tools +# git clone git@github.com:googleapis/googleapis.git +# cd googleapis +# python -m grpc_tools.protoc \ +# -I . \ +# --python_out=. --pyi_out=. --grpc_python_out=. 
\ +# ./google/spanner/admin/database/v1/*.proto + +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.spanner_admin_database_v1.types import backup as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2 +from google.cloud.spanner_admin_database_v1.types import backup_schedule as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2 +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2 + +GRPC_GENERATED_VERSION = '1.67.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in google/spanner/admin/database/v1/spanner_database_admin_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
+ ) + + +class DatabaseAdminServicer(object): + """Cloud Spanner Database Admin API + + The Cloud Spanner Database Admin API can be used to: + * create, drop, and list databases + * update the schema of pre-existing databases + * create, delete, copy and list backups for a database + * restore a database from an existing backup + """ + + def ListDatabases(self, request, context): + """Lists Cloud Spanner databases. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateDatabase(self, request, context): + """Creates a new Cloud Spanner database and starts to prepare it for serving. + The returned [long-running operation][google.longrunning.Operation] will + have a name of the format `/operations/` and + can be used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type is + [Database][google.spanner.admin.database.v1.Database], if successful. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDatabase(self, request, context): + """Gets the state of a Cloud Spanner database. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateDatabase(self, request, context): + """Updates a Cloud Spanner database. The returned + [long-running operation][google.longrunning.Operation] can be used to track + the progress of updating the database. If the named database does not + exist, returns `NOT_FOUND`. 
+ + While the operation is pending: + + * The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field is set to true. + * Cancelling the operation is best-effort. If the cancellation succeeds, + the operation metadata's + [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] + is set, the updates are reverted, and the operation terminates with a + `CANCELLED` status. + * New UpdateDatabase requests will return a `FAILED_PRECONDITION` error + until the pending operation is done (returns successfully or with + error). + * Reading the database via the API continues to give the pre-request + values. + + Upon completion of the returned operation: + + * The new values are in effect and readable via the API. + * The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field becomes false. + + The returned [long-running operation][google.longrunning.Operation] will + have a name of the format + `projects//instances//databases//operations/` + and can be used to track the database modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type is + [Database][google.spanner.admin.database.v1.Database], if successful. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateDatabaseDdl(self, request, context): + """Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The returned + [long-running operation][google.longrunning.Operation] will have a name of + the format `/operations/` and can be used to + track execution of the schema change(s). 
The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DropDatabase(self, request, context): + """Drops (aka deletes) a Cloud Spanner database. + Completed backups for the database will be retained according to their + `expire_time`. + Note: Cloud Spanner might continue to accept requests for a few seconds + after the database has been deleted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDatabaseDdl(self, request, context): + """Returns the schema of a Cloud Spanner database as a list of formatted + DDL statements. This method does not show pending schema updates, those may + be queried using the [Operations][google.longrunning.Operations] API. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetIamPolicy(self, request, context): + """Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires `spanner.databases.setIamPolicy` + permission on [resource][google.iam.v1.SetIamPolicyRequest.resource]. + For backups, authorization requires `spanner.backups.setIamPolicy` + permission on [resource][google.iam.v1.SetIamPolicyRequest.resource]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetIamPolicy(self, request, context): + """Gets the access control policy for a database or backup resource. 
+ Returns an empty policy if a database or backup exists but does not have a + policy set. + + Authorization requires `spanner.databases.getIamPolicy` permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + For backups, authorization requires `spanner.backups.getIamPolicy` + permission on [resource][google.iam.v1.GetIamPolicyRequest.resource]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TestIamPermissions(self, request, context): + """Returns permissions that the caller has on the specified database or backup + resource. + + Attempting this RPC on a non-existent Cloud Spanner database will + result in a NOT_FOUND error if the user has + `spanner.databases.list` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will + result in a NOT_FOUND error if the user has + `spanner.backups.list` permission on the containing instance. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateBackup(self, request, context): + """Starts creating a new Cloud Spanner Backup. + The returned backup [long-running operation][google.longrunning.Operation] + will have a name of the format + `projects//instances//backups//operations/` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type is + [Backup][google.spanner.admin.database.v1.Backup], if successful. + Cancelling the returned operation will stop the creation and delete the + backup. There can be only one pending backup creation per database. 
Backup + creation of different databases can run concurrently. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CopyBackup(self, request, context): + """Starts copying a Cloud Spanner Backup. + The returned backup [long-running operation][google.longrunning.Operation] + will have a name of the format + `projects//instances//backups//operations/` + and can be used to track copying of the backup. The operation is associated + with the destination backup. + The [metadata][google.longrunning.Operation.metadata] field type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + The [response][google.longrunning.Operation.response] field type is + [Backup][google.spanner.admin.database.v1.Backup], if successful. + Cancelling the returned operation will stop the copying and delete the + destination backup. Concurrent CopyBackup requests can run on the same + source backup. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetBackup(self, request, context): + """Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateBackup(self, request, context): + """Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteBackup(self, request, context): + """Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListBackups(self, request, context): + """Lists completed and pending backups. + Backups returned are ordered by `create_time` in descending order, + starting from the most recent `create_time`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RestoreDatabase(self, request, context): + """Create a new database by restoring from a completed backup. The new + database must be in the same project and in an instance with the same + instance configuration as the instance containing + the backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the format + `projects//instances//databases//operations/`, + and can be used to track the progress of the operation, and to cancel it. + The [metadata][google.longrunning.Operation.metadata] field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type + is [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the restore and + delete the database. + There can be only one database being restored into an instance at a time. + Once the restore operation completes, a new restore operation can be + initiated, without waiting for the optimize operation associated with the + first restore to complete. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListDatabaseOperations(self, request, context): + """Lists database [longrunning-operations][google.longrunning.Operation]. 
+ A database operation has a name of the form + `projects//instances//databases//operations/`. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + `metadata.type_url` describes the type of the metadata. Operations returned + include those that have completed/failed/canceled within the last 7 days, + and pending operations. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListBackupOperations(self, request, context): + """Lists the backup [long-running operations][google.longrunning.Operation] in + the given instance. A backup operation has a name of the form + `projects//instances//backups//operations/`. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + `metadata.type_url` describes the type of the metadata. Operations returned + include those that have completed/failed/canceled within the last 7 days, + and pending operations. Operations returned are ordered by + `operation.metadata.value.progress.start_time` in descending order starting + from the most recently started operation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListDatabaseRoles(self, request, context): + """Lists Cloud Spanner database roles. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateBackupSchedule(self, request, context): + """Creates a new backup schedule. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetBackupSchedule(self, request, context): + """Gets backup schedule for the input schedule name. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateBackupSchedule(self, request, context): + """Updates a backup schedule. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteBackupSchedule(self, request, context): + """Deletes a backup schedule. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListBackupSchedules(self, request, context): + """Lists all the backup schedules for the database. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DatabaseAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListDatabases': grpc.unary_unary_rpc_method_handler( + servicer.ListDatabases, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.serialize, + ), + 'CreateDatabase': grpc.unary_unary_rpc_method_handler( + servicer.CreateDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'GetDatabase': grpc.unary_unary_rpc_method_handler( + servicer.GetDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.deserialize, + 
response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.serialize, + ), + 'UpdateDatabase': grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'UpdateDatabaseDdl': grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatabaseDdl, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'DropDatabase': grpc.unary_unary_rpc_method_handler( + servicer.DropDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetDatabaseDdl': grpc.unary_unary_rpc_method_handler( + servicer.GetDatabaseDdl, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.serialize, + ), + 'SetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.SetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 'GetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.GetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 
'TestIamPermissions': grpc.unary_unary_rpc_method_handler( + servicer.TestIamPermissions, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, + ), + 'CreateBackup': grpc.unary_unary_rpc_method_handler( + servicer.CreateBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'CopyBackup': grpc.unary_unary_rpc_method_handler( + servicer.CopyBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'GetBackup': grpc.unary_unary_rpc_method_handler( + servicer.GetBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, + ), + 'UpdateBackup': grpc.unary_unary_rpc_method_handler( + servicer.UpdateBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, + ), + 'DeleteBackup': grpc.unary_unary_rpc_method_handler( + servicer.DeleteBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListBackups': grpc.unary_unary_rpc_method_handler( + servicer.ListBackups, + 
request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.serialize, + ), + 'RestoreDatabase': grpc.unary_unary_rpc_method_handler( + servicer.RestoreDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'ListDatabaseOperations': grpc.unary_unary_rpc_method_handler( + servicer.ListDatabaseOperations, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.serialize, + ), + 'ListBackupOperations': grpc.unary_unary_rpc_method_handler( + servicer.ListBackupOperations, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.serialize, + ), + 'ListDatabaseRoles': grpc.unary_unary_rpc_method_handler( + servicer.ListDatabaseRoles, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.serialize, + ), + 'CreateBackupSchedule': grpc.unary_unary_rpc_method_handler( + servicer.CreateBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.deserialize, + 
response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + 'GetBackupSchedule': grpc.unary_unary_rpc_method_handler( + servicer.GetBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + 'UpdateBackupSchedule': grpc.unary_unary_rpc_method_handler( + servicer.UpdateBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + 'DeleteBackupSchedule': grpc.unary_unary_rpc_method_handler( + servicer.DeleteBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListBackupSchedules': grpc.unary_unary_rpc_method_handler( + servicer.ListBackupSchedules, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.serialize, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.spanner.admin.database.v1.DatabaseAdmin', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('google.spanner.admin.database.v1.DatabaseAdmin', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class DatabaseAdmin(object): + """Cloud Spanner Database Admin API + + The Cloud Spanner Database Admin API can be used to: + * create, drop, and list databases + * update the schema of pre-existing databases + * create, delete, copy and list backups for a database + * restore a database from an existing backup + """ + + @staticmethod + def ListDatabases(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateDatabase(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetDatabase(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateDatabase(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateDatabaseDdl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DropDatabase(request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetDatabaseDdl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SetIamPolicy(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, + google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetIamPolicy(request, 
+ target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, + google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def TestIamPermissions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', + google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, + google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CopyBackup(request, + target, + 
options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def 
DeleteBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListBackups(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def RestoreDatabase(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + 
_registered_method=True) + + @staticmethod + def ListDatabaseOperations(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListBackupOperations(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListDatabaseRoles(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + 
google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateBackupSchedule(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetBackupSchedule(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def UpdateBackupSchedule(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + 
insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteBackupSchedule(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListBackupSchedules(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.SerializeToString, + google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + 
timeout, + metadata, + _registered_method=True) diff --git a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py index a9106764ae..dcbd55ccb5 100644 --- a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py @@ -51,117 +51,6 @@ ) -class SpannerStub(object): - """Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateSession = channel.unary_unary( - "/google.spanner.v1.Spanner/CreateSession", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CreateSessionRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, - _registered_method=True, - ) - self.BatchCreateSessions = channel.unary_unary( - "/google.spanner.v1.Spanner/BatchCreateSessions", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchCreateSessionsResponse.from_json, - _registered_method=True, - ) - self.GetSession = channel.unary_unary( - "/google.spanner.v1.Spanner/GetSession", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.GetSessionRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.Session.from_json, - _registered_method=True, - ) - self.ListSessions = channel.unary_unary( - "/google.spanner.v1.Spanner/ListSessions", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ListSessionsResponse.from_json, - _registered_method=True, - ) - self.DeleteSession = channel.unary_unary( - "/google.spanner.v1.Spanner/DeleteSession", - 
request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.DeleteSessionRequest.to_json, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, - _registered_method=True, - ) - self.ExecuteSql = channel.unary_unary( - "/google.spanner.v1.Spanner/ExecuteSql", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, - _registered_method=True, - ) - self.ExecuteStreamingSql = channel.unary_stream( - "/google.spanner.v1.Spanner/ExecuteStreamingSql", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteSqlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, - _registered_method=True, - ) - self.ExecuteBatchDml = channel.unary_unary( - "/google.spanner.v1.Spanner/ExecuteBatchDml", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ExecuteBatchDmlResponse.from_json, - _registered_method=True, - ) - self.Read = channel.unary_unary( - "/google.spanner.v1.Spanner/Read", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.ResultSet.from_json, - _registered_method=True, - ) - self.StreamingRead = channel.unary_stream( - "/google.spanner.v1.Spanner/StreamingRead", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.ReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_result__set__pb2.PartialResultSet.from_json, - _registered_method=True, - ) - self.BeginTransaction = channel.unary_unary( - "/google.spanner.v1.Spanner/BeginTransaction", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BeginTransactionRequest.to_json, - 
response_deserializer=google_dot_spanner_dot_v1_dot_transaction__pb2.Transaction.from_json, - _registered_method=True, - ) - self.Commit = channel.unary_unary( - "/google.spanner.v1.Spanner/Commit", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.CommitRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_commit__response__pb2.CommitResponse.from_json, - _registered_method=True, - ) - self.Rollback = channel.unary_unary( - "/google.spanner.v1.Spanner/Rollback", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.RollbackRequest.to_json, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.from_json, - _registered_method=True, - ) - self.PartitionQuery = channel.unary_unary( - "/google.spanner.v1.Spanner/PartitionQuery", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionQueryRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, - _registered_method=True, - ) - self.PartitionRead = channel.unary_unary( - "/google.spanner.v1.Spanner/PartitionRead", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionReadRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.PartitionResponse.from_json, - _registered_method=True, - ) - self.BatchWrite = channel.unary_stream( - "/google.spanner.v1.Spanner/BatchWrite", - request_serializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteRequest.to_json, - response_deserializer=google_dot_spanner_dot_v1_dot_spanner__pb2.BatchWriteResponse.from_json, - _registered_method=True, - ) - - class SpannerServicer(object): """Cloud Spanner API diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index 407bbc88cb..5c39d6c749 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -13,6 +13,11 @@ # limitations under the License. 
import unittest + +from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.cloud.spanner_v1.testing.mock_database_admin import \ + DatabaseAdminServicer from google.cloud.spanner_v1.testing.mock_spanner import ( start_mock_server, SpannerServicer, @@ -35,6 +40,7 @@ class TestBasics(unittest.TestCase): server: grpc.Server = None spanner_service: SpannerServicer = None + database_admin_service: DatabaseAdminServicer = None port: int = None def __init__(self, *args, **kwargs): @@ -48,6 +54,7 @@ def setUpClass(cls): ( TestBasics.server, TestBasics.spanner_service, + TestBasics.database_admin_service, TestBasics.port, ) = start_mock_server() @@ -119,3 +126,22 @@ def test_select1(self): self.assertEqual(2, len(requests)) self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) + + def test_create_table(self): + database_admin_api = self.client.database_admin_api + request = spanner_database_admin.UpdateDatabaseDdlRequest(dict( + database=database_admin_api.database_path( + "test-project", "test-instance", "test-database" + ), + statements=[ + "CREATE TABLE Test (" + "Id INT64, " + "Value STRING(MAX)) " + "PRIMARY KEY (Id)", + ], + )) + operation = database_admin_api.update_database_ddl(request) + operation.result(1) + requests = self.database_admin_service.requests + self.assertEqual(1, len(requests)) + self.assertTrue(isinstance(requests[0], UpdateDatabaseDdlRequest)) From 505442fa1845f77710165f69f9d86bcbe9cab941 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Mon, 28 Oct 2024 11:21:26 +0100 Subject: [PATCH 05/15] test: add DDL test --- .../spanner_v1/testing/mock_database_admin.py | 10 +- .../cloud/spanner_v1/testing/mock_spanner.py | 29 +- .../spanner_database_admin_pb2_grpc.py | 1096 +++++++++-------- .../spanner_v1/testing/spanner_pb2_grpc.py | 5 +- 
tests/mockserver_tests/test_basics.py | 31 +- 5 files changed, 627 insertions(+), 544 deletions(-) diff --git a/google/cloud/spanner_v1/testing/mock_database_admin.py b/google/cloud/spanner_v1/testing/mock_database_admin.py index 73f8df92e3..fe9ac979eb 100644 --- a/google/cloud/spanner_v1/testing/mock_database_admin.py +++ b/google/cloud/spanner_v1/testing/mock_database_admin.py @@ -12,11 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc from google.longrunning import operations_pb2 as operations_pb2 +from google.protobuf import empty_pb2 + +import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc # An in-memory mock DatabaseAdmin server that can be used for testing. @@ -28,6 +27,9 @@ def __init__(self): def requests(self): return self._requests + def clear_requests(self): + self._requests = [] + def UpdateDatabaseDdl(self, request, context): self._requests.append(request) operation = operations_pb2.Operation() diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 8d5192bfc6..6afc56eadc 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -12,18 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -import google.cloud.spanner_v1.testing.spanner_pb2_grpc as spanner_grpc +from concurrent import futures + +from google.protobuf import empty_pb2 +import grpc + +from google.cloud.spanner_v1.testing.mock_database_admin import DatabaseAdminServicer import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc -from google.cloud.spanner_v1.testing.mock_database_admin import \ - DatabaseAdminServicer -import google.cloud.spanner_v1.types.result_set as result_set -import google.cloud.spanner_v1.types.transaction as transaction +import google.cloud.spanner_v1.testing.spanner_pb2_grpc as spanner_grpc import google.cloud.spanner_v1.types.commit_response as commit +import google.cloud.spanner_v1.types.result_set as result_set import google.cloud.spanner_v1.types.spanner as spanner -from concurrent import futures -import grpc +import google.cloud.spanner_v1.types.transaction as transaction class MockSpanner: @@ -31,12 +31,12 @@ def __init__(self): self.results = {} def add_result(self, sql: str, result: result_set.ResultSet): - self.results[sql] = result + self.results[sql.lower()] = result def get_result_as_partial_result_sets( self, sql: str ) -> [result_set.PartialResultSet]: - result: result_set.ResultSet = self.results.get(sql) + result: result_set.ResultSet = self.results.get(sql.lower()) if result is None: return [] partials = [] @@ -66,6 +66,9 @@ def mock_spanner(self): def requests(self): return self._requests + def clear_requests(self): + self._requests = [] + def CreateSession(self, request, context): self._requests.append(request) return self.__create_session(request.database, request.session) @@ -145,7 +148,9 @@ def start_mock_server() -> (grpc.Server, SpannerServicer, DatabaseAdminServicer, spanner_servicer = SpannerServicer() spanner_grpc.add_SpannerServicer_to_server(spanner_servicer, spanner_server) 
database_admin_servicer = DatabaseAdminServicer() - database_admin_grpc.add_DatabaseAdminServicer_to_server(database_admin_servicer, spanner_server) + database_admin_grpc.add_DatabaseAdminServicer_to_server( + database_admin_servicer, spanner_server + ) # Start the server on a random port. port = spanner_server.add_insecure_port("[::]:0") diff --git a/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py index 8a9c983d0d..3001ce5c6a 100644 --- a/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py @@ -12,34 +12,44 @@ # ./google/spanner/admin/database/v1/*.proto """Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.cloud.spanner_admin_database_v1.types import backup as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2 -from google.cloud.spanner_admin_database_v1.types import backup_schedule as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2 -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2 +import grpc -GRPC_GENERATED_VERSION = '1.67.0' +from google.cloud.spanner_admin_database_v1.types import ( + backup as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2, +) +from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule 
as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2, +) +from google.cloud.spanner_admin_database_v1.types import ( + spanner_database_admin as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2, +) + +GRPC_GENERATED_VERSION = "1.67.0" GRPC_VERSION = grpc.__version__ _version_not_supported = False try: from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) except ImportError: _version_not_supported = True if _version_not_supported: raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in google/spanner/admin/database/v1/spanner_database_admin_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + f"The grpc package installed is at version {GRPC_VERSION}," + + " but the generated code in google/spanner/admin/database/v1/spanner_database_admin_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." ) @@ -54,11 +64,10 @@ class DatabaseAdminServicer(object): """ def ListDatabases(self, request, context): - """Lists Cloud Spanner databases. - """ + """Lists Cloud Spanner databases.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def CreateDatabase(self, request, context): """Creates a new Cloud Spanner database and starts to prepare it for serving. 
@@ -71,15 +80,14 @@ def CreateDatabase(self, request, context): [Database][google.spanner.admin.database.v1.Database], if successful. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetDatabase(self, request, context): - """Gets the state of a Cloud Spanner database. - """ + """Gets the state of a Cloud Spanner database.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def UpdateDatabase(self, request, context): """Updates a Cloud Spanner database. The returned @@ -120,8 +128,8 @@ def UpdateDatabase(self, request, context): [Database][google.spanner.admin.database.v1.Database], if successful. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def UpdateDatabaseDdl(self, request, context): """Updates the schema of a Cloud Spanner database by @@ -134,8 +142,8 @@ def UpdateDatabaseDdl(self, request, context): The operation has no response. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def DropDatabase(self, request, context): """Drops (aka deletes) a Cloud Spanner database. @@ -145,8 +153,8 @@ def DropDatabase(self, request, context): after the database has been deleted. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetDatabaseDdl(self, request, context): """Returns the schema of a Cloud Spanner database as a list of formatted @@ -154,8 +162,8 @@ def GetDatabaseDdl(self, request, context): be queried using the [Operations][google.longrunning.Operations] API. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def SetIamPolicy(self, request, context): """Sets the access control policy on a database or backup resource. @@ -167,8 +175,8 @@ def SetIamPolicy(self, request, context): permission on [resource][google.iam.v1.SetIamPolicyRequest.resource]. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetIamPolicy(self, request, context): """Gets the access control policy for a database or backup resource. @@ -181,8 +189,8 @@ def GetIamPolicy(self, request, context): permission on [resource][google.iam.v1.GetIamPolicyRequest.resource]. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def TestIamPermissions(self, request, context): """Returns permissions that the caller has on the specified database or backup @@ -197,8 +205,8 @@ def TestIamPermissions(self, request, context): `spanner.backups.list` permission on the containing instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def CreateBackup(self, request, context): """Starts creating a new Cloud Spanner Backup. @@ -215,8 +223,8 @@ def CreateBackup(self, request, context): creation of different databases can run concurrently. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def CopyBackup(self, request, context): """Starts copying a Cloud Spanner Backup. @@ -234,32 +242,32 @@ def CopyBackup(self, request, context): source backup. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetBackup(self, request, context): """Gets metadata on a pending or completed [Backup][google.spanner.admin.database.v1.Backup]. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def UpdateBackup(self, request, context): """Updates a pending or completed [Backup][google.spanner.admin.database.v1.Backup]. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def DeleteBackup(self, request, context): """Deletes a pending or completed [Backup][google.spanner.admin.database.v1.Backup]. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListBackups(self, request, context): """Lists completed and pending backups. @@ -267,8 +275,8 @@ def ListBackups(self, request, context): starting from the most recent `create_time`. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def RestoreDatabase(self, request, context): """Create a new database by restoring from a completed backup. The new @@ -290,8 +298,8 @@ def RestoreDatabase(self, request, context): first restore to complete. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListDatabaseOperations(self, request, context): """Lists database [longrunning-operations][google.longrunning.Operation]. @@ -304,8 +312,8 @@ def ListDatabaseOperations(self, request, context): and pending operations. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListBackupOperations(self, request, context): """Lists the backup [long-running operations][google.longrunning.Operation] in @@ -320,187 +328,184 @@ def ListBackupOperations(self, request, context): from the most recently started operation. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListDatabaseRoles(self, request, context): - """Lists Cloud Spanner database roles. - """ + """Lists Cloud Spanner database roles.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def CreateBackupSchedule(self, request, context): - """Creates a new backup schedule. 
- """ + """Creates a new backup schedule.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def GetBackupSchedule(self, request, context): - """Gets backup schedule for the input schedule name. - """ + """Gets backup schedule for the input schedule name.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def UpdateBackupSchedule(self, request, context): - """Updates a backup schedule. - """ + """Updates a backup schedule.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def DeleteBackupSchedule(self, request, context): - """Deletes a backup schedule. - """ + """Deletes a backup schedule.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def ListBackupSchedules(self, request, context): - """Lists all the backup schedules for the database. 
- """ + """Lists all the backup schedules for the database.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_DatabaseAdminServicer_to_server(servicer, server): rpc_method_handlers = { - 'ListDatabases': grpc.unary_unary_rpc_method_handler( - servicer.ListDatabases, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.serialize, - ), - 'CreateDatabase': grpc.unary_unary_rpc_method_handler( - servicer.CreateDatabase, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.deserialize, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'GetDatabase': grpc.unary_unary_rpc_method_handler( - servicer.GetDatabase, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.serialize, - ), - 'UpdateDatabase': grpc.unary_unary_rpc_method_handler( - servicer.UpdateDatabase, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.deserialize, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'UpdateDatabaseDdl': grpc.unary_unary_rpc_method_handler( - servicer.UpdateDatabaseDdl, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.deserialize, - 
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'DropDatabase': grpc.unary_unary_rpc_method_handler( - servicer.DropDatabase, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.deserialize, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'GetDatabaseDdl': grpc.unary_unary_rpc_method_handler( - servicer.GetDatabaseDdl, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.serialize, - ), - 'SetIamPolicy': grpc.unary_unary_rpc_method_handler( - servicer.SetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - 'GetIamPolicy': grpc.unary_unary_rpc_method_handler( - servicer.GetIamPolicy, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, - ), - 'TestIamPermissions': grpc.unary_unary_rpc_method_handler( - servicer.TestIamPermissions, - request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, - response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, - ), - 'CreateBackup': grpc.unary_unary_rpc_method_handler( - servicer.CreateBackup, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.deserialize, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'CopyBackup': grpc.unary_unary_rpc_method_handler( - servicer.CopyBackup, - 
request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.deserialize, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'GetBackup': grpc.unary_unary_rpc_method_handler( - servicer.GetBackup, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, - ), - 'UpdateBackup': grpc.unary_unary_rpc_method_handler( - servicer.UpdateBackup, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, - ), - 'DeleteBackup': grpc.unary_unary_rpc_method_handler( - servicer.DeleteBackup, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.deserialize, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ListBackups': grpc.unary_unary_rpc_method_handler( - servicer.ListBackups, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.serialize, - ), - 'RestoreDatabase': grpc.unary_unary_rpc_method_handler( - servicer.RestoreDatabase, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.deserialize, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'ListDatabaseOperations': grpc.unary_unary_rpc_method_handler( - servicer.ListDatabaseOperations, - 
request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.serialize, - ), - 'ListBackupOperations': grpc.unary_unary_rpc_method_handler( - servicer.ListBackupOperations, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.serialize, - ), - 'ListDatabaseRoles': grpc.unary_unary_rpc_method_handler( - servicer.ListDatabaseRoles, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.serialize, - ), - 'CreateBackupSchedule': grpc.unary_unary_rpc_method_handler( - servicer.CreateBackupSchedule, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, - ), - 'GetBackupSchedule': grpc.unary_unary_rpc_method_handler( - servicer.GetBackupSchedule, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, - ), - 'UpdateBackupSchedule': grpc.unary_unary_rpc_method_handler( - servicer.UpdateBackupSchedule, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.deserialize, - 
response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, - ), - 'DeleteBackupSchedule': grpc.unary_unary_rpc_method_handler( - servicer.DeleteBackupSchedule, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.deserialize, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ListBackupSchedules': grpc.unary_unary_rpc_method_handler( - servicer.ListBackupSchedules, - request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.deserialize, - response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.serialize, - ), + "ListDatabases": grpc.unary_unary_rpc_method_handler( + servicer.ListDatabases, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.serialize, + ), + "CreateDatabase": grpc.unary_unary_rpc_method_handler( + servicer.CreateDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetDatabase": grpc.unary_unary_rpc_method_handler( + servicer.GetDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.serialize, + ), + "UpdateDatabase": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatabase, + 
request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "UpdateDatabaseDdl": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatabaseDdl, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "DropDatabase": grpc.unary_unary_rpc_method_handler( + servicer.DropDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "GetDatabaseDdl": grpc.unary_unary_rpc_method_handler( + servicer.GetDatabaseDdl, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.serialize, + ), + "SetIamPolicy": grpc.unary_unary_rpc_method_handler( + servicer.SetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + "GetIamPolicy": grpc.unary_unary_rpc_method_handler( + servicer.GetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + "TestIamPermissions": grpc.unary_unary_rpc_method_handler( + servicer.TestIamPermissions, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, + 
response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, + ), + "CreateBackup": grpc.unary_unary_rpc_method_handler( + servicer.CreateBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CopyBackup": grpc.unary_unary_rpc_method_handler( + servicer.CopyBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetBackup": grpc.unary_unary_rpc_method_handler( + servicer.GetBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, + ), + "UpdateBackup": grpc.unary_unary_rpc_method_handler( + servicer.UpdateBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.serialize, + ), + "DeleteBackup": grpc.unary_unary_rpc_method_handler( + servicer.DeleteBackup, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "ListBackups": grpc.unary_unary_rpc_method_handler( + servicer.ListBackups, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.serialize, + ), + "RestoreDatabase": grpc.unary_unary_rpc_method_handler( + 
servicer.RestoreDatabase, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.deserialize, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ListDatabaseOperations": grpc.unary_unary_rpc_method_handler( + servicer.ListDatabaseOperations, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.serialize, + ), + "ListBackupOperations": grpc.unary_unary_rpc_method_handler( + servicer.ListBackupOperations, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.serialize, + ), + "ListDatabaseRoles": grpc.unary_unary_rpc_method_handler( + servicer.ListDatabaseRoles, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.serialize, + ), + "CreateBackupSchedule": grpc.unary_unary_rpc_method_handler( + servicer.CreateBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + "GetBackupSchedule": grpc.unary_unary_rpc_method_handler( + servicer.GetBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.deserialize, + 
response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + "UpdateBackupSchedule": grpc.unary_unary_rpc_method_handler( + servicer.UpdateBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.serialize, + ), + "DeleteBackupSchedule": grpc.unary_unary_rpc_method_handler( + servicer.DeleteBackupSchedule, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.deserialize, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "ListBackupSchedules": grpc.unary_unary_rpc_method_handler( + servicer.ListBackupSchedules, + request_deserializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.deserialize, + response_serializer=google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.serialize, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'google.spanner.admin.database.v1.DatabaseAdmin', rpc_method_handlers) + "google.spanner.admin.database.v1.DatabaseAdmin", rpc_method_handlers + ) server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('google.spanner.admin.database.v1.DatabaseAdmin', rpc_method_handlers) + server.add_registered_method_handlers( + "google.spanner.admin.database.v1.DatabaseAdmin", rpc_method_handlers + ) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. 
class DatabaseAdmin(object): """Cloud Spanner Database Admin API @@ -512,20 +517,22 @@ class DatabaseAdmin(object): """ @staticmethod - def ListDatabases(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListDatabases( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabasesResponse.FromString, options, @@ -536,23 +543,26 @@ def ListDatabases(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def CreateDatabase(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def CreateDatabase( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + "/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.CreateDatabaseRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -563,23 +573,26 @@ def CreateDatabase(request, 
wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetDatabase(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetDatabase( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', + "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.Database.FromString, options, @@ -590,23 +603,26 @@ def GetDatabase(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def UpdateDatabase(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def UpdateDatabase( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -617,23 +633,26 @@ def UpdateDatabase(request, wait_for_ready, timeout, metadata, - _registered_method=True) + 
_registered_method=True, + ) @staticmethod - def UpdateDatabaseDdl(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def UpdateDatabaseDdl( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.UpdateDatabaseDdlRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -644,23 +663,26 @@ def UpdateDatabaseDdl(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def DropDatabase(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def DropDatabase( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + "/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.DropDatabaseRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, options, @@ -671,23 +693,26 @@ def DropDatabase(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetDatabaseDdl(request, - target, - 
options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetDatabaseDdl( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.GetDatabaseDdlResponse.FromString, options, @@ -698,23 +723,26 @@ def GetDatabaseDdl(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def SetIamPolicy(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def SetIamPolicy( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + "/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy", google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, options, @@ -725,23 +753,26 @@ def SetIamPolicy(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetIamPolicy(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - 
compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetIamPolicy( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + "/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy", google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, options, @@ -752,23 +783,26 @@ def GetIamPolicy(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def TestIamPermissions(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def TestIamPermissions( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', + "/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions", google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, options, @@ -779,23 +813,26 @@ def TestIamPermissions(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def CreateBackup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def CreateBackup( + request, + target, + options=(), + 
channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', + "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CreateBackupRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -806,23 +843,26 @@ def CreateBackup(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def CopyBackup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def CopyBackup( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', + "/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.CopyBackupRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -833,23 +873,26 @@ def CopyBackup(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetBackup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetBackup( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return 
grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.GetBackupRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString, options, @@ -860,23 +903,26 @@ def GetBackup(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def UpdateBackup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def UpdateBackup( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.UpdateBackupRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.Backup.FromString, options, @@ -887,23 +933,26 @@ def UpdateBackup(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def DeleteBackup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def DeleteBackup( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + 
"/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.DeleteBackupRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, options, @@ -914,23 +963,26 @@ def DeleteBackup(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListBackups(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListBackups( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', + "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupsResponse.FromString, options, @@ -941,23 +993,26 @@ def ListBackups(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def RestoreDatabase(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def RestoreDatabase( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', + "/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase", 
google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.RestoreDatabaseRequest.SerializeToString, google_dot_longrunning_dot_operations__pb2.Operation.FromString, options, @@ -968,23 +1023,26 @@ def RestoreDatabase(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListDatabaseOperations(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListDatabaseOperations( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseOperationsResponse.FromString, options, @@ -995,23 +1053,26 @@ def ListDatabaseOperations(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListBackupOperations(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListBackupOperations( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + 
"/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2.ListBackupOperationsResponse.FromString, options, @@ -1022,23 +1083,26 @@ def ListBackupOperations(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListDatabaseRoles(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListDatabaseRoles( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_spanner__database__admin__pb2.ListDatabaseRolesResponse.FromString, options, @@ -1049,23 +1113,26 @@ def ListDatabaseRoles(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def CreateBackupSchedule(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def CreateBackupSchedule( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - 
'/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', + "/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.CreateBackupScheduleRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, options, @@ -1076,23 +1143,26 @@ def CreateBackupSchedule(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def GetBackupSchedule(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def GetBackupSchedule( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', + "/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.GetBackupScheduleRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, options, @@ -1103,23 +1173,26 @@ def GetBackupSchedule(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def UpdateBackupSchedule(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def UpdateBackupSchedule( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return 
grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', + "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.UpdateBackupScheduleRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.BackupSchedule.FromString, options, @@ -1130,23 +1203,26 @@ def UpdateBackupSchedule(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def DeleteBackupSchedule(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def DeleteBackupSchedule( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', + "/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.DeleteBackupScheduleRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, options, @@ -1157,23 +1233,26 @@ def DeleteBackupSchedule(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) @staticmethod - def ListBackupSchedules(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def ListBackupSchedules( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return 
grpc.experimental.unary_unary( request, target, - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', + "/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules", google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesRequest.SerializeToString, google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__schedule__pb2.ListBackupSchedulesResponse.FromString, options, @@ -1184,4 +1263,5 @@ def ListBackupSchedules(request, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) diff --git a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py index dcbd55ccb5..90bb228553 100644 --- a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py @@ -11,10 +11,9 @@ # ./google/spanner/v1/*.proto """Client and server classes corresponding to protobuf-defined services.""" +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 import grpc -import warnings -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.cloud.spanner_v1.types import ( commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2, ) @@ -44,7 +43,7 @@ if _version_not_supported: raise RuntimeError( f"The grpc package installed is at version {GRPC_VERSION}," - + f" but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on" + + " but the generated code in google/spanner/v1/spanner_pb2_grpc.py depends on" + f" grpcio>={GRPC_GENERATED_VERSION}." + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." 
diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index 5c39d6c749..50333b28bb 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -14,10 +14,8 @@ import unittest -from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.cloud.spanner_v1.testing.mock_database_admin import \ - DatabaseAdminServicer +from google.cloud.spanner_v1.testing.mock_database_admin import DatabaseAdminServicer from google.cloud.spanner_v1.testing.mock_spanner import ( start_mock_server, SpannerServicer, @@ -129,19 +127,18 @@ def test_select1(self): def test_create_table(self): database_admin_api = self.client.database_admin_api - request = spanner_database_admin.UpdateDatabaseDdlRequest(dict( - database=database_admin_api.database_path( - "test-project", "test-instance", "test-database" - ), - statements=[ - "CREATE TABLE Test (" - "Id INT64, " - "Value STRING(MAX)) " - "PRIMARY KEY (Id)", - ], - )) + request = spanner_database_admin.UpdateDatabaseDdlRequest( + dict( + database=database_admin_api.database_path( + "test-project", "test-instance", "test-database" + ), + statements=[ + "CREATE TABLE Test (" + "Id INT64, " + "Value STRING(MAX)) " + "PRIMARY KEY (Id)", + ], + ) + ) operation = database_admin_api.update_database_ddl(request) operation.result(1) - requests = self.database_admin_service.requests - self.assertEqual(1, len(requests)) - self.assertTrue(isinstance(requests[0], UpdateDatabaseDdlRequest)) From 21557e04c90e8d1acb5d8bfd302ef50b15e2fd13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Thu, 28 Nov 2024 09:58:26 +0100 Subject: [PATCH 06/15] test: add async client tests --- google/cloud/spanner_v1/database.py | 28 ++++++- .../services/spanner/transports/grpc.py | 2 +- .../cloud/spanner_v1/testing/mock_spanner.py | 4 +- tests/mockserver_tests/test_basics.py | 
80 +++++++++++++------ 4 files changed, 84 insertions(+), 30 deletions(-) diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index abddd5d97d..5c858320c4 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -25,6 +25,7 @@ import google.auth.credentials from google.api_core.retry import Retry from google.api_core.retry import if_exception_type +from google.auth.aio.credentials import AnonymousCredentials from google.cloud.exceptions import NotFound from google.api_core.exceptions import Aborted from google.api_core import gapic_v1 @@ -41,7 +42,7 @@ from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest from google.cloud.spanner_admin_database_v1.types import DatabaseDialect from google.cloud.spanner_v1.transaction import BatchTransactionId -from google.cloud.spanner_v1 import ExecuteSqlRequest +from google.cloud.spanner_v1 import ExecuteSqlRequest, SpannerAsyncClient from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode from google.cloud.spanner_v1 import TransactionSelector @@ -142,7 +143,8 @@ class Database(object): statements in 'ddl_statements' above. 
""" - _spanner_api = None + _spanner_api: SpannerClient = None + _spanner_async_api: SpannerAsyncClient = None def __init__( self, @@ -438,6 +440,28 @@ def spanner_api(self): ) return self._spanner_api + @property + def spanner_async_api(self): + if self._spanner_async_api is None: + client_info = self._instance._client._client_info + client_options = self._instance._client._client_options + if self._instance.emulator_host is not None: + channel=grpc.aio.insecure_channel(target=self._instance.emulator_host) + transport = SpannerGrpcTransport(channel=channel) + self._spanner_async_api = SpannerAsyncClient( + client_info=client_info, transport=transport + ) + return self._spanner_async_api + credentials = self._instance._client.credentials + if isinstance(credentials, google.auth.credentials.Scoped): + credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,)) + self._spanner_async_api = SpannerAsyncClient( + credentials=credentials, + client_info=client_info, + client_options=client_options, + ) + return self._spanner_async_api + def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index a2afa32174..fce1002942 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -127,7 +127,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if isinstance(channel, grpc.Channel): + if isinstance(channel, grpc.Channel) or isinstance(channel, grpc.aio.Channel): # Ignore credentials if a channel was passed. 
credentials = None self._ignore_credentials = True diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 6afc56eadc..5484615dd2 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -102,7 +102,9 @@ def DeleteSession(self, request, context): return empty_pb2.Empty() def ExecuteSql(self, request, context): - return result_set.ResultSet() + self._requests.append(request) + result: result_set.ResultSet = self.mock_spanner.results.get(request.sql.lower()) + return result def ExecuteStreamingSql(self, request, context): self._requests.append(request) diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index 50333b28bb..ae23533df7 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import asyncio import unittest from google.cloud.spanner_admin_database_v1.types import spanner_database_admin @@ -28,7 +28,7 @@ Client, FixedSizePool, BatchCreateSessionsRequest, - ExecuteSqlRequest, + ExecuteSqlRequest, CreateSessionRequest, ) from google.cloud.spanner_v1.database import Database from google.cloud.spanner_v1.instance import Instance @@ -62,6 +62,32 @@ def tearDownClass(cls): TestBasics.server.stop(grace=None) TestBasics.server = None + def _add_select1_result(self): + result = result_set.ResultSet( + dict( + metadata=result_set.ResultSetMetadata( + dict( + row_type=spanner_type.StructType( + dict( + fields=[ + spanner_type.StructType.Field( + dict( + name="c", + type=spanner_type.Type( + dict(code=spanner_type.TypeCode.INT64) + ), + ) + ) + ] + ) + ) + ) + ), + ) + ) + result.rows.extend(["1"]) + TestBasics.spanner_service.mock_spanner.add_result("select 1", result) + @property def client(self) -> Client: if self._client is None: @@ -89,30 +115,7 @@ def database(self) -> Database: return self._database def test_select1(self): - result = result_set.ResultSet( - dict( - metadata=result_set.ResultSetMetadata( - dict( - row_type=spanner_type.StructType( - dict( - fields=[ - spanner_type.StructType.Field( - dict( - name="c", - type=spanner_type.Type( - dict(code=spanner_type.TypeCode.INT64) - ), - ) - ) - ] - ) - ) - ) - ), - ) - ) - result.rows.extend(["1"]) - TestBasics.spanner_service.mock_spanner.add_result("select 1", result) + self._add_select1_result() with self.database.snapshot() as snapshot: results = snapshot.execute_sql("select 1") result_list = [] @@ -142,3 +145,28 @@ def test_create_table(self): ) operation = database_admin_api.update_database_ddl(request) operation.result(1) + + + def test_async_select1(self): + self._add_select1_result() + results = asyncio.run(self._async_select1()) + result_list = [] + for row in results.rows: + result_list.append(row) + self.assertEqual("1", row[0]) + self.assertEqual(1, len(result_list)) + 
requests = self.spanner_service.requests + self.assertEqual(3, len(requests)) + self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) + self.assertTrue(isinstance(requests[1], CreateSessionRequest)) + self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + + async def _async_select1(self): + client = self.database.spanner_async_api + create_session_request = CreateSessionRequest(database=self._database.name) + session = await client.create_session(create_session_request) + execute_request = ExecuteSqlRequest(dict( + session=session.name, + sql="select 1", + )) + return await client.execute_sql(execute_request) From 39a11d0668de232329ff094806d62f47334db4ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Wed, 4 Dec 2024 15:10:11 +0100 Subject: [PATCH 07/15] chore: remove async + add transaction handling --- google/cloud/spanner_v1/database.py | 26 +----- .../services/spanner/transports/grpc.py | 2 +- .../cloud/spanner_v1/testing/mock_spanner.py | 83 ++++++++++++++++--- tests/mockserver_tests/test_basics.py | 37 ++------- 4 files changed, 80 insertions(+), 68 deletions(-) diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 5c858320c4..1e10e1df73 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -25,7 +25,6 @@ import google.auth.credentials from google.api_core.retry import Retry from google.api_core.retry import if_exception_type -from google.auth.aio.credentials import AnonymousCredentials from google.cloud.exceptions import NotFound from google.api_core.exceptions import Aborted from google.api_core import gapic_v1 @@ -42,7 +41,7 @@ from google.cloud.spanner_admin_database_v1 import UpdateDatabaseDdlRequest from google.cloud.spanner_admin_database_v1.types import DatabaseDialect from google.cloud.spanner_v1.transaction import BatchTransactionId -from google.cloud.spanner_v1 import ExecuteSqlRequest, SpannerAsyncClient +from 
google.cloud.spanner_v1 import ExecuteSqlRequest from google.cloud.spanner_v1 import Type from google.cloud.spanner_v1 import TypeCode from google.cloud.spanner_v1 import TransactionSelector @@ -144,7 +143,6 @@ class Database(object): """ _spanner_api: SpannerClient = None - _spanner_async_api: SpannerAsyncClient = None def __init__( self, @@ -440,28 +438,6 @@ def spanner_api(self): ) return self._spanner_api - @property - def spanner_async_api(self): - if self._spanner_async_api is None: - client_info = self._instance._client._client_info - client_options = self._instance._client._client_options - if self._instance.emulator_host is not None: - channel=grpc.aio.insecure_channel(target=self._instance.emulator_host) - transport = SpannerGrpcTransport(channel=channel) - self._spanner_async_api = SpannerAsyncClient( - client_info=client_info, transport=transport - ) - return self._spanner_async_api - credentials = self._instance._client.credentials - if isinstance(credentials, google.auth.credentials.Scoped): - credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,)) - self._spanner_async_api = SpannerAsyncClient( - credentials=credentials, - client_info=client_info, - client_options=client_options, - ) - return self._spanner_async_api - def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index fce1002942..a2afa32174 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -127,7 +127,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if isinstance(channel, grpc.Channel) or isinstance(channel, grpc.aio.Channel): + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
credentials = None self._ignore_credentials = True diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 5484615dd2..86b4a96b67 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +import base64 from concurrent import futures from google.protobuf import empty_pb2 @@ -31,22 +31,32 @@ def __init__(self): self.results = {} def add_result(self, sql: str, result: result_set.ResultSet): - self.results[sql.lower()] = result + self.results[sql.lower().strip()] = result + + def get_result(self, sql: str) -> result_set.ResultSet: + result = self.results.get(sql.lower().strip()) + if result is None: + raise ValueError(f"No result found for {sql}") + return result def get_result_as_partial_result_sets( self, sql: str ) -> [result_set.PartialResultSet]: - result: result_set.ResultSet = self.results.get(sql.lower()) - if result is None: - return [] + result: result_set.ResultSet = self.get_result(sql) partials = [] first = True - for row in result.rows: + if len(result.rows) == 0: partial = result_set.PartialResultSet() - if first: - partial.metadata = result.metadata - partial.values.extend(row) + partial.metadata = result.metadata partials.append(partial) + else: + for row in result.rows: + partial = result_set.PartialResultSet() + if first: + partial.metadata = result.metadata + partial.values.extend(row) + partials.append(partial) + partials[len(partials) - 1].stats = result.stats return partials @@ -56,6 +66,8 @@ def __init__(self): self._requests = [] self.session_counter = 0 self.sessions = {} + self.transaction_counter = 0 + self.transactions = {} self._mock_spanner = MockSpanner() @property @@ -93,18 +105,20 @@ def __create_session(self, database: str, 
session_template: spanner.Session): return session def GetSession(self, request, context): + self._requests.append(request) return spanner.Session() def ListSessions(self, request, context): + self._requests.append(request) return [spanner.Session()] def DeleteSession(self, request, context): + self._requests.append(request) return empty_pb2.Empty() def ExecuteSql(self, request, context): self._requests.append(request) - result: result_set.ResultSet = self.mock_spanner.results.get(request.sql.lower()) - return result + return result_set.ResultSet() def ExecuteStreamingSql(self, request, context): self._requests.append(request) @@ -113,31 +127,74 @@ def ExecuteStreamingSql(self, request, context): yield result def ExecuteBatchDml(self, request, context): - return spanner.ExecuteBatchDmlResponse() + self._requests.append(request) + response = spanner.ExecuteBatchDmlResponse() + started_transaction = None + if not request.transaction.begin == transaction.TransactionOptions(): + started_transaction = self.__create_transaction( + request.session, request.transaction.begin + ) + first = True + for statement in request.statements: + result = self.mock_spanner.get_result(statement.sql) + if first and started_transaction is not None: + result = result_set.ResultSet( + self.mock_spanner.get_result(statement.sql) + ) + result.metadata = result_set.ResultSetMetadata(result.metadata) + result.metadata.transaction = started_transaction + response.result_sets.append(result) + return response def Read(self, request, context): + self._requests.append(request) return result_set.ResultSet() def StreamingRead(self, request, context): + self._requests.append(request) for result in [result_set.PartialResultSet(), result_set.PartialResultSet()]: yield result def BeginTransaction(self, request, context): - return transaction.Transaction() + self._requests.append(request) + return self.__create_transaction(request.session, request.options) + + def __create_transaction( + self, session: 
str, options: transaction.TransactionOptions + ) -> transaction.Transaction: + session = self.sessions[session] + if session is None: + raise ValueError(f"Session not found: {session}") + self.transaction_counter += 1 + id_bytes = bytes( + f"{session.name}/transactions/{self.transaction_counter}", "UTF-8" + ) + transaction_id = base64.urlsafe_b64encode(id_bytes) + self.transactions[transaction_id] = options + return transaction.Transaction(dict(id=transaction_id)) def Commit(self, request, context): + self._requests.append(request) + tx = self.transactions[request.transaction_id] + if tx is None: + raise ValueError(f"Transaction not found: {request.transaction_id}") + del self.transactions[request.transaction_id] return commit.CommitResponse() def Rollback(self, request, context): + self._requests.append(request) return empty_pb2.Empty() def PartitionQuery(self, request, context): + self._requests.append(request) return spanner.PartitionResponse() def PartitionRead(self, request, context): + self._requests.append(request) return spanner.PartitionResponse() def BatchWrite(self, request, context): + self._requests.append(request) for result in [spanner.BatchWriteResponse(), spanner.BatchWriteResponse()]: yield result diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index ae23533df7..f2dab9af06 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio + import unittest from google.cloud.spanner_admin_database_v1.types import spanner_database_admin @@ -28,7 +28,8 @@ Client, FixedSizePool, BatchCreateSessionsRequest, - ExecuteSqlRequest, CreateSessionRequest, + ExecuteSqlRequest, + GetSessionRequest, ) from google.cloud.spanner_v1.database import Database from google.cloud.spanner_v1.instance import Instance @@ -124,9 +125,12 @@ def test_select1(self): self.assertEqual(1, row[0]) self.assertEqual(1, len(result_list)) requests = self.spanner_service.requests - self.assertEqual(2, len(requests)) + self.assertEqual(3, len(requests)) self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) - self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) + # TODO: Optimize FixedSizePool so this GetSessionRequest is not executed + # every time a session is fetched. + self.assertTrue(isinstance(requests[1], GetSessionRequest)) + self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) def test_create_table(self): database_admin_api = self.client.database_admin_api @@ -145,28 +149,3 @@ def test_create_table(self): ) operation = database_admin_api.update_database_ddl(request) operation.result(1) - - - def test_async_select1(self): - self._add_select1_result() - results = asyncio.run(self._async_select1()) - result_list = [] - for row in results.rows: - result_list.append(row) - self.assertEqual("1", row[0]) - self.assertEqual(1, len(result_list)) - requests = self.spanner_service.requests - self.assertEqual(3, len(requests)) - self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) - self.assertTrue(isinstance(requests[1], CreateSessionRequest)) - self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) - - async def _async_select1(self): - client = self.database.spanner_async_api - create_session_request = CreateSessionRequest(database=self._database.name) - session = await client.create_session(create_session_request) - execute_request = ExecuteSqlRequest(dict( - 
session=session.name, - sql="select 1", - )) - return await client.execute_sql(execute_request) From ddb44a3bc6977635ad1683dd4d26ff6c7471147d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Wed, 4 Dec 2024 15:14:56 +0100 Subject: [PATCH 08/15] chore: cleanup --- google/cloud/spanner_v1/testing/mock_database_admin.py | 2 -- google/cloud/spanner_v1/testing/mock_spanner.py | 4 +--- .../spanner_v1/testing/spanner_database_admin_pb2_grpc.py | 4 ++-- google/cloud/spanner_v1/testing/spanner_pb2_grpc.py | 4 ++-- 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/google/cloud/spanner_v1/testing/mock_database_admin.py b/google/cloud/spanner_v1/testing/mock_database_admin.py index fe9ac979eb..e5aad9cd70 100644 --- a/google/cloud/spanner_v1/testing/mock_database_admin.py +++ b/google/cloud/spanner_v1/testing/mock_database_admin.py @@ -14,10 +14,8 @@ from google.longrunning import operations_pb2 as operations_pb2 from google.protobuf import empty_pb2 - import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc - # An in-memory mock DatabaseAdmin server that can be used for testing. class DatabaseAdminServicer(database_admin_grpc.DatabaseAdminServicer): def __init__(self): diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 86b4a96b67..052478a5c7 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -12,11 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import base64 +import grpc from concurrent import futures from google.protobuf import empty_pb2 -import grpc - from google.cloud.spanner_v1.testing.mock_database_admin import DatabaseAdminServicer import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc import google.cloud.spanner_v1.testing.spanner_pb2_grpc as spanner_grpc @@ -25,7 +24,6 @@ import google.cloud.spanner_v1.types.spanner as spanner import google.cloud.spanner_v1.types.transaction as transaction - class MockSpanner: def __init__(self): self.results = {} diff --git a/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py index 3001ce5c6a..fdc26b30ad 100644 --- a/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_database_admin_pb2_grpc.py @@ -12,14 +12,14 @@ # ./google/spanner/admin/database/v1/*.proto """Client and server classes corresponding to protobuf-defined services.""" + +import grpc from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -import grpc - from google.cloud.spanner_admin_database_v1.types import ( backup as google_dot_spanner_dot_admin_dot_database_dot_v1_dot_backup__pb2, ) diff --git a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py index 90bb228553..c4622a6a34 100644 --- a/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py +++ b/google/cloud/spanner_v1/testing/spanner_pb2_grpc.py @@ -11,9 +11,9 @@ # ./google/spanner/v1/*.proto """Client and server classes corresponding to protobuf-defined services.""" -from google.protobuf import empty_pb2 as 
google_dot_protobuf_dot_empty__pb2 -import grpc +import grpc +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.cloud.spanner_v1.types import ( commit_response as google_dot_spanner_dot_v1_dot_commit__response__pb2, ) From 14fd3ff7c7c5bb2f08b0bb116b4143a5a6c45c99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Wed, 4 Dec 2024 16:18:09 +0100 Subject: [PATCH 09/15] perf: remove repeated GetSession calls for FixedSizePool Add a _last_use_time to Session and use this to determine whether the FixedSizePool should check whether the session still exists, and whether it should be replaced. This significantly reduces the number of times that GetSession is called when using FixedSizePool. --- google/cloud/spanner_v1/pool.py | 16 +++++-- google/cloud/spanner_v1/session.py | 17 +++++++ .../spanner_v1/testing/mock_database_admin.py | 1 + .../cloud/spanner_v1/testing/mock_spanner.py | 1 + tests/mockserver_tests/test_basics.py | 8 +--- tests/unit/test_pool.py | 44 +++++++++++++++---- 6 files changed, 69 insertions(+), 18 deletions(-) diff --git a/google/cloud/spanner_v1/pool.py b/google/cloud/spanner_v1/pool.py index 56837bfc0b..27d90897df 100644 --- a/google/cloud/spanner_v1/pool.py +++ b/google/cloud/spanner_v1/pool.py @@ -26,7 +26,12 @@ ) from warnings import warn -_NOW = datetime.datetime.utcnow # unit tests may replace + +def _now(): + datetime.datetime.now(datetime.UTC) + + +_NOW = _now # unit tests may replace class AbstractSessionPool(object): @@ -145,7 +150,8 @@ class FixedSizePool(AbstractSessionPool): - Pre-allocates / creates a fixed number of sessions. - "Pings" existing sessions via :meth:`session.exists` before returning - them, and replaces expired sessions. + sessions that have not been used for more than 55 minutes and replaces + expired sessions. - Blocks, with a timeout, when :meth:`get` is called on an empty pool. Raises after timing out. 
@@ -171,6 +177,7 @@ class FixedSizePool(AbstractSessionPool): DEFAULT_SIZE = 10 DEFAULT_TIMEOUT = 10 + DEFAULT_MAX_AGE_MINUTES = 55 def __init__( self, @@ -178,11 +185,13 @@ def __init__( default_timeout=DEFAULT_TIMEOUT, labels=None, database_role=None, + max_age_minutes=DEFAULT_MAX_AGE_MINUTES, ): super(FixedSizePool, self).__init__(labels=labels, database_role=database_role) self.size = size self.default_timeout = default_timeout self._sessions = queue.LifoQueue(size) + self._max_age = datetime.timedelta(minutes=max_age_minutes) def bind(self, database): """Associate the pool with a database. @@ -230,8 +239,9 @@ def get(self, timeout=None): timeout = self.default_timeout session = self._sessions.get(block=True, timeout=timeout) + age = _NOW() - session.last_use_time - if not session.exists(): + if age >= self._max_age and not session.exists(): session = self._database.session() session.create() diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index 6281148590..2bff783c8d 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -17,6 +17,7 @@ from functools import total_ordering import random import time +from datetime import datetime, UTC from google.api_core.exceptions import Aborted from google.api_core.exceptions import GoogleAPICallError @@ -69,6 +70,7 @@ def __init__(self, database, labels=None, database_role=None): labels = {} self._labels = labels self._database_role = database_role + self._last_use_time = datetime.now(UTC) def __lt__(self, other): return self._session_id < other._session_id @@ -78,6 +80,14 @@ def session_id(self): """Read-only ID, set by the back-end during :meth:`create`.""" return self._session_id + @property + def last_use_time(self): + """Approximate last use time of this session + + :rtype: datetime + :returns: the approximate last use time of this session""" + return self._last_use_time + @property def database_role(self): """User-assigned database-role for the 
session. @@ -154,6 +164,7 @@ def create(self): metadata=metadata, ) self._session_id = session_pb.name.split("/")[-1] + self._last_use_time = datetime.now() def exists(self): """Test for the existence of this session. @@ -181,6 +192,7 @@ def exists(self): ) as span: try: api.get_session(name=self.name, metadata=metadata) + self._last_use_time = datetime.now() if span: span.set_attribute("session_found", True) except NotFound: @@ -222,6 +234,7 @@ def ping(self): metadata = _metadata_with_prefix(self._database.name) request = ExecuteSqlRequest(session=self.name, sql="SELECT 1") api.execute_sql(request=request, metadata=metadata) + self._last_use_time = datetime.now() def snapshot(self, **kw): """Create a snapshot to perform a set of reads with shared staleness. @@ -273,6 +286,7 @@ def read(self, table, columns, keyset, index="", limit=0, column_info=None): :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. """ + self._last_use_time = datetime.now() return self.snapshot().read( table, columns, keyset, index, limit, column_info=column_info ) @@ -339,6 +353,7 @@ def execute_sql( :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. 
""" + self._last_use_time = datetime.now() return self.snapshot().execute_sql( sql, params, @@ -378,6 +393,7 @@ def transaction(self): del self._transaction txn = self._transaction = Transaction(self) + self._last_use_time = datetime.now() return txn def run_in_transaction(self, func, *args, **kw): @@ -444,6 +460,7 @@ def run_in_transaction(self, func, *args, **kw): raise try: + self._last_use_time = datetime.now() txn.commit( return_commit_stats=self._database.log_commit_stats, request_options=commit_request_options, diff --git a/google/cloud/spanner_v1/testing/mock_database_admin.py b/google/cloud/spanner_v1/testing/mock_database_admin.py index e5aad9cd70..a9b4eb6392 100644 --- a/google/cloud/spanner_v1/testing/mock_database_admin.py +++ b/google/cloud/spanner_v1/testing/mock_database_admin.py @@ -16,6 +16,7 @@ from google.protobuf import empty_pb2 import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc + # An in-memory mock DatabaseAdmin server that can be used for testing. 
class DatabaseAdminServicer(database_admin_grpc.DatabaseAdminServicer): def __init__(self): diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 052478a5c7..d01c63aff5 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -24,6 +24,7 @@ import google.cloud.spanner_v1.types.spanner as spanner import google.cloud.spanner_v1.types.transaction as transaction + class MockSpanner: def __init__(self): self.results = {} diff --git a/tests/mockserver_tests/test_basics.py b/tests/mockserver_tests/test_basics.py index f2dab9af06..12a224314f 100644 --- a/tests/mockserver_tests/test_basics.py +++ b/tests/mockserver_tests/test_basics.py @@ -29,7 +29,6 @@ FixedSizePool, BatchCreateSessionsRequest, ExecuteSqlRequest, - GetSessionRequest, ) from google.cloud.spanner_v1.database import Database from google.cloud.spanner_v1.instance import Instance @@ -125,12 +124,9 @@ def test_select1(self): self.assertEqual(1, row[0]) self.assertEqual(1, len(result_list)) requests = self.spanner_service.requests - self.assertEqual(3, len(requests)) + self.assertEqual(2, len(requests), msg=requests) self.assertTrue(isinstance(requests[0], BatchCreateSessionsRequest)) - # TODO: Optimize FixedSizePool so this GetSessionRequest is not executed - # every time a session is fetched. 
- self.assertTrue(isinstance(requests[1], GetSessionRequest)) - self.assertTrue(isinstance(requests[2], ExecuteSqlRequest)) + self.assertTrue(isinstance(requests[1], ExecuteSqlRequest)) def test_create_table(self): database_admin_api = self.client.database_admin_api diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py index 23ed3e7251..17b98be396 100644 --- a/tests/unit/test_pool.py +++ b/tests/unit/test_pool.py @@ -15,6 +15,7 @@ from functools import total_ordering import unittest +from datetime import datetime, UTC, timedelta import mock @@ -184,13 +185,30 @@ def test_bind(self): for session in SESSIONS: session.create.assert_not_called() - def test_get_non_expired(self): + def test_get_active(self): pool = self._make_one(size=4) database = _Database("name") SESSIONS = sorted([_Session(database) for i in range(0, 4)]) database._sessions.extend(SESSIONS) pool.bind(database) + # check if sessions returned in LIFO order + for i in (3, 2, 1, 0): + session = pool.get() + self.assertIs(session, SESSIONS[i]) + self.assertFalse(session._exists_checked) + self.assertFalse(pool._sessions.full()) + + def test_get_non_expired(self): + pool = self._make_one(size=4) + database = _Database("name") + last_use_time = datetime.now(UTC) - timedelta(minutes=56) + SESSIONS = sorted( + [_Session(database, last_use_time=last_use_time) for i in range(0, 4)] + ) + database._sessions.extend(SESSIONS) + pool.bind(database) + # check if sessions returned in LIFO order for i in (3, 2, 1, 0): session = pool.get() @@ -201,7 +219,8 @@ def test_get_non_expired(self): def test_get_expired(self): pool = self._make_one(size=4) database = _Database("name") - SESSIONS = [_Session(database)] * 5 + last_use_time = datetime.now(UTC) - timedelta(minutes=65) + SESSIONS = [_Session(database, last_use_time=last_use_time)] * 5 SESSIONS[0]._exists = False database._sessions.extend(SESSIONS) pool.bind(database) @@ -497,7 +516,7 @@ def test_get_hit_w_ping(self): SESSIONS = [_Session(database)] * 4 
database._sessions.extend(SESSIONS) - sessions_created = datetime.datetime.utcnow() - datetime.timedelta(seconds=4000) + sessions_created = datetime.datetime.now(UTC) - datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: sessions_created): pool.bind(database) @@ -519,7 +538,7 @@ def test_get_hit_w_ping_expired(self): SESSIONS[0]._exists = False database._sessions.extend(SESSIONS) - sessions_created = datetime.datetime.utcnow() - datetime.timedelta(seconds=4000) + sessions_created = datetime.datetime.now(UTC) - datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: sessions_created): pool.bind(database) @@ -575,7 +594,7 @@ def test_put_non_full(self): pool = self._make_one(size=1) session_queue = pool._sessions = _Queue() - now = datetime.datetime.utcnow() + now = datetime.datetime.now(UTC) database = _Database("name") session = _Session(database) @@ -631,7 +650,7 @@ def test_ping_oldest_stale_but_exists(self): database._sessions.extend(SESSIONS) pool.bind(database) - later = datetime.datetime.utcnow() + datetime.timedelta(seconds=4000) + later = datetime.datetime.now(UTC) + datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: later): pool.ping() @@ -649,7 +668,7 @@ def test_ping_oldest_stale_and_not_exists(self): database._sessions.extend(SESSIONS) pool.bind(database) - later = datetime.datetime.utcnow() + datetime.timedelta(seconds=4000) + later = datetime.datetime.now(UTC) + datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: later): pool.ping() @@ -733,7 +752,7 @@ def test_bind_w_timestamp_race(self): from google.cloud._testing import _Monkey from google.cloud.spanner_v1 import pool as MUT - NOW = datetime.datetime.utcnow() + NOW = datetime.datetime.now(UTC) pool = self._make_one() database = _Database("name") SESSIONS = [_Session(database) for _ in range(10)] @@ -915,7 +934,9 @@ def _make_transaction(*args, **kw): class _Session(object): _transaction = None - def __init__(self, database, exists=True, 
transaction=None): + def __init__( + self, database, exists=True, transaction=None, last_use_time=datetime.now(UTC) + ): self._database = database self._exists = exists self._exists_checked = False @@ -923,10 +944,15 @@ def __init__(self, database, exists=True, transaction=None): self.create = mock.Mock() self._deleted = False self._transaction = transaction + self._last_use_time = last_use_time def __lt__(self, other): return id(self) < id(other) + @property + def last_use_time(self): + return self._last_use_time + def exists(self): self._exists_checked = True return self._exists From d9891230e3da11ad4f9bba63852e1002ecc1e450 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Wed, 4 Dec 2024 16:24:08 +0100 Subject: [PATCH 10/15] chore: run code formatter --- google/cloud/spanner_v1/testing/mock_database_admin.py | 1 + google/cloud/spanner_v1/testing/mock_spanner.py | 1 + 2 files changed, 2 insertions(+) diff --git a/google/cloud/spanner_v1/testing/mock_database_admin.py b/google/cloud/spanner_v1/testing/mock_database_admin.py index e5aad9cd70..a9b4eb6392 100644 --- a/google/cloud/spanner_v1/testing/mock_database_admin.py +++ b/google/cloud/spanner_v1/testing/mock_database_admin.py @@ -16,6 +16,7 @@ from google.protobuf import empty_pb2 import google.cloud.spanner_v1.testing.spanner_database_admin_pb2_grpc as database_admin_grpc + # An in-memory mock DatabaseAdmin server that can be used for testing. 
class DatabaseAdminServicer(database_admin_grpc.DatabaseAdminServicer): def __init__(self): diff --git a/google/cloud/spanner_v1/testing/mock_spanner.py b/google/cloud/spanner_v1/testing/mock_spanner.py index 052478a5c7..d01c63aff5 100644 --- a/google/cloud/spanner_v1/testing/mock_spanner.py +++ b/google/cloud/spanner_v1/testing/mock_spanner.py @@ -24,6 +24,7 @@ import google.cloud.spanner_v1.types.spanner as spanner import google.cloud.spanner_v1.types.transaction as transaction + class MockSpanner: def __init__(self): self.results = {} From b7f85acd91143a57901ec561a799026a18e5cea1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Wed, 4 Dec 2024 16:36:47 +0100 Subject: [PATCH 11/15] chore: revert to utcnow() --- google/cloud/spanner_v1/pool.py | 7 +------ google/cloud/spanner_v1/session.py | 4 ++-- tests/unit/test_pool.py | 20 ++++++++++---------- 3 files changed, 13 insertions(+), 18 deletions(-) diff --git a/google/cloud/spanner_v1/pool.py b/google/cloud/spanner_v1/pool.py index 27d90897df..c95ef7a7b9 100644 --- a/google/cloud/spanner_v1/pool.py +++ b/google/cloud/spanner_v1/pool.py @@ -26,12 +26,7 @@ ) from warnings import warn - -def _now(): - datetime.datetime.now(datetime.UTC) - - -_NOW = _now # unit tests may replace +_NOW = datetime.datetime.utcnow # unit tests may replace class AbstractSessionPool(object): diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index 2bff783c8d..bfc6422a71 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -17,7 +17,7 @@ from functools import total_ordering import random import time -from datetime import datetime, UTC +from datetime import datetime from google.api_core.exceptions import Aborted from google.api_core.exceptions import GoogleAPICallError @@ -70,7 +70,7 @@ def __init__(self, database, labels=None, database_role=None): labels = {} self._labels = labels self._database_role = database_role - self._last_use_time = 
datetime.now(UTC) + self._last_use_time = datetime.utcnow() def __lt__(self, other): return self._session_id < other._session_id diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py index 17b98be396..2e3b46fa73 100644 --- a/tests/unit/test_pool.py +++ b/tests/unit/test_pool.py @@ -15,7 +15,7 @@ from functools import total_ordering import unittest -from datetime import datetime, UTC, timedelta +from datetime import datetime, timedelta import mock @@ -202,7 +202,7 @@ def test_get_active(self): def test_get_non_expired(self): pool = self._make_one(size=4) database = _Database("name") - last_use_time = datetime.now(UTC) - timedelta(minutes=56) + last_use_time = datetime.utcnow() - timedelta(minutes=56) SESSIONS = sorted( [_Session(database, last_use_time=last_use_time) for i in range(0, 4)] ) @@ -219,7 +219,7 @@ def test_get_non_expired(self): def test_get_expired(self): pool = self._make_one(size=4) database = _Database("name") - last_use_time = datetime.now(UTC) - timedelta(minutes=65) + last_use_time = datetime.utcnow() - timedelta(minutes=65) SESSIONS = [_Session(database, last_use_time=last_use_time)] * 5 SESSIONS[0]._exists = False database._sessions.extend(SESSIONS) @@ -516,7 +516,7 @@ def test_get_hit_w_ping(self): SESSIONS = [_Session(database)] * 4 database._sessions.extend(SESSIONS) - sessions_created = datetime.datetime.now(UTC) - datetime.timedelta(seconds=4000) + sessions_created = datetime.datetime.utcnow() - datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: sessions_created): pool.bind(database) @@ -538,7 +538,7 @@ def test_get_hit_w_ping_expired(self): SESSIONS[0]._exists = False database._sessions.extend(SESSIONS) - sessions_created = datetime.datetime.now(UTC) - datetime.timedelta(seconds=4000) + sessions_created = datetime.datetime.utcnow() - datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: sessions_created): pool.bind(database) @@ -594,7 +594,7 @@ def test_put_non_full(self): pool = self._make_one(size=1) 
session_queue = pool._sessions = _Queue() - now = datetime.datetime.now(UTC) + now = datetime.datetime.utcnow() database = _Database("name") session = _Session(database) @@ -650,7 +650,7 @@ def test_ping_oldest_stale_but_exists(self): database._sessions.extend(SESSIONS) pool.bind(database) - later = datetime.datetime.now(UTC) + datetime.timedelta(seconds=4000) + later = datetime.datetime.utcnow() + datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: later): pool.ping() @@ -668,7 +668,7 @@ def test_ping_oldest_stale_and_not_exists(self): database._sessions.extend(SESSIONS) pool.bind(database) - later = datetime.datetime.now(UTC) + datetime.timedelta(seconds=4000) + later = datetime.datetime.utcnow() + datetime.timedelta(seconds=4000) with _Monkey(MUT, _NOW=lambda: later): pool.ping() @@ -752,7 +752,7 @@ def test_bind_w_timestamp_race(self): from google.cloud._testing import _Monkey from google.cloud.spanner_v1 import pool as MUT - NOW = datetime.datetime.now(UTC) + NOW = datetime.datetime.utcnow() pool = self._make_one() database = _Database("name") SESSIONS = [_Session(database) for _ in range(10)] @@ -935,7 +935,7 @@ class _Session(object): _transaction = None def __init__( - self, database, exists=True, transaction=None, last_use_time=datetime.now(UTC) + self, database, exists=True, transaction=None, last_use_time=datetime.utcnow() ): self._database = database self._exists = exists From a36826b49ccef0c3c0364f6aeead29e51b3e1d98 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 5 Dec 2024 10:36:54 +0000 Subject: [PATCH 12/15] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- noxfile.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/noxfile.py b/noxfile.py index 905df735bc..f5a2761d73 100644 --- 
a/noxfile.py +++ b/noxfile.py @@ -33,7 +33,6 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", @@ -235,34 +234,6 @@ def unit(session, protobuf_implementation): ) -@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION) -def mockserver(session): - # Install all test dependencies, then install this package in-place. - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - # install_unittest_dependencies(session, "-c", constraints_path) - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) - - # Run py.test against the mockserver tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "mockserver_tests"), - *session.posargs, - ) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
From 57caf8d76a7e25626d2b3d4aef356567d5f0ffbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Thu, 5 Dec 2024 13:21:28 +0100 Subject: [PATCH 13/15] fix: update _last_use_time in trace_call --- .../spanner_v1/_opentelemetry_tracing.py | 4 +++ google/cloud/spanner_v1/session.py | 5 ---- google/cloud/spanner_v1/snapshot.py | 2 ++ google/cloud/spanner_v1/transaction.py | 1 + noxfile.py | 29 +++++++++++++++++++ 5 files changed, 36 insertions(+), 5 deletions(-) diff --git a/google/cloud/spanner_v1/_opentelemetry_tracing.py b/google/cloud/spanner_v1/_opentelemetry_tracing.py index feb3b92756..efbeea05e7 100644 --- a/google/cloud/spanner_v1/_opentelemetry_tracing.py +++ b/google/cloud/spanner_v1/_opentelemetry_tracing.py @@ -15,6 +15,7 @@ """Manages OpenTelemetry trace creation and handling""" from contextlib import contextmanager +from datetime import datetime import os from google.cloud.spanner_v1 import SpannerClient @@ -56,6 +57,9 @@ def get_tracer(tracer_provider=None): @contextmanager def trace_call(name, session, extra_attributes=None, observability_options=None): + if session: + session._last_use_time = datetime.now() + if not HAS_OPENTELEMETRY_INSTALLED or not session: # Empty context manager. Users will have to check if the generated value is None or a span yield None diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index bfc6422a71..92a78b6c70 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -164,7 +164,6 @@ def create(self): metadata=metadata, ) self._session_id = session_pb.name.split("/")[-1] - self._last_use_time = datetime.now() def exists(self): """Test for the existence of this session. @@ -286,7 +285,6 @@ def read(self, table, columns, keyset, index="", limit=0, column_info=None): :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. 
""" - self._last_use_time = datetime.now() return self.snapshot().read( table, columns, keyset, index, limit, column_info=column_info ) @@ -353,7 +351,6 @@ def execute_sql( :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. """ - self._last_use_time = datetime.now() return self.snapshot().execute_sql( sql, params, @@ -393,7 +390,6 @@ def transaction(self): del self._transaction txn = self._transaction = Transaction(self) - self._last_use_time = datetime.now() return txn def run_in_transaction(self, func, *args, **kw): @@ -460,7 +456,6 @@ def run_in_transaction(self, func, *args, **kw): raise try: - self._last_use_time = datetime.now() txn.commit( return_commit_stats=self._database.log_commit_stats, request_options=commit_request_options, diff --git a/google/cloud/spanner_v1/snapshot.py b/google/cloud/spanner_v1/snapshot.py index 143e17c503..89b5094706 100644 --- a/google/cloud/spanner_v1/snapshot.py +++ b/google/cloud/spanner_v1/snapshot.py @@ -14,6 +14,7 @@ """Model a set of read-only queries to a database as a snapshot.""" +from datetime import datetime import functools import threading from google.protobuf.struct_pb2 import Struct @@ -364,6 +365,7 @@ def read( ) self._read_request_count += 1 + self._session._last_use_time = datetime.now() if self._multi_use: return StreamedResultSet( diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py index beb3e46edb..62304bf756 100644 --- a/google/cloud/spanner_v1/transaction.py +++ b/google/cloud/spanner_v1/transaction.py @@ -13,6 +13,7 @@ # limitations under the License. 
"""Spanner read-write transaction support.""" +from datetime import datetime import functools import threading from google.protobuf.struct_pb2 import Struct diff --git a/noxfile.py b/noxfile.py index f5a2761d73..905df735bc 100644 --- a/noxfile.py +++ b/noxfile.py @@ -33,6 +33,7 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", @@ -234,6 +235,34 @@ def unit(session, protobuf_implementation): ) +@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION) +def mockserver(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + # install_unittest_dependencies(session, "-c", constraints_path) + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) + + # Run py.test against the mockserver tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "mockserver_tests"), + *session.posargs, + ) + + def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
From 23ea0405e3df2b28cc42b3d6b4bf6b9952471e60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Thu, 5 Dec 2024 13:23:19 +0100 Subject: [PATCH 14/15] chore: fix formatting --- google/cloud/spanner_v1/transaction.py | 1 - 1 file changed, 1 deletion(-) diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py index 62304bf756..beb3e46edb 100644 --- a/google/cloud/spanner_v1/transaction.py +++ b/google/cloud/spanner_v1/transaction.py @@ -13,7 +13,6 @@ # limitations under the License. """Spanner read-write transaction support.""" -from datetime import datetime import functools import threading from google.protobuf.struct_pb2 import Struct From ad9db416505e98bfdf2979153db015257f5f0594 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Knut=20Olav=20L=C3=B8ite?= Date: Thu, 5 Dec 2024 14:57:09 +0100 Subject: [PATCH 15/15] fix: remove unnecessary update of _last_use_time --- google/cloud/spanner_v1/session.py | 1 - 1 file changed, 1 deletion(-) diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index 92a78b6c70..539f36af2b 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -191,7 +191,6 @@ def exists(self): ) as span: try: api.get_session(name=self.name, metadata=metadata) - self._last_use_time = datetime.now() if span: span.set_attribute("session_found", True) except NotFound: