diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 729aa278..8cee1114 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -146,6 +146,7 @@ Running System Tests - To run system tests for a given package, you can execute:: + $ export SYSTEM_TESTS_DATABASE=system-tests-named-db $ nox -e system .. note:: @@ -188,6 +189,7 @@ Running System Tests # Create the indexes $ gcloud datastore indexes create tests/system/index.yaml + $ gcloud alpha datastore indexes create --database=$SYSTEM_TESTS_DATABASE tests/system/index.yaml ************* diff --git a/google/cloud/ndb/_datastore_api.py b/google/cloud/ndb/_datastore_api.py index a4afbcde..19d716a3 100644 --- a/google/cloud/ndb/_datastore_api.py +++ b/google/cloud/ndb/_datastore_api.py @@ -19,6 +19,7 @@ import logging from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 from google.cloud.datastore import helpers from google.cloud.datastore_v1.types import datastore as datastore_pb2 from google.cloud.datastore_v1.types import entity as entity_pb2 @@ -56,7 +57,7 @@ def stub(): return context.client.stub -def make_call(rpc_name, request, retries=None, timeout=None): +def make_call(rpc_name, request, retries=None, timeout=None, metadata=()): """Make a call to the Datastore API. Args: @@ -68,6 +69,8 @@ def make_call(rpc_name, request, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Returns: tasklets.Future: Future for the eventual response for the API call. 
@@ -85,7 +88,7 @@ def make_call(rpc_name, request, retries=None, timeout=None): def rpc_call(): context = context_module.get_toplevel_context() - call = method.future(request, timeout=timeout) + call = method.future(request, timeout=timeout, metadata=metadata) rpc = _remote.RemoteCall(call, rpc_name) utils.logging_debug(log, rpc) utils.logging_debug(log, "timeout={}", timeout) @@ -282,7 +285,7 @@ def lookup_callback(self, rpc): future.set_result(entity) -def _datastore_lookup(keys, read_options, retries=None, timeout=None): +def _datastore_lookup(keys, read_options, retries=None, timeout=None, metadata=()): """Issue a Lookup call to Datastore using gRPC. Args: @@ -295,6 +298,8 @@ def _datastore_lookup(keys, read_options, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Returns: tasklets.Future: Future object for eventual result of lookup. @@ -302,11 +307,15 @@ def _datastore_lookup(keys, read_options, retries=None, timeout=None): client = context_module.get_context().client request = datastore_pb2.LookupRequest( project_id=client.project, + database_id=client.database, keys=[key for key in keys], read_options=read_options, ) + metadata = _add_routing_info(metadata, request) - return make_call("lookup", request, retries=retries, timeout=timeout) + return make_call( + "lookup", request, retries=retries, timeout=timeout, metadata=metadata + ) def get_read_options(options, default_read_consistency=None): @@ -843,7 +852,7 @@ def _complete(key_pb): return False -def _datastore_commit(mutations, transaction, retries=None, timeout=None): +def _datastore_commit(mutations, transaction, retries=None, timeout=None, metadata=()): """Call Commit on Datastore. 
Args: @@ -857,6 +866,8 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Returns: tasklets.Tasklet: A future for @@ -870,12 +881,16 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None): client = context_module.get_context().client request = datastore_pb2.CommitRequest( project_id=client.project, + database_id=client.database, mode=mode, mutations=mutations, transaction=transaction, ) + metadata = _add_routing_info(metadata, request) - return make_call("commit", request, retries=retries, timeout=timeout) + return make_call( + "commit", request, retries=retries, timeout=timeout, metadata=metadata + ) def allocate(keys, options): @@ -973,7 +988,7 @@ def allocate_ids_callback(self, rpc): future.set_result(key) -def _datastore_allocate_ids(keys, retries=None, timeout=None): +def _datastore_allocate_ids(keys, retries=None, timeout=None, metadata=()): """Calls ``AllocateIds`` on Datastore. Args: @@ -984,15 +999,22 @@ def _datastore_allocate_ids(keys, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
Returns: tasklets.Future: A future for :class:`google.cloud.datastore_v1.datastore_pb2.AllocateIdsResponse` """ client = context_module.get_context().client - request = datastore_pb2.AllocateIdsRequest(project_id=client.project, keys=keys) + request = datastore_pb2.AllocateIdsRequest( + project_id=client.project, database_id=client.database, keys=keys + ) + metadata = _add_routing_info(metadata, request) - return make_call("allocate_ids", request, retries=retries, timeout=timeout) + return make_call( + "allocate_ids", request, retries=retries, timeout=timeout, metadata=metadata + ) @tasklets.tasklet @@ -1018,7 +1040,7 @@ def begin_transaction(read_only, retries=None, timeout=None): raise tasklets.Return(response.transaction) -def _datastore_begin_transaction(read_only, retries=None, timeout=None): +def _datastore_begin_transaction(read_only, retries=None, timeout=None, metadata=()): """Calls ``BeginTransaction`` on Datastore. Args: @@ -1029,6 +1051,8 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
Returns: tasklets.Tasklet: A future for @@ -1045,10 +1069,19 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None): ) request = datastore_pb2.BeginTransactionRequest( - project_id=client.project, transaction_options=options + project_id=client.project, + database_id=client.database, + transaction_options=options, + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "begin_transaction", + request, + retries=retries, + timeout=timeout, + metadata=metadata, ) - - return make_call("begin_transaction", request, retries=retries, timeout=timeout) @tasklets.tasklet @@ -1069,7 +1102,7 @@ def rollback(transaction, retries=None, timeout=None): yield _datastore_rollback(transaction, retries=retries, timeout=timeout) -def _datastore_rollback(transaction, retries=None, timeout=None): +def _datastore_rollback(transaction, retries=None, timeout=None, metadata=()): """Calls Rollback in Datastore. Args: @@ -1079,6 +1112,8 @@ def _datastore_rollback(transaction, retries=None, timeout=None): If :data:`0` is passed, the call is attempted only once. timeout (float): Timeout, in seconds, to pass to gRPC call. If :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Returns: tasklets.Tasklet: Future for @@ -1086,7 +1121,41 @@ def _datastore_rollback(transaction, retries=None, timeout=None): """ client = context_module.get_context().client request = datastore_pb2.RollbackRequest( - project_id=client.project, transaction=transaction + project_id=client.project, + database_id=client.database, + transaction=transaction, ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "rollback", request, retries=retries, timeout=timeout, metadata=metadata + ) + + +def _add_routing_info(metadata, request): + """Adds routing header info to the given metadata. 
+ + Args: + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Not modified. + request (Any): An appropriate request object for the call, eg, + `entity_pb2.LookupRequest` for calling ``Lookup``. + + Returns: + Sequence[Tuple[str, str]]: Sequence with routing info added, + if it is included in the request. + """ + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + return tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) - return make_call("rollback", request, retries=retries, timeout=timeout) + return tuple(metadata) diff --git a/google/cloud/ndb/_datastore_query.py b/google/cloud/ndb/_datastore_query.py index 05d951c5..90c32ba1 100644 --- a/google/cloud/ndb/_datastore_query.py +++ b/google/cloud/ndb/_datastore_query.py @@ -1010,17 +1010,22 @@ def _datastore_run_query(query): """ query_pb = _query_to_protobuf(query) partition_id = entity_pb2.PartitionId( - project_id=query.project, namespace_id=query.namespace + project_id=query.project, + database_id=query.database, + namespace_id=query.namespace, ) read_options = _datastore_api.get_read_options(query) request = datastore_pb2.RunQueryRequest( project_id=query.project, + database_id=query.database, partition_id=partition_id, query=query_pb, read_options=read_options, ) + metadata = _datastore_api._add_routing_info((), request) + response = yield _datastore_api.make_call( - "run_query", request, timeout=query.timeout + "run_query", request, timeout=query.timeout, metadata=metadata ) utils.logging_debug(log, response) raise tasklets.Return(response) diff --git a/google/cloud/ndb/_gql.py b/google/cloud/ndb/_gql.py index bc827670..2d0a2745 100644 --- a/google/cloud/ndb/_gql.py +++ b/google/cloud/ndb/_gql.py @@ -98,8 +98,7 @@ def __init__(self, query_string, _app=None, _auth_domain=None, 
namespace=None): Args: query_string (str): properly formatted GQL query string. - namespace (str): the namespace to use for this query. - + namespace (str): The namespace to use for this query. Defaults to the client's value. Raises: exceptions.BadQueryError: if the query is not parsable. """ @@ -853,7 +852,10 @@ def _key_function(values): context = context_module.get_context() client = context.client return key.Key( - *values, namespace=context.get_namespace(), project=client.project + *values, + project=client.project, + database=client.database, + namespace=context.get_namespace(), ) _raise_cast_error( "Key requires even number of operands or single string, {}".format(values) diff --git a/google/cloud/ndb/client.py b/google/cloud/ndb/client.py index 2ea7d963..c7959a92 100644 --- a/google/cloud/ndb/client.py +++ b/google/cloud/ndb/client.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""A client for NDB which manages credentials, project, namespace.""" +"""A client for NDB which manages credentials, project, namespace, and database.""" import contextlib import grpc @@ -92,17 +92,25 @@ class Client(google_client.ClientWithProject): client_options (Optional[:class:`~google.api_core.client_options.ClientOptions` or :class:`dict`]) Client options used to set user options on the client. API Endpoint should be set through client_options. + database (Optional[str]): Database to access. Defaults to the (default) database. 
""" SCOPE = ("https://www.googleapis.com/auth/datastore",) """The scopes required for authenticating as a Cloud Datastore consumer.""" def __init__( - self, project=None, namespace=None, credentials=None, client_options=None + self, + project=None, + namespace=None, + credentials=None, + client_options=None, + database=None, ): self.namespace = namespace + self.host = os.environ.get(environment_vars.GCD_HOST, DATASTORE_API_HOST) self.client_info = _CLIENT_INFO self._client_options = client_options + self.database = database # Use insecure connection when using Datastore Emulator, otherwise # use secure connection diff --git a/google/cloud/ndb/key.py b/google/cloud/ndb/key.py index d9ceea61..b2919159 100644 --- a/google/cloud/ndb/key.py +++ b/google/cloud/ndb/key.py @@ -24,12 +24,18 @@ * a Google Cloud Platform project (a string) * a list of one or more ``(kind, id)`` pairs where ``kind`` is a string and ``id`` is either a string or an integer +* an optional database (a string) * an optional namespace (a string) The application ID must always be part of the key, but since most applications can only access their own entities, it defaults to the current application ID and you rarely need to worry about it. +The database is an optional database ID. If unspecified, it defaults +to that of the client. +For usage in Cloud NDB, the default database should always be referred +to as an empty string; please do not use "(default)". + The namespace designates a top-level partition of the key space for a particular application. If you've never heard of namespaces, you can safely ignore this feature. 
@@ -95,7 +101,6 @@ from google.cloud.ndb import tasklets from google.cloud.ndb import utils - __all__ = ["Key", "UNDEFINED"] _APP_ID_ENVIRONMENT = "APPLICATION_ID" _APP_ID_DEFAULT = "_" @@ -103,6 +108,7 @@ _REFERENCE_APP_MISMATCH = ( "Key reference constructed uses a different app {!r} than the one specified {!r}" ) +_REFERENCE_DATABASE_MISMATCH = "Key reference constructed uses a different database {!r} than the one specified {!r}" _REFERENCE_NAMESPACE_MISMATCH = ( "Key reference constructed uses a different namespace {!r} than " "the one specified {!r}" @@ -120,9 +126,9 @@ UNDEFINED = object() """Sentinel value. -Used to indicate a namespace hasn't been explicitly set in key construction. +Used to indicate a database or namespace hasn't been explicitly set in key construction. Used to distinguish between not passing a value and passing `None`, which -indicates the default namespace. +indicates the default database/namespace. """ @@ -140,9 +146,10 @@ class Key(object): from google.cloud.ndb import context as context_module client = mock.Mock( project="testing", + database=None, namespace=None, stub=mock.Mock(spec=()), - spec=("project", "namespace", "stub"), + spec=("project", "database", "namespace", "stub"), ) context = context_module.Context(client).use() context.__enter__() @@ -269,6 +276,9 @@ class Key(object): parent (Optional[Key]): The parent of the key being constructed. If provided, the key path will be **relative** to the parent key's path. + database (Optional[str]): The database to use. + Defaults to that of the client if a parent was specified, and + to the default database if it was not. Raises: TypeError: If none of ``reference``, ``serialized``, ``urlsafe``, @@ -317,9 +327,10 @@ def __repr__(self): """String representation used by :class:`str() ` and :func:`repr`. We produce a short string that conveys all relevant information, - suppressing project and namespace when they are equal to the default. 
- In many cases, this string should be able to be used to invoke the - constructor. + suppressing project, database, and namespace when they are equal to their + respective defaults. + + In many cases, this string should be able to be used to invoke the constructor. For example: @@ -330,14 +341,16 @@ def __repr__(self): "Key('hi', 100)" >>> >>> key = ndb.Key( - ... "bye", "hundred", project="specific", namespace="space" + ... "bye", "hundred", project="specific", database="db", namespace="space", ... ) >>> str(key) - "Key('bye', 'hundred', project='specific', namespace='space')" + "Key('bye', 'hundred', project='specific', database='db', namespace='space')" """ args = ["{!r}".format(item) for item in self.flat()] if self.project() != _project_from_app(None): args.append("project={!r}".format(self.app())) + if self.database(): + args.append("database={!r}".format(self.database())) if self.namespace() is not None: args.append("namespace={!r}".format(self.namespace())) @@ -352,7 +365,7 @@ def __hash__(self): .. note:: - This ignores ``app`` and ``namespace``. Since :func:`hash` isn't + This ignores ``app``, ``database``, and ``namespace``. Since :func:`hash` isn't expected to return a unique value (it just reduces the chance of collision), this doesn't try to increase entropy by including other values. The primary concern is that hashes of equal keys are @@ -365,7 +378,7 @@ def __hash__(self): def _tuple(self): """Helper to return an orderable tuple.""" - return (self.app(), self.namespace(), self.pairs()) + return (self.app(), self.namespace(), self.database() or "", self.pairs()) def __eq__(self, other): """Equality comparison operation.""" @@ -409,16 +422,19 @@ def __getstate__(self): Returns: Tuple[Dict[str, Any]]: A tuple containing a single dictionary of - state to pickle. The dictionary has three keys ``pairs``, ``app`` - and ``namespace``. + state to pickle. The dictionary has four keys: ``pairs``, ``app``, + ``database``, and ``namespace``. 
""" - return ( + to_pickle = ( { "pairs": self.pairs(), "app": self.app(), "namespace": self.namespace(), }, ) + if self.database(): + to_pickle[0]["database"] = self.database() + return to_pickle def __setstate__(self, state): """Private API used for unpickling. @@ -427,7 +443,7 @@ def __setstate__(self, state): state (Tuple[Dict[str, Any]]): A tuple containing a single dictionary of pickled state. This should match the signature returned from :func:`__getstate__`, in particular, it should - have three keys ``pairs``, ``app`` and ``namespace``. + have four keys: ``pairs``, ``app``, ``database``, and ``namespace``. Raises: TypeError: If the ``state`` does not have length 1. @@ -447,8 +463,16 @@ def __setstate__(self, state): flat = _get_path(None, kwargs["pairs"]) _clean_flat_path(flat) project = _project_from_app(kwargs["app"]) + + database = None + if "database" in kwargs: + database = kwargs["database"] + self._key = _key_module.Key( - *flat, project=project, namespace=kwargs["namespace"] + *flat, + project=project, + namespace=kwargs["namespace"], + database=database, ) self._reference = None @@ -462,14 +486,15 @@ def __getnewargs__(self): Returns: Tuple[Dict[str, Any]]: A tuple containing a single dictionary of - state to pickle. The dictionary has three keys ``pairs``, ``app`` - and ``namespace``. + state to pickle. The dictionary has four keys: ``pairs``, ``app``, + ``database`` and ``namespace``. """ return ( { "pairs": self.pairs(), "app": self.app(), "namespace": self.namespace(), + "database": self.database() if self.database() is not None else None, }, ) @@ -565,6 +590,17 @@ def project(self): app = project + def database(self): + """The database ID for the key. + + .. doctest:: key-database + + >>> key = ndb.Key("A", "B", database="mydb") + >>> key.database() + 'mydb' + """ + return self._key.database + def id(self): """The string or integer ID in the last ``(kind, id)`` pair, if any. @@ -678,7 +714,7 @@ def reference(self): .. 
doctest:: key-reference - >>> key = ndb.Key("Trampoline", 88, project="xy", namespace="zt") + >>> key = ndb.Key("Trampoline", 88, project="xy", database="wv", namespace="zt") >>> key.reference() app: "xy" name_space: "zt" @@ -688,14 +724,23 @@ def reference(self): id: 88 } } + database_id: "wv" """ if self._reference is None: - self._reference = _app_engine_key_pb2.Reference( - app=self._key.project, - path=_to_legacy_path(self._key.path), - name_space=self._key.namespace, - ) + if self._key.database: + self._reference = _app_engine_key_pb2.Reference( + app=self._key.project, + path=_to_legacy_path(self._key.path), + database_id=self._key.database, + name_space=self._key.namespace, + ) + else: + self._reference = _app_engine_key_pb2.Reference( + app=self._key.project, + path=_to_legacy_path(self._key.path), + name_space=self._key.namespace, + ) return self._reference def serialized(self): @@ -703,9 +748,9 @@ def serialized(self): .. doctest:: key-serialized - >>> key = ndb.Key("Kind", 1337, project="example") + >>> key = ndb.Key("Kind", 1337, project="example", database="example-db") >>> key.serialized() - b'j\\x07exampler\\x0b\\x0b\\x12\\x04Kind\\x18\\xb9\\n\\x0c' + b'j\\x07exampler\\x0b\\x0b\\x12\\x04Kind\\x18\\xb9\\n\\x0c\\xba\\x01\\nexample-db' """ reference = self.reference() return reference.SerializeToString() @@ -730,6 +775,9 @@ def to_legacy_urlsafe(self, location_prefix): location prefix ("partition"), compatible with the Google Datastore admin console. + This only supports the default database. For a named database, + please use urlsafe() instead. + Arguments: location_prefix (str): A location prefix ("partition") to be prepended to the key's `project` when serializing the key. 
A @@ -742,9 +790,11 @@ def to_legacy_urlsafe(self, location_prefix): >>> key.to_legacy_urlsafe("s~") b'aglzfmV4YW1wbGVyCwsSBEtpbmQYuQoM' """ + if self._key.database: + raise ValueError("to_legacy_urlsafe only supports the default database") return google.cloud.datastore.Key( *self.flat(), - **{"namespace": self._key.namespace, "project": self._key.project} + **{"namespace": self._key.namespace, "project": self._key.project}, ).to_legacy_urlsafe(location_prefix=location_prefix) @_options.ReadOptions.options @@ -1085,7 +1135,7 @@ def _project_from_app(app, allow_empty=False): return parts[-1] -def _from_reference(reference, app, namespace): +def _from_reference(reference, app, namespace, database): """Convert Reference protobuf to :class:`~google.cloud.datastore.key.Key`. This is intended to work with the "legacy" representation of a @@ -1102,6 +1152,7 @@ def _from_reference(reference, app, namespace): app (Optional[str]): The application ID / project ID for the constructed key. namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. Returns: google.cloud.datastore.key.Key: The key corresponding to @@ -1110,6 +1161,8 @@ def _from_reference(reference, app, namespace): Raises: RuntimeError: If ``app`` is not :data:`None`, but not the same as ``reference.app``. + RuntimeError: If ``database`` is not :data:`None`, but not the same as + ``reference.database_id``. RuntimeError: If ``namespace`` is not :data:`None`, but not the same as ``reference.name_space``. 
""" @@ -1118,6 +1171,13 @@ def _from_reference(reference, app, namespace): if _project_from_app(app) != project: raise RuntimeError(_REFERENCE_APP_MISMATCH.format(reference.app, app)) + parsed_database = _key_module._get_empty(reference.database_id, "") + if database is not None: + if database != parsed_database: + raise RuntimeError( + _REFERENCE_DATABASE_MISMATCH.format(reference.database_id, database) + ) + parsed_namespace = _key_module._get_empty(reference.name_space, "") if namespace is not None: if namespace != parsed_namespace: @@ -1125,14 +1185,16 @@ def _from_reference(reference, app, namespace): _REFERENCE_NAMESPACE_MISMATCH.format(reference.name_space, namespace) ) - _key_module._check_database_id(reference.database_id) flat_path = _key_module._get_flat_path(reference.path) return google.cloud.datastore.Key( - *flat_path, project=project, namespace=parsed_namespace + *flat_path, + project=project, + database=parsed_database, + namespace=parsed_namespace, ) -def _from_serialized(serialized, app, namespace): +def _from_serialized(serialized, app, namespace, database): """Convert serialized protobuf to :class:`~google.cloud.datastore.key.Key`. This is intended to work with the "legacy" representation of a @@ -1145,6 +1207,7 @@ def _from_serialized(serialized, app, namespace): app (Optional[str]): The application ID / project ID for the constructed key. namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. 
Returns: Tuple[google.cloud.datastore.key.Key, .Reference]: The key @@ -1152,10 +1215,10 @@ def _from_serialized(serialized, app, namespace): """ reference = _app_engine_key_pb2.Reference() reference.ParseFromString(serialized) - return _from_reference(reference, app, namespace), reference + return _from_reference(reference, app, namespace, database), reference -def _from_urlsafe(urlsafe, app, namespace): +def _from_urlsafe(urlsafe, app, namespace, database): """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`. .. note:: @@ -1176,6 +1239,7 @@ def _from_urlsafe(urlsafe, app, namespace): app (Optional[str]): The application ID / project ID for the constructed key. namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. Returns: Tuple[google.cloud.datastore.key.Key, .Reference]: The key @@ -1186,7 +1250,7 @@ def _from_urlsafe(urlsafe, app, namespace): padding = b"=" * (-len(urlsafe) % 4) urlsafe += padding raw_bytes = base64.urlsafe_b64decode(urlsafe) - return _from_serialized(raw_bytes, app, namespace) + return _from_serialized(raw_bytes, app, namespace, database) def _constructor_handle_positional(path_args, kwargs): @@ -1252,6 +1316,7 @@ def _parse_from_ref( urlsafe=None, app=None, namespace=None, + database: str = None, **kwargs ): """Construct a key from a Reference. @@ -1273,6 +1338,7 @@ def _parse_from_ref( app (Optional[str]): The Google Cloud Platform project (previously on Google App Engine, this was called the Application ID). namespace (Optional[str]): The namespace for the key. + database (Optional[str]): The database for the Key. kwargs (Dict[str, Any]): Any extra keyword arguments not covered by the explicitly provided ones. These are passed through to indicate to the user that the wrong combination of arguments was used, e.g. 
@@ -1299,21 +1365,27 @@ def _parse_from_ref( ) if reference: - ds_key = _from_reference(reference, app, namespace) + ds_key = _from_reference(reference, app, namespace, database) elif serialized: - ds_key, reference = _from_serialized(serialized, app, namespace) + ds_key, reference = _from_serialized(serialized, app, namespace, database) else: # NOTE: We know here that ``urlsafe`` is truth-y; # ``_exactly_one_specified()`` guarantees this. - ds_key, reference = _from_urlsafe(urlsafe, app, namespace) + ds_key, reference = _from_urlsafe(urlsafe, app, namespace, database) return ds_key, reference def _parse_from_args( - pairs=None, flat=None, project=None, app=None, namespace=UNDEFINED, parent=None + pairs=None, + flat=None, + project=None, + app=None, + namespace=UNDEFINED, + parent=None, + database=UNDEFINED, ): - """Construct a key the path (and possibly a parent key). + """Construct a key from the path (and possibly a parent key). Args: pairs (Optional[Iterable[Tuple[str, Union[str, int]]]]): An iterable @@ -1329,6 +1401,9 @@ def _parse_from_args( parent (Optional[~.ndb.key.Key]): The parent of the key being constructed. If provided, the key path will be **relative** to the parent key's path. + database (Optional[str]): The database for the key. + Defaults to that of the client if a parent was specified, and + to the default database if it was not. Returns: ~.datastore.Key: The constructed key. 
@@ -1350,9 +1425,12 @@ def _parse_from_args( parent_ds_key = None if parent is None: project = _project_from_app(app) + if namespace is UNDEFINED: - context = context_module.get_context() - namespace = context.get_namespace() + namespace = context_module.get_context().get_namespace() + + if database is UNDEFINED: + database = context_module.get_context().client.database else: project = _project_from_app(app, allow_empty=True) @@ -1364,14 +1442,24 @@ def _parse_from_args( if namespace is UNDEFINED: namespace = None + if database is UNDEFINED: + database = None + # Offload verification of parent to ``google.cloud.datastore.Key()``. parent_ds_key = parent._key + if database == "": + database = None + if namespace == "": namespace = None return google.cloud.datastore.Key( - *flat, parent=parent_ds_key, project=project, namespace=namespace + *flat, + parent=parent_ds_key, + project=project, + database=database, + namespace=namespace, ) diff --git a/google/cloud/ndb/model.py b/google/cloud/ndb/model.py index 6b4382c0..b780f6a5 100644 --- a/google/cloud/ndb/model.py +++ b/google/cloud/ndb/model.py @@ -22,9 +22,10 @@ client = mock.Mock( project="testing", + database=None, namespace=None, stub=mock.Mock(spec=()), - spec=("project", "namespace", "stub"), + spec=("project", "namespace", "database", "stub"), ) context = context_module.Context(client).use() context.__enter__() @@ -4696,13 +4697,14 @@ def _get_kind(cls): >>> MyModel(value=7.34e22, description="Mass of the moon") MyModel(description='Mass of the moon', value=7.34e+22) - In addition to user-defined properties, there are six accepted keyword + In addition to user-defined properties, there are seven accepted keyword arguments: * ``key`` * ``id`` * ``app`` * ``namespace`` + * ``database`` * ``parent`` * ``projection`` @@ -4808,12 +4810,13 @@ class MyModel(ndb.Model): namespace (str): Namespace for the entity key. project (str): Project ID for the entity key. app (str): DEPRECATED: Synonym for ``project``. 
+ database (str): Database for the entity key. kwargs (Dict[str, Any]): Additional keyword arguments. These should map to properties of this model. Raises: .BadArgumentError: If the constructor is called with ``key`` and one - of ``id``, ``app``, ``namespace`` or ``parent`` specified. + of ``id``, ``app``, ``namespace``, ``database``, or ``parent`` specified. """ # Class variables updated by _fix_up_properties() @@ -4861,6 +4864,7 @@ def __init__(_self, **kwargs): id_ = self._get_arg(kwargs, "id") project = self._get_arg(kwargs, "project") app = self._get_arg(kwargs, "app") + database = self._get_arg(kwargs, "database", key_module.UNDEFINED) namespace = self._get_arg(kwargs, "namespace", key_module.UNDEFINED) parent = self._get_arg(kwargs, "parent") projection = self._get_arg(kwargs, "projection") @@ -4877,13 +4881,14 @@ def __init__(_self, **kwargs): id_ is None and parent is None and project is None + and database is key_module.UNDEFINED and namespace is key_module.UNDEFINED ) if key is not None: if not key_parts_unspecified: raise exceptions.BadArgumentError( "Model constructor given 'key' does not accept " - "'id', 'project', 'app', 'namespace', or 'parent'." + "'id', 'project', 'app', 'namespace', 'database', or 'parent'." ) self._key = _validate_key(key, entity=self) elif not key_parts_unspecified: @@ -4892,6 +4897,7 @@ def __init__(_self, **kwargs): id_, parent=parent, project=project, + database=database, namespace=namespace, ) @@ -5714,6 +5720,7 @@ def _get_by_id( max_memcache_items=None, force_writes=None, _options=None, + database=None, ): """Get an instance of Model class by ID. @@ -5757,6 +5764,8 @@ def _get_by_id( ``global_cache_timeout``. max_memcache_items (int): No longer supported. force_writes (bool): No longer supported. + database (Optional[str]): Database for the entity to load. If not + passed, uses the client's value. Returns: Optional[Model]: The retrieved entity, if one is found. 
@@ -5768,6 +5777,7 @@ def _get_by_id( project=project, app=app, _options=_options, + database=database, ).result() get_by_id = _get_by_id @@ -5797,6 +5807,7 @@ def _get_by_id_async( max_memcache_items=None, force_writes=None, _options=None, + database: str = None, ): """Get an instance of Model class by ID. diff --git a/google/cloud/ndb/query.py b/google/cloud/ndb/query.py index eea2568c..fdcdacd5 100644 --- a/google/cloud/ndb/query.py +++ b/google/cloud/ndb/query.py @@ -140,6 +140,7 @@ def ranked(cls, rank): import logging import six +from google.cloud.ndb import context as context_module from google.cloud.ndb import exceptions from google.cloud.ndb import _options from google.cloud.ndb import tasklets @@ -1228,6 +1229,7 @@ class QueryOptions(_options.ReadOptions): "group_by", "namespace", "project", + "database", # Fetch options "keys_only", "limit", @@ -1266,6 +1268,9 @@ def __init__(self, config=None, context=None, **kwargs): if not self.project: self.project = context.client.project + # We always use the client's database, for consistency with python-datastore + self.database = context.client.database + if self.namespace is None: if self.ancestor is None: self.namespace = context.get_namespace() @@ -1375,6 +1380,9 @@ def __init__( offset = self._option("offset", offset) keys_only = self._option("keys_only", keys_only) + # Except in the case of ancestor queries, we always use the client's database + database = context_module.get_context().client.database or None + if ancestor is not None: if isinstance(ancestor, ParameterizedThing): if isinstance(ancestor, ParameterizedFunction): @@ -1394,6 +1402,9 @@ def __init__( raise TypeError("ancestor/project id mismatch") else: project = ancestor.app() + + database = ancestor.database() + if namespace is not None: # if namespace is the empty string, that means default # namespace, but after a put, if the ancestor is using @@ -1405,6 +1416,7 @@ def __init__( raise TypeError("ancestor/namespace mismatch") else: namespace = 
ancestor.namespace() + if filters is not None: if not isinstance(filters, Node): raise TypeError( @@ -1431,6 +1443,7 @@ def __init__( self.filters = filters self.order_by = order_by self.project = project + self.database = database self.namespace = namespace self.limit = limit self.offset = offset diff --git a/noxfile.py b/noxfile.py index e078dc01..6b2580ae 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,7 +27,6 @@ NOX_DIR = os.path.abspath(os.path.dirname(__file__)) DEFAULT_INTERPRETER = "3.8" ALL_INTERPRETERS = ("3.7", "3.8", "3.9", "3.10", "3.11") -MAJOR_INTERPRETERS = "3.8" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() BLACK_VERSION = "black==22.3.0" @@ -160,7 +159,8 @@ def doctest(session): session.run(*run_args) -@nox.session(py=MAJOR_INTERPRETERS) +# Run the system tests +@nox.session(py=DEFAULT_INTERPRETER) def system(session): """Run the system test suite.""" constraints_path = str( diff --git a/setup.py b/setup.py index f4300806..1b8dbe73 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ def main(): readme = readme_file.read() dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-cloud-datastore >= 2.7.2, <3.0.0dev", + "google-cloud-datastore >= 2.16.0, < 3.0.0dev", "protobuf >= 3.19.5, <5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pymemcache >= 2.1.0, < 5.0.0dev", "redis >= 3.0.0, < 5.0.0dev", diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 70f746f0..ef05b87c 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-cloud-datastore==2.7.2 +google-cloud-datastore==2.16.0 google-api-core==1.34.0 protobuf==3.19.5 pymemcache==2.1.0 diff --git a/tests/conftest.py b/tests/conftest.py index 3ed9baf6..c8d6b07d 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -88,8 +88,9 @@ def context_factory(): def context(**kwargs): client = mock.Mock( project="testing", + database=None, namespace=None, - spec=("project", "namespace"), + spec=("project", "database", "namespace"), stub=mock.Mock(spec=()), ) context = context_module.Context( @@ -117,20 +118,23 @@ def in_context(context): assert not context_module._state.context +@pytest.fixture +def database(): + return "testdb" + + @pytest.fixture def namespace(): return "UnitTest" @pytest.fixture -def client_context(namespace): +def client_context(namespace, database): from google.cloud import ndb client = ndb.Client() context_manager = client.context( - cache_policy=False, - legacy_data=False, - namespace=namespace, + cache_policy=False, legacy_data=False, database=database, namespace=namespace ) with context_manager as context: yield context diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py new file mode 100644 index 00000000..26d3de77 --- /dev/null +++ b/tests/system/_helpers.py @@ -0,0 +1,18 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from os import getenv + +_DATASTORE_DATABASE = "SYSTEM_TESTS_DATABASE" +TEST_DATABASE = getenv(_DATASTORE_DATABASE, "system-tests-named-db") diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 1878a7b5..82e61762 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -11,7 +11,7 @@ from google.cloud.ndb import global_cache as global_cache_module -from . import KIND, OTHER_KIND +from . import KIND, OTHER_KIND, _helpers log = logging.getLogger(__name__) @@ -19,7 +19,13 @@ @pytest.fixture(scope="session", autouse=True) def preclean(): """Clean out default namespace in test database.""" - ds_client = _make_ds_client(None) + _preclean(None, None) + if _helpers.TEST_DATABASE: + _preclean(_helpers.TEST_DATABASE, None) + + +def _preclean(database, namespace): + ds_client = _make_ds_client(database, namespace) for kind in (KIND, OTHER_KIND): query = ds_client.query(kind=kind) query.keys_only() @@ -28,12 +34,17 @@ def preclean(): ds_client.delete_multi(keys) -def _make_ds_client(namespace): +def _make_ds_client(database, namespace): emulator = bool(os.environ.get("DATASTORE_EMULATOR_HOST")) if emulator: - client = datastore.Client(namespace=namespace, _http=requests.Session) + client = datastore.Client( + database=database, namespace=namespace, _http=requests.Session + ) else: - client = datastore.Client(namespace=namespace) + client = datastore.Client(database=database, namespace=namespace) + + assert client.database == database + assert client.namespace == namespace return client @@ -57,8 +68,11 @@ def to_delete(): @pytest.fixture -def ds_client(namespace): - return _make_ds_client(namespace) +def ds_client(database_id, namespace): + client = _make_ds_client(database_id, namespace) + assert client.database == database_id + assert client.namespace == namespace + return client @pytest.fixture @@ -75,7 +89,7 @@ def with_ds_client(ds_client, to_delete, deleted_keys, other_namespace): not_deleted = [ entity for entity in 
all_entities(ds_client, other_namespace) - if entity.key not in deleted_keys + if fix_key_db(entity.key, ds_client.database) not in deleted_keys ] if not_deleted: log.warning("CLEAN UP: Entities not deleted from test: {}".format(not_deleted)) @@ -113,14 +127,40 @@ def make_entity(*key_args, **entity_kwargs): yield make_entity +# Workaround: datastore batches reject if key.database is None and client.database == "" +# or vice-versa. This should be fixed, but for now just fix the keys +# See https://github.com/googleapis/python-datastore/issues/460 +def fix_key_db(key, database): + if key.database: + return key + else: + fixed_key = key.__class__( + *key.flat_path, + project=key.project, + database=database, + namespace=key.namespace + ) + # If the current parent has already been set, we re-use + # the same instance + fixed_key._parent = key._parent + return fixed_key + + @pytest.fixture def dispose_of(with_ds_client, to_delete): def delete_entity(*ds_keys): - to_delete.extend(ds_keys) + to_delete.extend( + map(lambda key: fix_key_db(key, with_ds_client.database), ds_keys) + ) return delete_entity +@pytest.fixture(params=["", _helpers.TEST_DATABASE]) +def database_id(request): + return request.param + + @pytest.fixture def namespace(): return str(uuid.uuid4()) @@ -132,8 +172,9 @@ def other_namespace(): @pytest.fixture -def client_context(namespace): - client = ndb.Client() +def client_context(database_id, namespace): + client = ndb.Client(database=database_id) + assert client.database == database_id context_manager = client.context( cache_policy=False, legacy_data=False, diff --git a/tests/system/test_crud.py b/tests/system/test_crud.py index cff12c91..9aeb0960 100644 --- a/tests/system/test_crud.py +++ b/tests/system/test_crud.py @@ -373,8 +373,8 @@ class SomeKind(ndb.Model): assert retrieved.bar == datetime.datetime(2010, 5, 11, 22, 42, tzinfo=mytz) -def test_parallel_threads(dispose_of, namespace): - client = ndb.Client(namespace=namespace) +def 
test_parallel_threads(dispose_of, database_id, namespace): + client = ndb.Client(database=database_id, namespace=namespace) class SomeKind(ndb.Model): foo = ndb.IntegerProperty() diff --git a/tests/system/test_metadata.py b/tests/system/test_metadata.py index b3a74376..3d0eee61 100644 --- a/tests/system/test_metadata.py +++ b/tests/system/test_metadata.py @@ -17,6 +17,8 @@ """ import pytest +from importlib import reload + from google.cloud import ndb from test_utils import retry @@ -26,8 +28,13 @@ @pytest.mark.usefixtures("client_context") -def test_kind_metadata(dispose_of): - from google.cloud.ndb.metadata import Kind +def test_kind_metadata(dispose_of, database_id): + # ndb.Model._kind_map gets reset in-between parameterized test runs, which results in failed kind lookups for the + # Kind metadata when we query later. Importing the metadata module has the effect of priming the kind map, + # so force a reload here to retrigger it. + from google.cloud.ndb import metadata + + reload(metadata) class AnyKind(ndb.Model): foo = ndb.IntegerProperty() @@ -35,17 +42,21 @@ class AnyKind(ndb.Model): class MyKind(ndb.Model): bar = ndb.StringProperty() - entity1 = AnyKind(foo=1, id="x", namespace="_test_namespace_") + entity1 = AnyKind(foo=1, id="x", database=database_id, namespace="_test_namespace_") entity1.put() dispose_of(entity1.key._key) - entity2 = MyKind(bar="x", id="x", namespace="_test_namespace_") + entity2 = MyKind( + bar="x", id="x", database=database_id, namespace="_test_namespace_" + ) entity2.put() dispose_of(entity2.key._key) @_retry_assertion_errors def query_metadata(): - query = ndb.Query(kind=Kind.KIND_NAME, namespace="_test_namespace_") + query = ndb.Query( + kind=ndb.metadata.Kind.KIND_NAME, namespace="_test_namespace_" + ) # database is implicit results = query.fetch() kinds = [result.kind_name for result in results] assert all(kind in kinds for kind in ["AnyKind", "MyKind"]) diff --git a/tests/system/test_misc.py b/tests/system/test_misc.py index 
d5bd42ae..3cb2e3d5 100644 --- a/tests/system/test_misc.py +++ b/tests/system/test_misc.py @@ -349,7 +349,7 @@ def callback(): @pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") @pytest.mark.usefixtures("client_context") -def test_parallel_threads_lookup_w_redis_cache(namespace, dispose_of): +def test_parallel_threads_lookup_w_redis_cache(database_id, namespace, dispose_of): """Regression test for #496 https://github.com/googleapis/python-ndb/issues/496 @@ -362,7 +362,7 @@ def mset(self, mapping): return super(MonkeyPipeline, self).mset(mapping) with mock.patch("redis.client.Pipeline", MonkeyPipeline): - client = ndb.Client() + client = ndb.Client(database=database_id) global_cache = ndb.RedisCache.from_environment() activity = {"calls": 0} diff --git a/tests/system/test_query.py b/tests/system/test_query.py index 506e5aba..df00a6b6 100644 --- a/tests/system/test_query.py +++ b/tests/system/test_query.py @@ -342,7 +342,7 @@ class SomeKind(ndb.Model): assert results[0].key.namespace() == other_namespace -def test_namespace_set_on_client_with_id(dispose_of, other_namespace): +def test_namespace_set_on_client_with_id(dispose_of, database_id, other_namespace): """Regression test for #337 https://github.com/googleapis/python-ndb/issues/337 @@ -352,7 +352,7 @@ class SomeKind(ndb.Model): foo = ndb.IntegerProperty() bar = ndb.StringProperty() - client = ndb.Client(namespace=other_namespace) + client = ndb.Client(namespace=other_namespace, database=database_id) with client.context(cache_policy=False): id = test_utils.system.unique_resource_id() entity1 = SomeKind(id=id, foo=1, bar="a") @@ -784,6 +784,7 @@ def test_multiquery_with_order_key_property(ds_entity, client_context): https://github.com/googleapis/python-ndb/issues/629 """ project = client_context.client.project + database = client_context.client.database namespace = client_context.get_namespace() for i in range(5): @@ -793,7 +794,11 @@ def 
test_multiquery_with_order_key_property(ds_entity, client_context): entity_id, foo=i, bar=ds_key_module.Key( - "test_key", i + 1, project=project, namespace=namespace + "test_key", + i + 1, + project=project, + database=database, + namespace=namespace, ), ) @@ -1923,6 +1928,7 @@ class SomeKind(ndb.Model): @pytest.mark.usefixtures("client_context") def test_Key(ds_entity, client_context): project = client_context.client.project + database = client_context.client.database namespace = client_context.get_namespace() for i in range(5): entity_id = test_utils.system.unique_resource_id() @@ -1930,7 +1936,11 @@ def test_Key(ds_entity, client_context): KIND, entity_id, foo=ds_key_module.Key( - "test_key", i + 1, project=project, namespace=namespace + "test_key", + i + 1, + project=project, + database=database, + namespace=namespace, ), ) diff --git a/tests/unit/test__datastore_api.py b/tests/unit/test__datastore_api.py index 70739f51..783134b4 100644 --- a/tests/unit/test__datastore_api.py +++ b/tests/unit/test__datastore_api.py @@ -127,9 +127,10 @@ def test_explicit_timeout(stub, _retry): future.set_result("bar") request = object() - call = _api.make_call("foo", request, retries=0, timeout=20) + metadata = object() + call = _api.make_call("foo", request, retries=0, timeout=20, metadata=metadata) assert call.result() == "bar" - api.foo.future.assert_called_once_with(request, timeout=20) + api.foo.future.assert_called_once_with(request, timeout=20, metadata=metadata) @staticmethod @pytest.mark.usefixtures("in_context") @@ -560,22 +561,31 @@ def key_pb(key): def test__datastore_lookup(datastore_pb2, context): client = mock.Mock( project="theproject", + database="testdb", stub=mock.Mock(spec=("lookup",)), - spec=("project", "stub"), + spec=("project", "database", "stub"), ) with context.new(client=client).use() as context: client.stub.lookup = lookup = mock.Mock(spec=("future",)) future = tasklets.Future() future.set_result("response") lookup.future.return_value = future + 
datastore_pb2.LookupRequest.return_value.project_id = "theproject" + datastore_pb2.LookupRequest.return_value.database_id = "testdb" assert _api._datastore_lookup(["foo", "bar"], None).result() == "response" datastore_pb2.LookupRequest.assert_called_once_with( - project_id="theproject", keys=["foo", "bar"], read_options=None + project_id="theproject", + database_id="testdb", + keys=["foo", "bar"], + read_options=None, ) client.stub.lookup.future.assert_called_once_with( datastore_pb2.LookupRequest.return_value, timeout=_api._DEFAULT_TIMEOUT, + metadata=( + ("x-goog-request-params", "project_id=theproject&database_id=testdb"), + ), ) @@ -1236,6 +1246,7 @@ def test_wo_transaction(stub, datastore_pb2): datastore_pb2.CommitRequest.assert_called_once_with( project_id="testing", + database_id=None, mode=datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, mutations=mutations, transaction=None, @@ -1258,6 +1269,7 @@ def test_w_transaction(stub, datastore_pb2): datastore_pb2.CommitRequest.assert_called_once_with( project_id="testing", + database_id=None, mode=datastore_pb2.CommitRequest.Mode.TRANSACTIONAL, mutations=mutations, transaction=b"tx123", @@ -1349,7 +1361,7 @@ def test__datastore_allocate_ids(stub, datastore_pb2): assert _api._datastore_allocate_ids(keys).result() == "response" datastore_pb2.AllocateIdsRequest.assert_called_once_with( - project_id="testing", keys=keys + project_id="testing", database_id=None, keys=keys ) request = datastore_pb2.AllocateIdsRequest.return_value @@ -1389,7 +1401,9 @@ def test_read_only(stub, datastore_pb2): transaction_options = datastore_pb2.TransactionOptions.return_value datastore_pb2.BeginTransactionRequest.assert_called_once_with( - project_id="testing", transaction_options=transaction_options + project_id="testing", + database_id=None, + transaction_options=transaction_options, ) request = datastore_pb2.BeginTransactionRequest.return_value @@ -1412,7 +1426,9 @@ def test_read_write(stub, datastore_pb2): transaction_options = 
datastore_pb2.TransactionOptions.return_value datastore_pb2.BeginTransactionRequest.assert_called_once_with( - project_id="testing", transaction_options=transaction_options + project_id="testing", + database_id=None, + transaction_options=transaction_options, ) request = datastore_pb2.BeginTransactionRequest.return_value @@ -1443,7 +1459,7 @@ def test__datastore_rollback(stub, datastore_pb2): assert _api._datastore_rollback(b"tx123").result() == "response" datastore_pb2.RollbackRequest.assert_called_once_with( - project_id="testing", transaction=b"tx123" + project_id="testing", database_id=None, transaction=b"tx123" ) request = datastore_pb2.RollbackRequest.return_value @@ -1460,3 +1476,28 @@ def __init__(self, id=None, name=None): assert not _api._complete(mock.Mock(path=[MockElement()])) assert _api._complete(mock.Mock(path=[MockElement(id=1)])) assert _api._complete(mock.Mock(path=[MockElement(name="himom")])) + + +@pytest.mark.parametrize( + "project_id,database_id,expected", + [ + ("a", "b", "project_id=a&database_id=b"), + ("a", "", "project_id=a"), + ("", "b", "database_id=b"), + ], +) +def test__add_routing_info(project_id, database_id, expected): + expected_new_metadata = ("x-goog-request-params", expected) + request = datastore_pb2.LookupRequest( + project_id=project_id, database_id=database_id + ) + assert _api._add_routing_info((), request) == (expected_new_metadata,) + assert _api._add_routing_info(("already=there",), request) == ( + "already=there", + expected_new_metadata, + ) + + +def test__add_routing_info_no_request_info(): + request = datastore_pb2.LookupRequest() + assert _api._add_routing_info((), request) == () diff --git a/tests/unit/test__datastore_query.py b/tests/unit/test__datastore_query.py index fc4aca8a..83d25546 100644 --- a/tests/unit/test__datastore_query.py +++ b/tests/unit/test__datastore_query.py @@ -2020,14 +2020,17 @@ def test_it(_datastore_api): read_options = datastore_pb2.ReadOptions() request = 
datastore_pb2.RunQueryRequest( project_id="testing", + database_id=None, partition_id=entity_pb2.PartitionId(project_id="testing", namespace_id=""), query=query_pb, read_options=read_options, ) + metadata = ("x-goog-request-params", "project_id=testing") + _datastore_api._add_routing_info.return_value = metadata _datastore_api.get_read_options.return_value = read_options assert _datastore_query._datastore_run_query(query).result() == "foo" _datastore_api.make_call.assert_called_once_with( - "run_query", request, timeout=None + "run_query", request, timeout=None, metadata=metadata ) _datastore_api.get_read_options.assert_called_once_with(query) diff --git a/tests/unit/test__gql.py b/tests/unit/test__gql.py index a8caa069..ee9371c8 100644 --- a/tests/unit/test__gql.py +++ b/tests/unit/test__gql.py @@ -60,6 +60,11 @@ def test_constructor(): gql = gql_module.GQL(GQL_QUERY) assert gql.kind() == "SomeKind" + @staticmethod + def test_constructor_with_namespace(): + gql = gql_module.GQL(GQL_QUERY, namespace="test-namespace") + assert gql._namespace == "test-namespace" + @staticmethod def test_constructor_bad_query(): with pytest.raises(exceptions.BadQueryError): @@ -278,13 +283,13 @@ class SomeKind(model.Model): prop4 = model.IntegerProperty() rep = ( - "Query(kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "Query(namespace='test-namespace', kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" "'prop4', reverse=False), PropertyOrder(name='prop1', " "reverse=True)], limit=10, offset=5, " "projection=['prop1', 'prop2'])" ) - gql = gql_module.GQL(GQL_QUERY) + gql = gql_module.GQL(GQL_QUERY, namespace="test-namespace") query = gql.get_query() compat_rep = "'xxx'" assert repr(query) == rep.format(compat_rep) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 302c1aa6..0f7019fc 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -45,9 +45,10 @@ def 
test_constructor_no_args(): with patch_credentials("testing"): client = client_module.Client() assert client.SCOPE == ("https://www.googleapis.com/auth/datastore",) - assert client.namespace is None assert client.host == _http.DATASTORE_API_HOST assert client.project == "testing" + assert client.database is None + assert client.namespace is None assert client.secure is True @staticmethod @@ -60,9 +61,10 @@ def test_constructor_no_args_emulator(): with patch_credentials("testing"): client = client_module.Client() assert client.SCOPE == ("https://www.googleapis.com/auth/datastore",) - assert client.namespace is None assert client.host == "foo" assert client.project == "testing" + assert client.database is None + assert client.namespace is None assert client.secure is False @staticmethod @@ -77,14 +79,16 @@ def test_constructor_all_args(): with patch_credentials("testing") as creds: client = client_module.Client( project="test-project", + database="test-database", namespace="test-namespace", credentials=creds, client_options=ClientOptions( api_endpoint="alternate-endpoint.example.com" ), ) - assert client.namespace == "test-namespace" assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" assert client.host == "alternate-endpoint.example.com" assert client.secure is True @@ -93,12 +97,14 @@ def test_constructor_client_options_as_dict(): with patch_credentials("testing") as creds: client = client_module.Client( project="test-project", + database="test-database", namespace="test-namespace", credentials=creds, client_options={"api_endpoint": "alternate-endpoint.example.com"}, ) - assert client.namespace == "test-namespace" assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" assert client.host == "alternate-endpoint.example.com" assert client.secure is True @@ -107,12 +113,14 @@ def 
test_constructor_client_options_no_api_endpoint(): with patch_credentials("testing") as creds: client = client_module.Client( project="test-project", + database="test-database", namespace="test-namespace", credentials=creds, client_options={"scopes": ["my_scope"]}, ) - assert client.namespace == "test-namespace" assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" assert client.host == _http.DATASTORE_API_HOST assert client.secure is True diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py index 151b1a52..e65338e9 100644 --- a/tests/unit/test_context.py +++ b/tests/unit/test_context.py @@ -61,7 +61,8 @@ def _make_one(self, **kwargs): client = mock.Mock( namespace=None, project="testing", - spec=("namespace", "project"), + database="testdb", + spec=("namespace", "project", "database"), stub=mock.Mock(spec=()), ) return context_module.Context(client, **kwargs) diff --git a/tests/unit/test_key.py b/tests/unit/test_key.py index df057dc6..58dbed48 100644 --- a/tests/unit/test_key.py +++ b/tests/unit/test_key.py @@ -57,6 +57,17 @@ def test_constructor_with_unicode(): assert key._key == google.cloud.datastore.Key("Kind", 42, project="testing") assert key._reference is None + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_different_database(context): + context.client.database = "DiffDatabase" + key = key_module.Key("Kind", 42) + + assert key._key == google.cloud.datastore.Key( + "Kind", 42, project="testing", database="DiffDatabase" + ) + assert key._reference is None + @staticmethod @pytest.mark.usefixtures("in_context") def test_constructor_with_different_namespace(context): @@ -125,6 +136,7 @@ def test_constructor_with_reference(): "Child", "Feather", project="sample-app", + database="base", namespace="space", ) assert key._reference is reference @@ -141,6 +153,23 @@ def test_constructor_with_serialized(): assert key._reference == 
make_reference( path=({"type": "Zorp", "id": 88},), app="s~sample-app-no-location", + database=None, + namespace=None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_serialized_with_database(): + serialized = b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c\xba\x01\tsample-db" + key = key_module.Key(serialized=serialized) + + assert key._key == google.cloud.datastore.Key( + "Zorp", 88, project="sample-app-no-location", database="sample-db" + ) + assert key._reference == make_reference( + path=({"type": "Zorp", "id": 88},), + app="s~sample-app-no-location", + database="sample-db", namespace=None, ) @@ -152,6 +181,7 @@ def test_constructor_with_urlsafe(self): assert key._reference == make_reference( path=({"type": "Kind", "name": "Thing"},), app="s~fire", + database=None, namespace=None, ) @@ -199,6 +229,24 @@ def test_constructor_with_project_and_app(): with pytest.raises(TypeError): key_module.Key("Kind", 10, project="foo", app="bar") + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_database_as_empty_string(): + key = key_module.Key("Kind", 1337, database="") + + assert key._key == google.cloud.datastore.Key("Kind", 1337, project="testing") + assert key.database() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_database(): + key = key_module.Key("Kind", 1337, database="foo") + + assert key._key == google.cloud.datastore.Key( + "Kind", 1337, project="testing", database="foo" + ) + assert key.database() == "foo" + @staticmethod @pytest.mark.usefixtures("in_context") def test_constructor_with_namespace(): @@ -237,6 +285,28 @@ def test_constructor_with_parent(self): ) assert key._reference is None + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_database(): + parent = key_module.Key("Kind", "Thing", project="fire", database="foo") + key = key_module.Key("Zip", 10, parent=parent, 
database="foo") + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire", database="foo" + ) + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_database_undefined(): + parent = key_module.Key("Kind", "Thing", project="fire", database="foo") + key = key_module.Key("Zip", 10, parent=parent) + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire", database="foo" + ) + assert key._reference is None + @pytest.mark.usefixtures("in_context") def test_constructor_with_parent_and_namespace(self): parent = key_module.Key(urlsafe=self.URLSAFE) @@ -308,9 +378,13 @@ def test___repr__defaults(): @staticmethod @pytest.mark.usefixtures("in_context") def test___repr__non_defaults(): - key = key_module.Key("X", 11, app="foo", namespace="bar") - assert repr(key) == "Key('X', 11, project='foo', namespace='bar')" - assert str(key) == "Key('X', 11, project='foo', namespace='bar')" + key = key_module.Key("X", 11, app="foo", namespace="bar", database="baz") + assert ( + repr(key) == "Key('X', 11, project='foo', database='baz', namespace='bar')" + ) + assert ( + str(key) == "Key('X', 11, project='foo', database='baz', namespace='bar')" + ) @staticmethod @pytest.mark.usefixtures("in_context") @@ -323,10 +397,11 @@ def test___hash__(): @staticmethod def test__tuple(): - key = key_module.Key("X", 11, app="foo", namespace="n") - assert key._tuple() == ("foo", "n", (("X", 11),)) + key = key_module.Key("X", 11, app="foo", database="d", namespace="n") + assert key._tuple() == ("foo", "n", "d", (("X", 11),)) @staticmethod + @pytest.mark.usefixtures("in_context") def test___eq__(): key1 = key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("Y", 12, app="foo", namespace="n") @@ -340,6 +415,7 @@ def test___eq__(): assert not key1 == key5 @staticmethod + @pytest.mark.usefixtures("in_context") def test___ne__(): key1 = 
key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("Y", 12, app="foo", namespace="n") @@ -355,68 +431,105 @@ def test___ne__(): assert not key1 != key6 @staticmethod + @pytest.mark.usefixtures("in_context") def test___lt__(): key1 = key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("Y", 12, app="foo", namespace="n") key3 = key_module.Key("X", 11, app="goo", namespace="n") key4 = key_module.Key("X", 11, app="foo", namespace="o") key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") assert not key1 < key1 assert key1 < key2 assert key1 < key3 assert key1 < key4 with pytest.raises(TypeError): key1 < key5 + assert key1 < key6 + assert key6 < key7 @staticmethod + @pytest.mark.usefixtures("in_context") def test___le__(): key1 = key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("Y", 12, app="foo", namespace="n") key3 = key_module.Key("X", 11, app="goo", namespace="n") key4 = key_module.Key("X", 11, app="foo", namespace="o") key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") assert key1 <= key1 assert key1 <= key2 assert key1 <= key3 assert key1 <= key4 with pytest.raises(TypeError): key1 <= key5 + assert key1 <= key6 + assert key6 <= key7 @staticmethod + @pytest.mark.usefixtures("in_context") def test___gt__(): key1 = key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("M", 10, app="foo", namespace="n") key3 = key_module.Key("X", 11, app="boo", namespace="n") key4 = key_module.Key("X", 11, app="foo", namespace="a") key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") assert not key1 > key1 assert key1 > key2 assert key1 > key3 
assert key1 > key4 with pytest.raises(TypeError): key1 > key5 + assert key6 > key1 + assert key7 > key6 @staticmethod + @pytest.mark.usefixtures("in_context") def test___ge__(): key1 = key_module.Key("X", 11, app="foo", namespace="n") key2 = key_module.Key("M", 10, app="foo", namespace="n") key3 = key_module.Key("X", 11, app="boo", namespace="n") key4 = key_module.Key("X", 11, app="foo", namespace="a") key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") assert key1 >= key1 assert key1 >= key2 assert key1 >= key3 assert key1 >= key4 with pytest.raises(TypeError): key1 >= key5 + assert key6 >= key1 + assert key7 >= key6 @staticmethod + @pytest.mark.usefixtures("in_context") def test_pickling(): key = key_module.Key("a", "b", app="c", namespace="d") pickled = pickle.dumps(key) unpickled = pickle.loads(pickled) assert key == unpickled + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pickling_with_default_database(): + key = key_module.Key("a", "b", app="c", namespace="d", database="") + pickled = pickle.dumps(key) + unpickled = pickle.loads(pickled) + assert key == unpickled + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pickling_with_database(): + key = key_module.Key("a", "b", app="c", namespace="d", database="e") + pickled = pickle.dumps(key) + unpickled = pickle.loads(pickled) + assert key == unpickled + @staticmethod @pytest.mark.usefixtures("in_context") def test___setstate__bad_state(): @@ -531,9 +644,14 @@ def test_kind(): @staticmethod @pytest.mark.usefixtures("in_context") def test_reference(): - key = key_module.Key("This", "key", app="fire") + key = key_module.Key( + "This", "key", app="fire", database="db", namespace="namespace" + ) assert key.reference() == make_reference( - path=({"type": "This", "name": "key"},), app="fire", namespace=None + path=({"type": "This", "name": "key"},), + 
app="fire", + database="db", + namespace="namespace", ) @staticmethod @@ -605,6 +723,15 @@ def test_to_legacy_urlsafe_w_ancestor(): key2 = key_module.Key(urlsafe=urlsafe) assert key == key2 + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_to_legacy_urlsafe_named_database_unsupported(): + key = key_module.Key("d", 123, database="anydb") + with pytest.raises( + ValueError, match="to_legacy_urlsafe only supports the default database" + ): + key.to_legacy_urlsafe(location_prefix="s~") + @staticmethod @pytest.mark.usefixtures("in_context") @mock.patch("google.cloud.ndb._datastore_api") @@ -874,65 +1001,84 @@ def test_app_fallback(context): class Test__from_reference: def test_basic(self): reference = make_reference() - ds_key = key_module._from_reference(reference, None, None) + ds_key = key_module._from_reference(reference, None, None, None) assert ds_key == google.cloud.datastore.Key( "Parent", 59, "Child", "Feather", project="sample-app", + database="base", namespace="space", ) def test_matching_app(self): reference = make_reference() - ds_key = key_module._from_reference(reference, "s~sample-app", None) + ds_key = key_module._from_reference(reference, "s~sample-app", None, None) assert ds_key == google.cloud.datastore.Key( "Parent", 59, "Child", "Feather", project="sample-app", + database="base", namespace="space", ) def test_differing_app(self): reference = make_reference() with pytest.raises(RuntimeError): - key_module._from_reference(reference, "pickles", None) + key_module._from_reference(reference, "pickles", None, None) def test_matching_namespace(self): reference = make_reference() - ds_key = key_module._from_reference(reference, None, "space") + ds_key = key_module._from_reference(reference, None, "space", None) assert ds_key == google.cloud.datastore.Key( "Parent", 59, "Child", "Feather", project="sample-app", + database="base", namespace="space", ) def test_differing_namespace(self): reference = make_reference() with 
pytest.raises(RuntimeError): - key_module._from_reference(reference, None, "pickles") + key_module._from_reference(reference, None, "pickles", None) + + def test_matching_database(self): + reference = make_reference() + ds_key = key_module._from_reference(reference, None, None, "base") + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + + def test_differing_database(self): + reference = make_reference() + with pytest.raises(RuntimeError): + key_module._from_reference(reference, None, None, "turtles") class Test__from_serialized: @staticmethod def test_basic(): - serialized = ( - b"j\x0cs~sample-appr\x1e\x0b\x12\x06Parent\x18;\x0c\x0b\x12\x05" - b'Child"\x07Feather\x0c\xa2\x01\x05space' - ) - ds_key, reference = key_module._from_serialized(serialized, None, None) + serialized = b'j\x0cs~sample-appr\x1e\x0b\x12\x06Parent\x18;\x0c\x0b\x12\x05Child"\x07Feather\x0c\xa2\x01\x05space\xba\x01\x04base' + ds_key, reference = key_module._from_serialized(serialized, None, None, None) assert ds_key == google.cloud.datastore.Key( "Parent", 59, "Child", "Feather", project="sample-app", + database="base", namespace="space", ) assert reference == make_reference() @@ -940,13 +1086,14 @@ def test_basic(): @staticmethod def test_no_app_prefix(): serialized = b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c" - ds_key, reference = key_module._from_serialized(serialized, None, None) + ds_key, reference = key_module._from_serialized(serialized, None, None, None) assert ds_key == google.cloud.datastore.Key( "Zorp", 88, project="sample-app-no-location" ) assert reference == make_reference( path=({"type": "Zorp", "id": 88},), app="s~sample-app-no-location", + database=None, namespace=None, ) @@ -960,26 +1107,28 @@ def test_basic(): ) urlsafe_bytes = urlsafe.encode("ascii") for value in (urlsafe, urlsafe_bytes): - ds_key, reference = key_module._from_urlsafe(value, None, None) 
+ ds_key, reference = key_module._from_urlsafe(value, None, None, None) assert ds_key == google.cloud.datastore.Key( "Parent", 59, "Child", "Feather", project="sample-app", + database=None, namespace="space", ) - assert reference == make_reference() + assert reference == make_reference(database=None) @staticmethod def test_needs_padding(): urlsafe = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA" - ds_key, reference = key_module._from_urlsafe(urlsafe, None, None) + ds_key, reference = key_module._from_urlsafe(urlsafe, None, None, None) assert ds_key == google.cloud.datastore.Key("Kind", "Thing", project="fire") assert reference == make_reference( path=({"type": "Kind", "name": "Thing"},), app="s~fire", + database=None, namespace=None, ) @@ -1009,7 +1158,7 @@ def test_dict_positional(): @staticmethod def test_dict_positional_with_other_kwargs(): args = ({"flat": ("OtherKind", "Cheese"), "app": "ehp"},) - kwargs = {"namespace": "over-here"} + kwargs = {"namespace": "over-here", "database": "over-there"} with pytest.raises(TypeError): key_module._constructor_handle_positional(args, kwargs) @@ -1017,11 +1166,13 @@ def test_dict_positional_with_other_kwargs(): def make_reference( path=({"type": "Parent", "id": 59}, {"type": "Child", "name": "Feather"}), app="s~sample-app", + database="base", namespace="space", ): elements = [_app_engine_key_pb2.Path.Element(**element) for element in path] return _app_engine_key_pb2.Reference( app=app, path=_app_engine_key_pb2.Path(element=elements), + database_id=database, name_space=namespace, ) diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py index 0a93afe9..6cb0ac90 100644 --- a/tests/unit/test_model.py +++ b/tests/unit/test_model.py @@ -2616,6 +2616,16 @@ def test__from_base_type(): assert value.kind() == "Kynd" assert value.id() == 123 + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_equality(): + class KeyPropTestModel(model.Model): + k = model.KeyProperty() + + kptm1 = 
KeyPropTestModel(k=key_module.Key("k", 1)) + kptm2 = KeyPropTestModel(k=key_module.Key("k", 1, database="")) + assert kptm1 == kptm2 + class TestBlobKeyProperty: @staticmethod @@ -4680,6 +4690,7 @@ def test__check_properties_not_found(): model.Model._check_properties(properties) @staticmethod + @pytest.mark.usefixtures("in_context") def test_query(): class XModel(model.Model): x = model.IntegerProperty() @@ -4689,6 +4700,7 @@ class XModel(model.Model): assert query.filters == (XModel.x == 42) @staticmethod + @pytest.mark.usefixtures("in_context") def test_query_distinct(): class XModel(model.Model): x = model.IntegerProperty() @@ -4721,6 +4733,7 @@ class XModel(model.Model): XModel.query(distinct=True, group_by=("x",)) @staticmethod + @pytest.mark.usefixtures("in_context") def test_query_projection_of_unindexed_attribute(): class XModel(model.Model): x = model.IntegerProperty(indexed=False) @@ -6114,7 +6127,7 @@ def test_not_entity_proto_raises_error(): def test_with_key(): m = model.Model() pb = _legacy_entity_pb.EntityProto() - key = key_module.Key("a", "b", app="c", namespace="") + key = key_module.Key("a", "b", app="c", database="", namespace="") ent = m._from_pb(pb, key=key) assert ent.key == key @@ -6268,6 +6281,60 @@ def _get_kind(cls): assert entity.other.foo == 1 +class Test_Keyword_Name: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_project(): + class HasProjectProp(model.Model): + project = model.StringProperty() + + has_project_prop = HasProjectProp( + project="the-property", _project="the-ds-project" + ) + assert has_project_prop.project == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_app(): + class HasAppProp(model.Model): + app = model.StringProperty() + + has_app_prop = HasAppProp(app="the-property", _app="the-gae-app") + assert has_app_prop.app == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_database(): 
+ class HasDbProp(model.Model): + database = model.StringProperty() + + has_db_prop = HasDbProp(database="the-property", _database="the-ds-database") + assert has_db_prop.database == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_namespace(): + class HasNamespaceProp(model.Model): + namespace = model.StringProperty() + + has_namespace_prop = HasNamespaceProp( + namespace="the-property", _namespace="the-ds-namespace" + ) + assert has_namespace_prop.namespace == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_key(): + k = key_module.Key("HasKeyProp", "k") + + class HasKeyProp(model.Model): + key = model.StringProperty() + + has_key_prop = HasKeyProp(key="the-property", _key=k) + assert has_key_prop.key == "the-property" + assert has_key_prop._key == k + + def ManyFieldsFactory(): """Model type class factory. diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py index fb4ba4da..df7df55a 100644 --- a/tests/unit/test_query.py +++ b/tests/unit/test_query.py @@ -38,6 +38,7 @@ def test___all__(): class TestQueryOptions: @staticmethod + @pytest.mark.usefixtures("in_context") def test_constructor(): options = query_module.QueryOptions(kind="test", project="app") assert options.kind == "test" @@ -49,6 +50,18 @@ def test_constructor_with_config(): options = query_module.QueryOptions(config=config, kind="test", project="app") assert options.kind == "test" assert options.project == "app" + assert options.database is None + assert options.namespace == "config_test" + + @staticmethod + def test_constructor_with_config_specified_db(): + config = query_module.QueryOptions( + kind="other", namespace="config_test", database="config_test" + ) + options = query_module.QueryOptions(config=config, kind="test", project="app") + assert options.kind == "test" + assert options.project == "app" + assert options.database == "config_test" assert options.namespace == "config_test" 
@staticmethod @@ -76,11 +89,19 @@ def test__eq__(): @staticmethod def test_copy(): options = query_module.QueryOptions(kind="test", project="app") - options = options.copy(project="app2", namespace="foo") + options = options.copy(project="app2", database="bar", namespace="foo") assert options.kind == "test" assert options.project == "app2" + assert options.database == "bar" assert options.namespace == "foo" + @staticmethod + def test_explicitly_set_default_database(in_context): + with in_context.new().use() as context: + context.client.database = "newdb" + options = query_module.QueryOptions(context=context) + assert options.database == "newdb" + @staticmethod def test_explicitly_set_default_namespace(in_context): with in_context.new(namespace="somethingelse").use() as context: @@ -598,7 +619,7 @@ def test_constructor(): @staticmethod def test_constructor_with_key(): - key = key_module.Key("a", "b", app="c", namespace="d") + key = key_module.Key("a", "b", app="c", namespace="d", database="db") filter_node = query_module.FilterNode("name", "=", key) assert filter_node._name == "name" assert filter_node._opsymbol == "=" @@ -1202,6 +1223,7 @@ def test_OR(): class TestQuery: @staticmethod + @pytest.mark.usefixtures("in_context") def test_constructor(): query = query_module.Query(kind="Foo") assert query.kind == "Foo" @@ -2146,7 +2168,9 @@ def next(self): _datastore_query.iterate.assert_called_once_with( query_module.QueryOptions( - filters=query.filters, project="testing", limit=5 + filters=query.filters, + project="testing", + limit=5, ), raw=True, ) @@ -2183,7 +2207,9 @@ def next(self): _datastore_query.iterate.assert_called_once_with( query_module.QueryOptions( - project="testing", limit=5, start_cursor="cursor000" + project="testing", + limit=5, + start_cursor="cursor000", ), raw=True, ) @@ -2210,7 +2236,9 @@ def has_next_async(self): _datastore_query.iterate.assert_called_once_with( query_module.QueryOptions( - project="testing", limit=5, start_cursor="cursor000" 
+ project="testing", + limit=5, + start_cursor="cursor000", ), raw=True, ) @@ -2241,7 +2269,9 @@ def has_next_async(self): _datastore_query.iterate.assert_called_once_with( query_module.QueryOptions( - filters=query.filters, project="testing", limit=5 + filters=query.filters, + project="testing", + limit=5, ), raw=True, ) @@ -2275,7 +2305,8 @@ def next(self): assert more _datastore_query.iterate.assert_called_once_with( - query_module.QueryOptions(project="testing", limit=5), raw=True + query_module.QueryOptions(project="testing", limit=5), + raw=True, )