diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py
index 4543dba9..e7f0b690 100644
--- a/tests/unit/test__gapic.py
+++ b/tests/unit/test__gapic.py
@@ -12,86 +12,81 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 import mock
+import pytest
 
 from google.cloud.datastore.client import _HAVE_GRPC
 
 
-@unittest.skipUnless(_HAVE_GRPC, "No gRPC")
-class Test_make_datastore_api(unittest.TestCase):
-    def _call_fut(self, client):
-        from google.cloud.datastore._gapic import make_datastore_api
-
-        return make_datastore_api(client)
-
-    @mock.patch(
-        "google.cloud.datastore_v1.services.datastore.client.DatastoreClient",
-        return_value=mock.sentinel.ds_client,
+@pytest.mark.skipif(not _HAVE_GRPC, reason="No gRPC")
+@mock.patch(
+    "google.cloud.datastore_v1.services.datastore.client.DatastoreClient",
+    return_value=mock.sentinel.ds_client,
+)
+@mock.patch(
+    "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport",
+    return_value=mock.sentinel.transport,
+)
+@mock.patch(
+    "google.cloud.datastore._gapic.make_secure_channel",
+    return_value=mock.sentinel.channel,
+)
+def test_live_api(make_chan, mock_transport, mock_klass):
+    from google.cloud._http import DEFAULT_USER_AGENT
+    from google.cloud.datastore._gapic import make_datastore_api
+
+    base_url = "https://datastore.googleapis.com:443"
+    client = mock.Mock(
+        _base_url=base_url,
+        _credentials=mock.sentinel.credentials,
+        _client_info=mock.sentinel.client_info,
+        spec=["_base_url", "_credentials", "_client_info"],
     )
-    @mock.patch(
-        "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport",
-        return_value=mock.sentinel.transport,
-    )
-    @mock.patch(
-        "google.cloud.datastore._gapic.make_secure_channel",
-        return_value=mock.sentinel.channel,
-    )
-    def test_live_api(self, make_chan, mock_transport, mock_klass):
-        from google.cloud._http import DEFAULT_USER_AGENT
+    ds_api = make_datastore_api(client)
+    assert ds_api is mock.sentinel.ds_client
 
-        base_url = "https://datastore.googleapis.com:443"
-        client = mock.Mock(
-            _base_url=base_url,
-            _credentials=mock.sentinel.credentials,
-            _client_info=mock.sentinel.client_info,
-            spec=["_base_url", "_credentials", "_client_info"],
-        )
-        ds_api = self._call_fut(client)
-        self.assertIs(ds_api, mock.sentinel.ds_client)
+    mock_transport.assert_called_once_with(channel=mock.sentinel.channel)
 
-        mock_transport.assert_called_once_with(channel=mock.sentinel.channel)
+    make_chan.assert_called_once_with(
+        mock.sentinel.credentials, DEFAULT_USER_AGENT, "datastore.googleapis.com:443",
+    )
 
-        make_chan.assert_called_once_with(
-            mock.sentinel.credentials,
-            DEFAULT_USER_AGENT,
-            "datastore.googleapis.com:443",
-        )
+    mock_klass.assert_called_once_with(
+        transport=mock.sentinel.transport, client_info=mock.sentinel.client_info
+    )
 
-        mock_klass.assert_called_once_with(
-            transport=mock.sentinel.transport, client_info=mock.sentinel.client_info
-        )
 
-    @mock.patch(
-        "google.cloud.datastore_v1.services.datastore.client.DatastoreClient",
-        return_value=mock.sentinel.ds_client,
-    )
-    @mock.patch(
-        "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport",
-        return_value=mock.sentinel.transport,
-    )
-    @mock.patch(
-        "google.cloud.datastore._gapic.insecure_channel",
-        return_value=mock.sentinel.channel,
+@pytest.mark.skipif(not _HAVE_GRPC, reason="No gRPC")
+@mock.patch(
+    "google.cloud.datastore_v1.services.datastore.client.DatastoreClient",
+    return_value=mock.sentinel.ds_client,
+)
+@mock.patch(
+    "google.cloud.datastore_v1.services.datastore.transports.grpc.DatastoreGrpcTransport",
+    return_value=mock.sentinel.transport,
+)
+@mock.patch(
+    "google.cloud.datastore._gapic.insecure_channel",
+    return_value=mock.sentinel.channel,
+)
+def test_emulator(make_chan, mock_transport, mock_klass):
+    from google.cloud.datastore._gapic import make_datastore_api
+
+    host = "localhost:8901"
+    base_url = "http://" + host
+    client = mock.Mock(
+        _base_url=base_url,
+        _credentials=mock.sentinel.credentials,
+        _client_info=mock.sentinel.client_info,
+        spec=["_base_url", "_credentials", "_client_info"],
     )
-    def test_emulator(self, make_chan, mock_transport, mock_klass):
+    ds_api = make_datastore_api(client)
+    assert ds_api is mock.sentinel.ds_client
 
-        host = "localhost:8901"
-        base_url = "http://" + host
-        client = mock.Mock(
-            _base_url=base_url,
-            _credentials=mock.sentinel.credentials,
-            _client_info=mock.sentinel.client_info,
-            spec=["_base_url", "_credentials", "_client_info"],
-        )
-        ds_api = self._call_fut(client)
-        self.assertIs(ds_api, mock.sentinel.ds_client)
+    mock_transport.assert_called_once_with(channel=mock.sentinel.channel)
 
-        mock_transport.assert_called_once_with(channel=mock.sentinel.channel)
+    make_chan.assert_called_once_with(host)
 
-        make_chan.assert_called_once_with(host)
-
-        mock_klass.assert_called_once_with(
-            transport=mock.sentinel.transport, client_info=mock.sentinel.client_info
-        )
+    mock_klass.assert_called_once_with(
+        transport=mock.sentinel.transport, client_info=mock.sentinel.client_info
+    )
diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py
index 2e8da9e9..67f28ffe 100644
--- a/tests/unit/test__http.py
+++ b/tests/unit/test__http.py
@@ -12,830 +12,848 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
+import http.client
 
 import mock
-from http import client
-
+import pytest
 import requests
 
 
-class Test__make_retry_timeout_kwargs(unittest.TestCase):
-    @staticmethod
-    def _call_fut(retry, timeout):
-        from google.cloud.datastore._http import _make_retry_timeout_kwargs
+def test__make_retry_timeout_kwargs_w_empty():
+    from google.cloud.datastore._http import _make_retry_timeout_kwargs
 
-        return _make_retry_timeout_kwargs(retry, timeout)
+    expected = {}
+    assert _make_retry_timeout_kwargs(None, None) == expected
 
-    def test_empty(self):
-        expected = {}
-        self.assertEqual(self._call_fut(None, None), expected)
 
-    def test_w_retry(self):
-        retry = object()
-        expected = {"retry": retry}
-        self.assertEqual(self._call_fut(retry, None), expected)
+def test__make_retry_timeout_kwargs_w_retry():
+    from google.cloud.datastore._http import _make_retry_timeout_kwargs
 
-    def test_w_timeout(self):
-        timeout = 5.0
-        expected = {"timeout": timeout}
-        self.assertEqual(self._call_fut(None, timeout), expected)
+    retry = object()
+    expected = {"retry": retry}
+    assert _make_retry_timeout_kwargs(retry, None) == expected
 
-    def test_w_retry_w_timeout(self):
-        retry = object()
-        timeout = 5.0
-        expected = {"retry": retry, "timeout": timeout}
-        self.assertEqual(self._call_fut(retry, timeout), expected)
 
+def test__make_retry_timeout_kwargs_w_timeout():
+    from google.cloud.datastore._http import _make_retry_timeout_kwargs
 
-class Foo:
-    def __init__(self, bar=None, baz=None):
-        self.bar = bar
-        self.baz = baz
+    timeout = 5.0
+    expected = {"timeout": timeout}
+    assert _make_retry_timeout_kwargs(None, timeout) == expected
 
 
-class Test__make_request_pb(unittest.TestCase):
-    @staticmethod
-    def _call_fut(request, request_pb_type):
-        from google.cloud.datastore._http import _make_request_pb
+def test__make_retry_timeout_kwargs_w_both():
+    from google.cloud.datastore._http import _make_retry_timeout_kwargs
 
-        return _make_request_pb(request, request_pb_type)
+    retry = object()
+    timeout = 5.0
+    expected = {"retry": retry, "timeout": timeout}
+    assert _make_retry_timeout_kwargs(retry, timeout) == expected
 
-    def test_w_empty_dict(self):
-        request = {}
 
-        foo = self._call_fut(request, Foo)
+def test__make_request_pb_w_empty_dict():
+    from google.cloud.datastore._http import _make_request_pb
 
-        self.assertIsInstance(foo, Foo)
-        self.assertIsNone(foo.bar)
-        self.assertIsNone(foo.baz)
+    request = {}
 
-    def test_w_partial_dict(self):
-        request = {"bar": "Bar"}
+    foo = _make_request_pb(request, Foo)
 
-        foo = self._call_fut(request, Foo)
+    assert isinstance(foo, Foo)
+    assert foo.bar is None
+    assert foo.baz is None
 
-        self.assertIsInstance(foo, Foo)
-        self.assertEqual(foo.bar, "Bar")
-        self.assertIsNone(foo.baz)
 
-    def test_w_complete_dict(self):
-        request = {"bar": "Bar", "baz": "Baz"}
+def test__make_request_pb_w_partial_dict():
+    from google.cloud.datastore._http import _make_request_pb
 
-        foo = self._call_fut(request, Foo)
+    request = {"bar": "Bar"}
 
-        self.assertIsInstance(foo, Foo)
-        self.assertEqual(foo.bar, "Bar")
-        self.assertEqual(foo.baz, "Baz")
+    foo = _make_request_pb(request, Foo)
 
-    def test_w_instance(self):
-        passed = Foo()
+    assert isinstance(foo, Foo)
+    assert foo.bar == "Bar"
+    assert foo.baz is None
 
-        foo = self._call_fut(passed, Foo)
 
-        self.assertIs(foo, passed)
+def test__make_request_pb_w_complete_dict():
+    from google.cloud.datastore._http import _make_request_pb
 
+    request = {"bar": "Bar", "baz": "Baz"}
 
-class Test__request(unittest.TestCase):
-    @staticmethod
-    def _call_fut(*args, **kwargs):
-        from google.cloud.datastore._http import _request
+    foo = _make_request_pb(request, Foo)
 
-        return _request(*args, **kwargs)
+    assert isinstance(foo, Foo)
+    assert foo.bar == "Bar"
+    assert foo.baz == "Baz"
 
-    def _helper(self, retry=None, timeout=None):
-        from google.cloud import _http as connection_module
 
-        project = "PROJECT"
-        method = "METHOD"
-        data = b"DATA"
-        base_url = "http://api-url"
-        user_agent = "USER AGENT"
-        client_info = _make_client_info(user_agent)
-        response_data = "CONTENT"
+def test__make_request_pb_w_instance():
+    from google.cloud.datastore._http import _make_request_pb
 
-        http = _make_requests_session([_make_response(content=response_data)])
+    passed = Foo()
 
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    foo = _make_request_pb(passed, Foo)
 
-        response = self._call_fut(
-            http, project, method, data, base_url, client_info, **kwargs
-        )
-        self.assertEqual(response, response_data)
+    assert foo is passed
 
-        # Check that the mocks were called as expected.
-        expected_url = _build_expected_url(base_url, project, method)
-        expected_headers = {
-            "Content-Type": "application/x-protobuf",
-            "User-Agent": user_agent,
-            connection_module.CLIENT_INFO_HEADER: user_agent,
-        }
 
-        if retry is not None:
-            retry.assert_called_once_with(http.request)
+def _request_helper(retry=None, timeout=None):
+    from google.cloud import _http as connection_module
+    from google.cloud.datastore._http import _request
 
-        kwargs.pop("retry", None)
-        http.request.assert_called_once_with(
-            method="POST",
-            url=expected_url,
-            headers=expected_headers,
-            data=data,
-            **kwargs
-        )
+    project = "PROJECT"
+    method = "METHOD"
+    data = b"DATA"
+    base_url = "http://api-url"
+    user_agent = "USER AGENT"
+    client_info = _make_client_info(user_agent)
+    response_data = "CONTENT"
 
-    def test_ok(self):
-        self._helper()
+    http = _make_requests_session([_make_response(content=response_data)])
 
-    def test_w_retry(self):
-        retry = mock.MagicMock()
-        self._helper(retry=retry)
+    kwargs = _retry_timeout_kw(retry, timeout, http)
 
-    def test_w_timeout(self):
-        timeout = 5.0
-        self._helper(timeout=timeout)
+    response = _request(http, project, method, data, base_url, client_info, **kwargs)
+    assert response == response_data
 
-    def test_failure(self):
-        from google.cloud.exceptions import BadRequest
-        from google.rpc import code_pb2
-        from google.rpc import status_pb2
+    # Check that the mocks were called as expected.
+    expected_url = _build_expected_url(base_url, project, method)
+    expected_headers = {
+        "Content-Type": "application/x-protobuf",
+        "User-Agent": user_agent,
+        connection_module.CLIENT_INFO_HEADER: user_agent,
+    }
 
-        project = "PROJECT"
-        method = "METHOD"
-        data = "DATA"
-        uri = "http://api-url"
-        user_agent = "USER AGENT"
-        client_info = _make_client_info(user_agent)
+    if retry is not None:
+        retry.assert_called_once_with(http.request)
 
-        error = status_pb2.Status()
-        error.message = "Entity value is indexed."
-        error.code = code_pb2.FAILED_PRECONDITION
+    kwargs.pop("retry", None)
+    http.request.assert_called_once_with(
+        method="POST", url=expected_url, headers=expected_headers, data=data, **kwargs
+    )
 
-        http = _make_requests_session(
-            [_make_response(client.BAD_REQUEST, content=error.SerializeToString())]
-        )
 
-        with self.assertRaises(BadRequest) as exc:
-            self._call_fut(http, project, method, data, uri, client_info)
+def test__request_defaults():
+    _request_helper()
 
-        expected_message = "400 Entity value is indexed."
-        self.assertEqual(str(exc.exception), expected_message)
 
+def test__request_w_retry():
+    retry = mock.MagicMock()
+    _request_helper(retry=retry)
 
-class Test__rpc(unittest.TestCase):
-    @staticmethod
-    def _call_fut(*args, **kwargs):
-        from google.cloud.datastore._http import _rpc
 
-        return _rpc(*args, **kwargs)
+def test__request_w_timeout():
+    timeout = 5.0
+    _request_helper(timeout=timeout)
 
-    def _helper(self, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        http = object()
-        project = "projectOK"
-        method = "beginTransaction"
-        base_url = "test.invalid"
-        client_info = _make_client_info()
-        request_pb = datastore_pb2.BeginTransactionRequest(project_id=project)
+def test__request_failure():
+    from google.cloud.exceptions import BadRequest
+    from google.cloud.datastore._http import _request
+    from google.rpc import code_pb2
+    from google.rpc import status_pb2
 
-        response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc")
+    project = "PROJECT"
+    method = "METHOD"
+    data = "DATA"
+    uri = "http://api-url"
+    user_agent = "USER AGENT"
+    client_info = _make_client_info(user_agent)
 
-        kwargs = _make_retry_timeout_kwargs(retry, timeout)
+    error = status_pb2.Status()
+    error.message = "Entity value is indexed."
+    error.code = code_pb2.FAILED_PRECONDITION
 
-        patch = mock.patch(
-            "google.cloud.datastore._http._request",
-            return_value=response_pb._pb.SerializeToString(),
-        )
-        with patch as mock_request:
-            result = self._call_fut(
-                http,
-                project,
-                method,
-                base_url,
-                client_info,
-                request_pb,
-                datastore_pb2.BeginTransactionResponse,
-                **kwargs
-            )
-
-        self.assertEqual(result, response_pb._pb)
-
-        mock_request.assert_called_once_with(
+    session = _make_requests_session(
+        [_make_response(http.client.BAD_REQUEST, content=error.SerializeToString())]
+    )
+
+    with pytest.raises(BadRequest) as exc:
+        _request(session, project, method, data, uri, client_info)
+
+    expected_message = "400 Entity value is indexed."
+    assert exc.match(expected_message)
+
+
+def _rpc_helper(retry=None, timeout=None):
+    from google.cloud.datastore._http import _rpc
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    http = object()
+    project = "projectOK"
+    method = "beginTransaction"
+    base_url = "test.invalid"
+    client_info = _make_client_info()
+    request_pb = datastore_pb2.BeginTransactionRequest(project_id=project)
+
+    response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc")
+
+    kwargs = _retry_timeout_kw(retry, timeout)
+
+    patch = mock.patch(
+        "google.cloud.datastore._http._request",
+        return_value=response_pb._pb.SerializeToString(),
+    )
+    with patch as mock_request:
+        result = _rpc(
             http,
             project,
             method,
-            request_pb._pb.SerializeToString(),
             base_url,
             client_info,
+            request_pb,
+            datastore_pb2.BeginTransactionResponse,
             **kwargs
         )
 
-    def test_defaults(self):
-        self._helper()
+    assert result == response_pb._pb
 
-    def test_w_retry(self):
-        retry = mock.MagicMock()
-        self._helper(retry=retry)
+    mock_request.assert_called_once_with(
+        http,
+        project,
+        method,
+        request_pb._pb.SerializeToString(),
+        base_url,
+        client_info,
+        **kwargs
+    )
 
-    def test_w_timeout(self):
-        timeout = 5.0
-        self._helper(timeout=timeout)
 
+def test__rpc_defaults():
+    _rpc_helper()
 
-class TestHTTPDatastoreAPI(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore._http import HTTPDatastoreAPI
 
-        return HTTPDatastoreAPI
+def test__rpc_w_retry():
+    retry = mock.MagicMock()
+    _rpc_helper(retry=retry)
 
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
 
-    @staticmethod
-    def _make_query_pb(kind):
-        from google.cloud.datastore_v1.types import query as query_pb2
+def test__rpc_w_timeout():
+    timeout = 5.0
+    _rpc_helper(timeout=timeout)
 
-        return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)])
 
-    def test_constructor(self):
-        client = object()
-        ds_api = self._make_one(client)
-        self.assertIs(ds_api.client, client)
+def test_api_ctor():
+    client = object()
+    ds_api = _make_http_datastore_api(client)
+    assert ds_api.client is client
 
-    def _lookup_single_helper(
-        self,
-        read_consistency=None,
-        transaction=None,
-        empty=True,
-        retry=None,
-        timeout=None,
-    ):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
 
-        project = "PROJECT"
-        key_pb = _make_key_pb(project)
+def _lookup_single_helper(
+    read_consistency=None, transaction=None, empty=True, retry=None, timeout=None,
+):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
-        options_kw = {}
-        if read_consistency is not None:
-            options_kw["read_consistency"] = read_consistency
-        if transaction is not None:
-            options_kw["transaction"] = transaction
+    project = "PROJECT"
+    key_pb = _make_key_pb(project)
 
-        read_options = datastore_pb2.ReadOptions(**options_kw)
+    options_kw = {}
+    if read_consistency is not None:
+        options_kw["read_consistency"] = read_consistency
+    if transaction is not None:
+        options_kw["transaction"] = transaction
 
-        rsp_pb = datastore_pb2.LookupResponse()
+    read_options = datastore_pb2.ReadOptions(**options_kw)
 
-        if not empty:
-            entity = entity_pb2.Entity()
-            entity.key._pb.CopyFrom(key_pb._pb)
-            rsp_pb._pb.found.add(entity=entity._pb)
+    rsp_pb = datastore_pb2.LookupResponse()
 
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
-        ds_api = self._make_one(client)
-        request = {
-            "project_id": project,
-            "keys": [key_pb],
-            "read_options": read_options,
-        }
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    if not empty:
+        entity = entity_pb2.Entity()
+        entity.key._pb.CopyFrom(key_pb._pb)
+        rsp_pb._pb.found.add(entity=entity._pb)
 
-        response = ds_api.lookup(request=request, **kwargs)
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+    request = {
+        "project_id": project,
+        "keys": [key_pb],
+        "read_options": read_options,
+    }
+    kwargs = _retry_timeout_kw(retry, timeout, http)
 
-        self.assertEqual(response, rsp_pb._pb)
+    response = ds_api.lookup(request=request, **kwargs)
 
-        if empty:
-            self.assertEqual(len(response.found), 0)
-        else:
-            self.assertEqual(len(response.found), 1)
+    assert response == rsp_pb._pb
 
-        self.assertEqual(len(response.missing), 0)
-        self.assertEqual(len(response.deferred), 0)
+    if empty:
+        assert len(response.found) == 0
+    else:
+        assert len(response.found) == 1
 
-        uri = _build_expected_url(client._base_url, project, "lookup")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout,
-        )
+    assert len(response.missing) == 0
+    assert len(response.deferred) == 0
 
-        if retry is not None:
-            retry.assert_called_once_with(http.request)
-
-        self.assertEqual(list(request.keys), [key_pb._pb])
-        self.assertEqual(request.read_options, read_options._pb)
-
-    def test_lookup_single_key_miss(self):
-        self._lookup_single_helper()
-
-    def test_lookup_single_key_miss_w_read_consistency(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
-        self._lookup_single_helper(read_consistency=read_consistency)
-
-    def test_lookup_single_key_miss_w_transaction(self):
-        transaction = b"TRANSACTION"
-        self._lookup_single_helper(transaction=transaction)
-
-    def test_lookup_single_key_hit(self):
-        self._lookup_single_helper(empty=False)
-
-    def test_lookup_single_key_hit_w_retry(self):
-        retry = mock.MagicMock()
-        self._lookup_single_helper(empty=False, retry=retry)
-
-    def test_lookup_single_key_hit_w_timeout(self):
-        timeout = 5.0
-        self._lookup_single_helper(empty=False, timeout=timeout)
-
-    def _lookup_multiple_helper(
-        self, found=0, missing=0, deferred=0, retry=None, timeout=None,
-    ):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        project = "PROJECT"
-        key_pb1 = _make_key_pb(project)
-        key_pb2 = _make_key_pb(project, id_=2345)
-        keys = [key_pb1, key_pb2]
-        read_options = datastore_pb2.ReadOptions()
-
-        rsp_pb = datastore_pb2.LookupResponse()
-
-        found_keys = []
-        for i_found in range(found):
-            key = keys[i_found]
-            found_keys.append(key._pb)
-            entity = entity_pb2.Entity()
-            entity.key._pb.CopyFrom(key._pb)
-            rsp_pb._pb.found.add(entity=entity._pb)
-
-        missing_keys = []
-        for i_missing in range(missing):
-            key = keys[i_missing]
-            missing_keys.append(key._pb)
-            entity = entity_pb2.Entity()
-            entity.key._pb.CopyFrom(key._pb)
-            rsp_pb._pb.missing.add(entity=entity._pb)
-
-        deferred_keys = []
-        for i_deferred in range(deferred):
-            key = keys[i_deferred]
-            deferred_keys.append(key._pb)
-            rsp_pb._pb.deferred.append(key._pb)
-
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
-        ds_api = self._make_one(client)
-        request = {
-            "project_id": project,
-            "keys": keys,
-            "read_options": read_options,
-        }
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    uri = _build_expected_url(client._base_url, project, "lookup")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout,
+    )
+
+    if retry is not None:
+        retry.assert_called_once_with(http.request)
 
-        response = ds_api.lookup(request=request, **kwargs)
+    assert list(request.keys) == [key_pb._pb]
+    assert request.read_options == read_options._pb
 
-        self.assertEqual(response, rsp_pb._pb)
 
-        self.assertEqual([found.entity.key for found in response.found], found_keys)
-        self.assertEqual(
-            [missing.entity.key for missing in response.missing], missing_keys
-        )
-        self.assertEqual(list(response.deferred), deferred_keys)
+def test_api_lookup_single_key_miss():
+    _lookup_single_helper()
 
-        uri = _build_expected_url(client._base_url, project, "lookup")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(list(request.keys), [key_pb1._pb, key_pb2._pb])
-        self.assertEqual(request.read_options, read_options._pb)
-
-    def test_lookup_multiple_keys_w_empty_response(self):
-        self._lookup_multiple_helper()
-
-    def test_lookup_multiple_keys_w_retry(self):
-        retry = mock.MagicMock()
-        self._lookup_multiple_helper(retry=retry)
-
-    def test_lookup_multiple_keys_w_timeout(self):
-        timeout = 5.0
-        self._lookup_multiple_helper(timeout=timeout)
-
-    def test_lookup_multiple_keys_w_found(self):
-        self._lookup_multiple_helper(found=2)
-
-    def test_lookup_multiple_keys_w_missing(self):
-        self._lookup_multiple_helper(missing=2)
-
-    def test_lookup_multiple_keys_w_deferred(self):
-        self._lookup_multiple_helper(deferred=2)
-
-    def _run_query_helper(
-        self,
-        read_consistency=None,
-        transaction=None,
-        namespace=None,
-        found=0,
-        retry=None,
-        timeout=None,
-    ):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        project = "PROJECT"
-        kind = "Nonesuch"
-        query_pb = self._make_query_pb(kind)
-
-        partition_kw = {"project_id": project}
-        if namespace is not None:
-            partition_kw["namespace_id"] = namespace
-
-        partition_id = entity_pb2.PartitionId(**partition_kw)
-
-        options_kw = {}
-        if read_consistency is not None:
-            options_kw["read_consistency"] = read_consistency
-        if transaction is not None:
-            options_kw["transaction"] = transaction
-        read_options = datastore_pb2.ReadOptions(**options_kw)
-
-        cursor = b"\x00"
-        batch_kw = {
-            "entity_result_type": query_pb2.EntityResult.ResultType.FULL,
-            "end_cursor": cursor,
-            "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS,
-        }
-        if found:
-            batch_kw["entity_results"] = [
-                query_pb2.EntityResult(entity=entity_pb2.Entity())
-            ] * found
-        rsp_pb = datastore_pb2.RunQueryResponse(
-            batch=query_pb2.QueryResultBatch(**batch_kw)
-        )
 
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
-        ds_api = self._make_one(client)
-        request = {
-            "project_id": project,
-            "partition_id": partition_id,
-            "read_options": read_options,
-            "query": query_pb,
-        }
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
-
-        response = ds_api.run_query(request=request, **kwargs)
-
-        self.assertEqual(response, rsp_pb._pb)
-
-        uri = _build_expected_url(client._base_url, project, "runQuery")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(request.partition_id, partition_id._pb)
-        self.assertEqual(request.query, query_pb._pb)
-        self.assertEqual(request.read_options, read_options._pb)
+def test_api_lookup_single_key_miss_w_read_consistency():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-    def test_run_query_simple(self):
-        self._run_query_helper()
+    read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
+    _lookup_single_helper(read_consistency=read_consistency)
 
-    def test_run_query_w_retry(self):
-        retry = mock.MagicMock()
-        self._run_query_helper(retry=retry)
 
-    def test_run_query_w_timeout(self):
-        timeout = 5.0
-        self._run_query_helper(timeout=timeout)
+def test_api_lookup_single_key_miss_w_transaction():
+    transaction = b"TRANSACTION"
+    _lookup_single_helper(transaction=transaction)
 
-    def test_run_query_w_read_consistency(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
-        self._run_query_helper(read_consistency=read_consistency)
+def test_api_lookup_single_key_hit():
+    _lookup_single_helper(empty=False)
 
-    def test_run_query_w_transaction(self):
-        transaction = b"TRANSACTION"
-        self._run_query_helper(transaction=transaction)
 
-    def test_run_query_w_namespace_nonempty_result(self):
-        namespace = "NS"
-        self._run_query_helper(namespace=namespace, found=1)
+def test_api_lookup_single_key_hit_w_retry():
+    retry = mock.MagicMock()
+    _lookup_single_helper(empty=False, retry=retry)
 
-    def _begin_transaction_helper(self, options=None, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        project = "PROJECT"
-        transaction = b"TRANSACTION"
-        rsp_pb = datastore_pb2.BeginTransactionResponse()
-        rsp_pb.transaction = transaction
+def test_api_lookup_single_key_hit_w_timeout():
+    timeout = 5.0
+    _lookup_single_helper(empty=False, timeout=timeout)
 
-        # Create mock HTTP and client with response.
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
 
-        # Make request.
-        ds_api = self._make_one(client)
-        request = {"project_id": project}
+def _lookup_multiple_helper(
+    found=0, missing=0, deferred=0, retry=None, timeout=None,
+):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
 
-        if options is not None:
-            request["transaction_options"] = options
+    project = "PROJECT"
+    key_pb1 = _make_key_pb(project)
+    key_pb2 = _make_key_pb(project, id_=2345)
+    keys = [key_pb1, key_pb2]
+    read_options = datastore_pb2.ReadOptions()
 
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    rsp_pb = datastore_pb2.LookupResponse()
 
-        response = ds_api.begin_transaction(request=request, **kwargs)
+    found_keys = []
+    for i_found in range(found):
+        key = keys[i_found]
+        found_keys.append(key._pb)
+        entity = entity_pb2.Entity()
+        entity.key._pb.CopyFrom(key._pb)
+        rsp_pb._pb.found.add(entity=entity._pb)
 
-        # Check the result and verify the callers.
-        self.assertEqual(response, rsp_pb._pb)
+    missing_keys = []
+    for i_missing in range(missing):
+        key = keys[i_missing]
+        missing_keys.append(key._pb)
+        entity = entity_pb2.Entity()
+        entity.key._pb.CopyFrom(key._pb)
+        rsp_pb._pb.missing.add(entity=entity._pb)
 
-        uri = _build_expected_url(client._base_url, project, "beginTransaction")
-        request = _verify_protobuf_call(
-            http,
-            uri,
-            datastore_pb2.BeginTransactionRequest(),
-            retry=retry,
-            timeout=timeout,
-        )
+    deferred_keys = []
+    for i_deferred in range(deferred):
+        key = keys[i_deferred]
+        deferred_keys.append(key._pb)
+        rsp_pb._pb.deferred.append(key._pb)
 
-    def test_begin_transaction_wo_options(self):
-        self._begin_transaction_helper()
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+    request = {
+        "project_id": project,
+        "keys": keys,
+        "read_options": read_options,
+    }
+    kwargs = _retry_timeout_kw(retry, timeout, http)
 
-    def test_begin_transaction_w_options(self):
-        from google.cloud.datastore_v1.types import TransactionOptions
+    response = ds_api.lookup(request=request, **kwargs)
 
-        read_only = TransactionOptions.ReadOnly._meta.pb()
-        options = TransactionOptions(read_only=read_only)
-        self._begin_transaction_helper(options=options)
+    assert response == rsp_pb._pb
 
-    def test_begin_transaction_w_retry(self):
-        retry = mock.MagicMock()
-        self._begin_transaction_helper(retry=retry)
+    assert [found.entity.key for found in response.found] == found_keys
+    assert [missing.entity.key for missing in response.missing] == missing_keys
+    assert list(response.deferred) == deferred_keys
 
-    def test_begin_transaction_w_timeout(self):
-        timeout = 5.0
-        self._begin_transaction_helper(timeout=timeout)
+    uri = _build_expected_url(client._base_url, project, "lookup")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.LookupRequest(), retry=retry, timeout=timeout,
+    )
+    assert list(request.keys) == [key_pb1._pb, key_pb2._pb]
+    assert request.read_options == read_options._pb
 
-    def _commit_helper(self, transaction=None, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
 
-        project = "PROJECT"
-        key_pb = _make_key_pb(project)
-        rsp_pb = datastore_pb2.CommitResponse()
-        req_pb = datastore_pb2.CommitRequest()
-        mutation = req_pb._pb.mutations.add()
-        insert = mutation.upsert
-        insert.key.CopyFrom(key_pb._pb)
-        value_pb = _new_value_pb(insert, "foo")
-        value_pb.string_value = u"Foo"
+def test_api_lookup_multiple_keys_w_empty_response():
+    _lookup_multiple_helper()
 
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
 
-        rq_class = datastore_pb2.CommitRequest
-        ds_api = self._make_one(client)
+def test_api_lookup_multiple_keys_w_retry():
+    retry = mock.MagicMock()
+    _lookup_multiple_helper(retry=retry)
 
-        request = {"project_id": project, "mutations": [mutation]}
 
-        if transaction is not None:
-            request["transaction"] = transaction
-            mode = request["mode"] = rq_class.Mode.TRANSACTIONAL
-        else:
-            mode = request["mode"] = rq_class.Mode.NON_TRANSACTIONAL
+def test_api_lookup_multiple_keys_w_timeout():
+    timeout = 5.0
+    _lookup_multiple_helper(timeout=timeout)
 
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
 
-        result = ds_api.commit(request=request, **kwargs)
+def test_api_lookup_multiple_keys_w_found():
+    _lookup_multiple_helper(found=2)
 
-        self.assertEqual(result, rsp_pb._pb)
 
-        uri = _build_expected_url(client._base_url, project, "commit")
-        request = _verify_protobuf_call(
-            http, uri, rq_class(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(list(request.mutations), [mutation])
-        self.assertEqual(request.mode, mode)
+def test_api_lookup_multiple_keys_w_missing():
+    _lookup_multiple_helper(missing=2)
 
-        if transaction is not None:
-            self.assertEqual(request.transaction, transaction)
-        else:
-            self.assertEqual(request.transaction, b"")
 
-    def test_commit_wo_transaction(self):
-        self._commit_helper()
+def test_api_lookup_multiple_keys_w_deferred():
+    _lookup_multiple_helper(deferred=2)
 
-    def test_commit_w_transaction(self):
-        transaction = b"xact"
 
-        self._commit_helper(transaction=transaction)
+def _run_query_helper(
+    read_consistency=None,
+    transaction=None,
+    namespace=None,
+    found=0,
+    retry=None,
+    timeout=None,
+):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore_v1.types import query as query_pb2
 
-    def test_commit_w_retry(self):
-        retry = mock.MagicMock()
-        self._commit_helper(retry=retry)
+    project = "PROJECT"
+    kind = "Nonesuch"
+    query_pb = query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)])
 
-    def test_commit_w_timeout(self):
-        timeout = 5.0
-        self._commit_helper(timeout=timeout)
+    partition_kw = {"project_id": project}
+    if namespace is not None:
+        partition_kw["namespace_id"] = namespace
 
-    def _rollback_helper(self, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    partition_id = entity_pb2.PartitionId(**partition_kw)
 
-        project = "PROJECT"
-        transaction = b"xact"
-        rsp_pb = datastore_pb2.RollbackResponse()
+    options_kw = {}
+    if read_consistency is not None:
+        options_kw["read_consistency"] = read_consistency
+    if transaction is not None:
+        options_kw["transaction"] = transaction
+    read_options = datastore_pb2.ReadOptions(**options_kw)
 
-        # Create mock HTTP and client with response.
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
+    cursor = b"\x00"
+    batch_kw = {
+        "entity_result_type": query_pb2.EntityResult.ResultType.FULL,
+        "end_cursor": cursor,
+        "more_results": query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS,
+    }
+    if found:
+        batch_kw["entity_results"] = [
+            query_pb2.EntityResult(entity=entity_pb2.Entity())
+        ] * found
+    rsp_pb = datastore_pb2.RunQueryResponse(
+        batch=query_pb2.QueryResultBatch(**batch_kw)
+    )
+
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+    request = {
+        "project_id": project,
+        "partition_id": partition_id,
+        "read_options": read_options,
+        "query": query_pb,
+    }
+    kwargs = _retry_timeout_kw(retry, timeout, http)
 
-        # Make request.
-        ds_api = self._make_one(client)
-        request = {"project_id": project, "transaction": transaction}
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    response = ds_api.run_query(request=request, **kwargs)
 
-        response = ds_api.rollback(request=request, **kwargs)
+    assert response == rsp_pb._pb
 
-        # Check the result and verify the callers.
-        self.assertEqual(response, rsp_pb._pb)
+    uri = _build_expected_url(client._base_url, project, "runQuery")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.RunQueryRequest(), retry=retry, timeout=timeout,
+    )
+    assert request.partition_id == partition_id._pb
+    assert request.query == query_pb._pb
+    assert request.read_options == read_options._pb
 
-        uri = _build_expected_url(client._base_url, project, "rollback")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(request.transaction, transaction)
 
-    def test_rollback_ok(self):
-        self._rollback_helper()
+def test_api_run_query_simple():
+    _run_query_helper()
 
-    def test_rollback_w_retry(self):
-        retry = mock.MagicMock()
-        self._rollback_helper(retry=retry)
 
-    def test_rollback_w_timeout(self):
-        timeout = 5.0
-        self._rollback_helper(timeout=timeout)
+def test_api_run_query_w_retry():
+    retry = mock.MagicMock()
+    _run_query_helper(retry=retry)
 
-    def _allocate_ids_helper(self, count=0, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        project = "PROJECT"
-        before_key_pbs = []
-        after_key_pbs = []
-        rsp_pb = datastore_pb2.AllocateIdsResponse()
+def test_api_run_query_w_timeout():
+    timeout = 5.0
+    _run_query_helper(timeout=timeout)
 
-        for i_count in range(count):
-            requested = _make_key_pb(project, id_=None)
-            before_key_pbs.append(requested)
-            allocated = _make_key_pb(project, id_=i_count)
-            after_key_pbs.append(allocated)
-            rsp_pb._pb.keys.add().CopyFrom(allocated._pb)
 
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
-        ds_api = self._make_one(client)
+def test_api_run_query_w_read_consistency():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        request = {"project_id": project, "keys": before_key_pbs}
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    read_consistency = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
+    _run_query_helper(read_consistency=read_consistency)
 
-        response = ds_api.allocate_ids(request=request, **kwargs)
 
-        self.assertEqual(response, rsp_pb._pb)
-        self.assertEqual(list(response.keys), [i._pb for i in after_key_pbs])
+def test_api_run_query_w_transaction():
+    transaction = b"TRANSACTION"
+    _run_query_helper(transaction=transaction)
 
-        uri = _build_expected_url(client._base_url, project, "allocateIds")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(len(request.keys), len(before_key_pbs))
-        for key_before, key_after in zip(before_key_pbs, request.keys):
-            self.assertEqual(key_before, key_after)
 
-    def test_allocate_ids_empty(self):
-        self._allocate_ids_helper()
+def test_api_run_query_w_namespace_nonempty_result():
+    namespace = "NS"
+    _run_query_helper(namespace=namespace, found=1)
 
-    def test_allocate_ids_non_empty(self):
-        self._allocate_ids_helper(count=2)
 
-    def test_allocate_ids_w_retry(self):
-        retry = mock.MagicMock()
-        self._allocate_ids_helper(retry=retry)
+def _begin_transaction_helper(options=None, retry=None, timeout=None):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-    def test_allocate_ids_w_timeout(self):
-        timeout = 5.0
-        self._allocate_ids_helper(timeout=timeout)
+    project = "PROJECT"
+    transaction = b"TRANSACTION"
+    rsp_pb = datastore_pb2.BeginTransactionResponse()
+    rsp_pb.transaction = transaction
 
-    def _reserve_ids_helper(self, count=0, retry=None, timeout=None):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    # Create mock HTTP and client with response.
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
 
-        project = "PROJECT"
-        before_key_pbs = []
-        rsp_pb = datastore_pb2.ReserveIdsResponse()
+    # Make request.
+    ds_api = _make_http_datastore_api(client)
+    request = {"project_id": project}
 
-        for i_count in range(count):
-            requested = _make_key_pb(project, id_=i_count)
-            before_key_pbs.append(requested)
+    if options is not None:
+        request["transaction_options"] = options
 
-        http = _make_requests_session(
-            [_make_response(content=rsp_pb._pb.SerializeToString())]
-        )
-        client_info = _make_client_info()
-        client = mock.Mock(
-            _http=http,
-            _base_url="test.invalid",
-            _client_info=client_info,
-            spec=["_http", "_base_url", "_client_info"],
-        )
-        ds_api = self._make_one(client)
+    kwargs = _retry_timeout_kw(retry, timeout, http)
 
-        request = {"project_id": project, "keys": before_key_pbs}
-        kwargs = _make_retry_timeout_kwargs(retry, timeout, http)
+    response = ds_api.begin_transaction(request=request, **kwargs)
 
-        response = ds_api.reserve_ids(request=request, **kwargs)
+    # Check the result and verify that the mocks were called as expected.
+    assert response == rsp_pb._pb
 
-        self.assertEqual(response, rsp_pb._pb)
+    uri = _build_expected_url(client._base_url, project, "beginTransaction")
+    request = _verify_protobuf_call(
+        http,
+        uri,
+        datastore_pb2.BeginTransactionRequest(),
+        retry=retry,
+        timeout=timeout,
+    )
 
-        uri = _build_expected_url(client._base_url, project, "reserveIds")
-        request = _verify_protobuf_call(
-            http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout,
-        )
-        self.assertEqual(len(request.keys), len(before_key_pbs))
-        for key_before, key_after in zip(before_key_pbs, request.keys):
-            self.assertEqual(key_before, key_after)
 
-    def test_reserve_ids_empty(self):
-        self._reserve_ids_helper()
+def test_api_begin_transaction_wo_options():
+    _begin_transaction_helper()
+
+
+def test_api_begin_transaction_w_options():
+    from google.cloud.datastore_v1.types import TransactionOptions
+
+    read_only = TransactionOptions.ReadOnly._meta.pb()
+    options = TransactionOptions(read_only=read_only)
+    _begin_transaction_helper(options=options)
+
+
+def test_api_begin_transaction_w_retry():
+    retry = mock.MagicMock()
+    _begin_transaction_helper(retry=retry)
+
+
+def test_api_begin_transaction_w_timeout():
+    timeout = 5.0
+    _begin_transaction_helper(timeout=timeout)
+
+
+def _commit_helper(transaction=None, retry=None, timeout=None):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+
+    project = "PROJECT"
+    key_pb = _make_key_pb(project)
+    rsp_pb = datastore_pb2.CommitResponse()
+    req_pb = datastore_pb2.CommitRequest()
+    mutation = req_pb._pb.mutations.add()
+    insert = mutation.upsert
+    insert.key.CopyFrom(key_pb._pb)
+    value_pb = _new_value_pb(insert, "foo")
+    value_pb.string_value = u"Foo"
 
-    def test_reserve_ids_non_empty(self):
-        self._reserve_ids_helper(count=2)
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
 
-    def test_reserve_ids_w_retry(self):
-        retry = mock.MagicMock()
-        self._reserve_ids_helper(retry=retry)
+    rq_class = datastore_pb2.CommitRequest
+    ds_api = _make_http_datastore_api(client)
 
-    def test_reserve_ids_w_timeout(self):
-        timeout = 5.0
-        self._reserve_ids_helper(timeout=timeout)
+    request = {"project_id": project, "mutations": [mutation]}
 
+    if transaction is not None:
+        request["transaction"] = transaction
+        mode = request["mode"] = rq_class.Mode.TRANSACTIONAL
+    else:
+        mode = request["mode"] = rq_class.Mode.NON_TRANSACTIONAL
+
+    kwargs = _retry_timeout_kw(retry, timeout, http)
+
+    result = ds_api.commit(request=request, **kwargs)
+
+    assert result == rsp_pb._pb
+
+    uri = _build_expected_url(client._base_url, project, "commit")
+    request = _verify_protobuf_call(
+        http, uri, rq_class(), retry=retry, timeout=timeout,
+    )
+    assert list(request.mutations) == [mutation]
+    assert request.mode == mode
+
+    if transaction is not None:
+        assert request.transaction == transaction
+    else:
+        assert request.transaction == b""
+
+
+def test_api_commit_wo_transaction():
+    _commit_helper()
+
+
+def test_api_commit_w_transaction():
+    transaction = b"xact"
+
+    _commit_helper(transaction=transaction)
+
+
+def test_api_commit_w_retry():
+    retry = mock.MagicMock()
+    _commit_helper(retry=retry)
+
+
+def test_api_commit_w_timeout():
+    timeout = 5.0
+    _commit_helper(timeout=timeout)
+
+
+def _rollback_helper(retry=None, timeout=None):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    transaction = b"xact"
+    rsp_pb = datastore_pb2.RollbackResponse()
+
+    # Create mock HTTP and client with response.
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+
+    # Make request.
+    ds_api = _make_http_datastore_api(client)
+    request = {"project_id": project, "transaction": transaction}
+    kwargs = _retry_timeout_kw(retry, timeout, http)
+
+    response = ds_api.rollback(request=request, **kwargs)
+
+    # Check the result and verify that the mocks were called as expected.
+    assert response == rsp_pb._pb
+
+    uri = _build_expected_url(client._base_url, project, "rollback")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.RollbackRequest(), retry=retry, timeout=timeout,
+    )
+    assert request.transaction == transaction
+
+
+def test_api_rollback_ok():
+    _rollback_helper()
+
+
+def test_api_rollback_w_retry():
+    retry = mock.MagicMock()
+    _rollback_helper(retry=retry)
+
+
+def test_api_rollback_w_timeout():
+    timeout = 5.0
+    _rollback_helper(timeout=timeout)
+
+
+def _allocate_ids_helper(count=0, retry=None, timeout=None):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    before_key_pbs = []
+    after_key_pbs = []
+    rsp_pb = datastore_pb2.AllocateIdsResponse()
+
+    for i_count in range(count):
+        requested = _make_key_pb(project, id_=None)
+        before_key_pbs.append(requested)
+        allocated = _make_key_pb(project, id_=i_count)
+        after_key_pbs.append(allocated)
+        rsp_pb._pb.keys.add().CopyFrom(allocated._pb)
+
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+
+    request = {"project_id": project, "keys": before_key_pbs}
+    kwargs = _retry_timeout_kw(retry, timeout, http)
+
+    response = ds_api.allocate_ids(request=request, **kwargs)
 
-def _make_response(status=client.OK, content=b"", headers={}):
+    assert response == rsp_pb._pb
+    assert list(response.keys) == [i._pb for i in after_key_pbs]
+
+    uri = _build_expected_url(client._base_url, project, "allocateIds")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.AllocateIdsRequest(), retry=retry, timeout=timeout,
+    )
+    assert len(request.keys) == len(before_key_pbs)
+    for key_before, key_after in zip(before_key_pbs, request.keys):
+        assert key_before == key_after
+
+
+def test_api_allocate_ids_empty():
+    _allocate_ids_helper()
+
+
+def test_api_allocate_ids_non_empty():
+    _allocate_ids_helper(count=2)
+
+
+def test_api_allocate_ids_w_retry():
+    retry = mock.MagicMock()
+    _allocate_ids_helper(retry=retry)
+
+
+def test_api_allocate_ids_w_timeout():
+    timeout = 5.0
+    _allocate_ids_helper(timeout=timeout)
+
+
+def _reserve_ids_helper(count=0, retry=None, timeout=None):
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    before_key_pbs = []
+    rsp_pb = datastore_pb2.ReserveIdsResponse()
+
+    for i_count in range(count):
+        requested = _make_key_pb(project, id_=i_count)
+        before_key_pbs.append(requested)
+
+    http = _make_requests_session(
+        [_make_response(content=rsp_pb._pb.SerializeToString())]
+    )
+    client_info = _make_client_info()
+    client = mock.Mock(
+        _http=http,
+        _base_url="test.invalid",
+        _client_info=client_info,
+        spec=["_http", "_base_url", "_client_info"],
+    )
+    ds_api = _make_http_datastore_api(client)
+
+    request = {"project_id": project, "keys": before_key_pbs}
+    kwargs = _retry_timeout_kw(retry, timeout, http)
+
+    response = ds_api.reserve_ids(request=request, **kwargs)
+
+    assert response == rsp_pb._pb
+
+    uri = _build_expected_url(client._base_url, project, "reserveIds")
+    request = _verify_protobuf_call(
+        http, uri, datastore_pb2.ReserveIdsRequest(), retry=retry, timeout=timeout,
+    )
+    assert len(request.keys) == len(before_key_pbs)
+    for key_before, key_after in zip(before_key_pbs, request.keys):
+        assert key_before == key_after
+
+
+def test_api_reserve_ids_empty():
+    _reserve_ids_helper()
+
+
+def test_api_reserve_ids_non_empty():
+    _reserve_ids_helper(count=2)
+
+
+def test_api_reserve_ids_w_retry():
+    retry = mock.MagicMock()
+    _reserve_ids_helper(retry=retry)
+
+
+def test_api_reserve_ids_w_timeout():
+    timeout = 5.0
+    _reserve_ids_helper(timeout=timeout)
+
+
+def _make_http_datastore_api(*args, **kwargs):
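+    # Thin factory so tests in this module construct HTTPDatastoreAPI in one place.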
+    from google.cloud.datastore._http import HTTPDatastoreAPI
+
+    return HTTPDatastoreAPI(*args, **kwargs)
+
+
+def _make_response(status=http.client.OK, content=b"", headers={}):
     response = requests.Response()
     response.status_code = status
     response._content = content
@@ -906,7 +924,7 @@ def _verify_protobuf_call(http, expected_url, pb, retry=None, timeout=None):
     return pb
 
 
-def _make_retry_timeout_kwargs(retry, timeout, http=None):
+def _retry_timeout_kw(retry, timeout, http=None):
     kwargs = {}
 
     if retry is not None:
@@ -918,3 +936,9 @@ def _make_retry_timeout_kwargs(retry, timeout, http=None):
         kwargs["timeout"] = timeout
 
     return kwargs
+
+
+class Foo:
+    def __init__(self, bar=None, baz=None):
+        self.bar = bar
+        self.baz = baz
diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py
index ead00623..fffbefa2 100644
--- a/tests/unit/test_batch.py
+++ b/tests/unit/test_batch.py
@@ -12,469 +12,487 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 import mock
+import pytest
 
 
-class TestBatch(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.batch import Batch
-
-        return Batch
-
-    def _make_one(self, client):
-        return self._get_target_class()(client)
-
-    def test_ctor(self):
-        project = "PROJECT"
-        namespace = "NAMESPACE"
-        client = _Client(project, namespace=namespace)
-        batch = self._make_one(client)
-
-        self.assertEqual(batch.project, project)
-        self.assertIs(batch._client, client)
-        self.assertEqual(batch.namespace, namespace)
-        self.assertIsNone(batch._id)
-        self.assertEqual(batch._status, batch._INITIAL)
-        self.assertEqual(batch._mutations, [])
-        self.assertEqual(batch._partial_key_entities, [])
-
-    def test_current(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        client = _Client(project)
-        batch1 = self._make_one(client)
-        batch2 = self._make_one(client)
-        self.assertIsNone(batch1.current())
-        self.assertIsNone(batch2.current())
-        with batch1:
-            self.assertIs(batch1.current(), batch1)
-            self.assertIs(batch2.current(), batch1)
-            with batch2:
-                self.assertIs(batch1.current(), batch2)
-                self.assertIs(batch2.current(), batch2)
-            self.assertIs(batch1.current(), batch1)
-            self.assertIs(batch2.current(), batch1)
-        self.assertIsNone(batch1.current())
-        self.assertIsNone(batch2.current())
-
-        commit_method = client._datastore_api.commit
-        self.assertEqual(commit_method.call_count, 2)
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": None,
-            }
-        )
-
-    def test_put_entity_wo_key(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
+def _make_batch(client):
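+    # Shared factory so the module-level tests build Batch through one code path.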
+    from google.cloud.datastore.batch import Batch
 
-        batch.begin()
-        self.assertRaises(ValueError, batch.put, _Entity())
+    return Batch(client)
 
-    def test_put_entity_wrong_status(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        entity = _Entity()
-        entity.key = _Key("OTHER")
 
-        self.assertEqual(batch._status, batch._INITIAL)
-        self.assertRaises(ValueError, batch.put, entity)
+def test_batch_ctor():
+    project = "PROJECT"
+    namespace = "NAMESPACE"
+    client = _Client(project, namespace=namespace)
+    batch = _make_batch(client)
 
-    def test_put_entity_w_key_wrong_project(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        entity = _Entity()
-        entity.key = _Key("OTHER")
+    assert batch.project == project
+    assert batch._client is client
+    assert batch.namespace == namespace
+    assert batch._id is None
+    assert batch._status == batch._INITIAL
+    assert batch._mutations == []
+    assert batch._partial_key_entities == []
 
-        batch.begin()
-        self.assertRaises(ValueError, batch.put, entity)
 
-    def test_put_entity_w_partial_key(self):
-        project = "PROJECT"
-        properties = {"foo": "bar"}
-        client = _Client(project)
-        batch = self._make_one(client)
-        entity = _Entity(properties)
-        key = entity.key = _Key(project)
-        key._id = None
+def test_batch_current():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        batch.begin()
+    project = "PROJECT"
+    client = _Client(project)
+    batch1 = _make_batch(client)
+    batch2 = _make_batch(client)
+
+    assert batch1.current() is None
+    assert batch2.current() is None
+
+    with batch1:
+        assert batch1.current() is batch1
+        assert batch2.current() is batch1
+
+        with batch2:
+            assert batch1.current() is batch2
+            assert batch2.current() is batch2
+
+        assert batch1.current() is batch1
+        assert batch2.current() is batch1
+
+    assert batch1.current() is None
+    assert batch2.current() is None
+
+    commit_method = client._datastore_api.commit
+    assert commit_method.call_count == 2
+    mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
+    commit_method.assert_called_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": [],
+            "transaction": None,
+        }
+    )
+
+
+def test_batch_put_w_entity_wo_key():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    entity = _Entity()
+
+    batch.begin()
+    with pytest.raises(ValueError):
         batch.put(entity)
 
-        mutated_entity = _mutated_pb(self, batch.mutations, "insert")
-        self.assertEqual(mutated_entity.key, key._key)
-        self.assertEqual(batch._partial_key_entities, [entity])
 
-    def test_put_entity_w_completed_key(self):
-        project = "PROJECT"
-        properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []}
-        client = _Client(project)
-        batch = self._make_one(client)
-        entity = _Entity(properties)
-        entity.exclude_from_indexes = ("baz", "spam")
-        key = entity.key = _Key(project)
+def test_batch_put_w_wrong_status():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    entity = _Entity()
+    entity.key = _Key(project=project)
 
-        batch.begin()
+    assert batch._status == batch._INITIAL
+    with pytest.raises(ValueError):
         batch.put(entity)
 
-        mutated_entity = _mutated_pb(self, batch.mutations, "upsert")
-        self.assertEqual(mutated_entity.key, key._key)
-
-        prop_dict = dict(mutated_entity.properties.items())
-        self.assertEqual(len(prop_dict), 4)
-        self.assertFalse(prop_dict["foo"].exclude_from_indexes)
-        self.assertTrue(prop_dict["baz"].exclude_from_indexes)
-        self.assertFalse(prop_dict["spam"].exclude_from_indexes)
-        spam_values = prop_dict["spam"].array_value.values
-        self.assertTrue(spam_values[0].exclude_from_indexes)
-        self.assertTrue(spam_values[1].exclude_from_indexes)
-        self.assertTrue(spam_values[2].exclude_from_indexes)
-        self.assertTrue("frotz" in prop_dict)
-
-    def test_delete_wrong_status(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        key = _Key(project)
-        key._id = None
-
-        self.assertEqual(batch._status, batch._INITIAL)
-        self.assertRaises(ValueError, batch.delete, key)
-
-    def test_delete_w_partial_key(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        key = _Key(project)
-        key._id = None
 
-        batch.begin()
-        self.assertRaises(ValueError, batch.delete, key)
+def test_batch_put_w_key_wrong_project():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    entity = _Entity()
+    entity.key = _Key(project="OTHER")
 
-    def test_delete_w_key_wrong_project(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        key = _Key("OTHER")
+    batch.begin()
+    with pytest.raises(ValueError):
+        batch.put(entity)
 
-        batch.begin()
-        self.assertRaises(ValueError, batch.delete, key)
 
-    def test_delete_w_completed_key(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        key = _Key(project)
+def test_batch_put_w_entity_w_partial_key():
+    project = "PROJECT"
+    properties = {"foo": "bar"}
+    client = _Client(project)
+    batch = _make_batch(client)
+    entity = _Entity(properties)
+    key = entity.key = _Key(project)
+    key._id = None
 
-        batch.begin()
+    batch.begin()
+    batch.put(entity)
+
+    mutated_entity = _mutated_pb(batch.mutations, "insert")
+    assert mutated_entity.key == key._key
+    assert batch._partial_key_entities == [entity]
+
+
+def test_batch_put_w_entity_w_completed_key():
+    project = "PROJECT"
+    properties = {"foo": "bar", "baz": "qux", "spam": [1, 2, 3], "frotz": []}
+    client = _Client(project)
+    batch = _make_batch(client)
+    entity = _Entity(properties)
+    entity.exclude_from_indexes = ("baz", "spam")
+    key = entity.key = _Key(project)
+
+    batch.begin()
+    batch.put(entity)
+
+    mutated_entity = _mutated_pb(batch.mutations, "upsert")
+    assert mutated_entity.key == key._key
+
+    prop_dict = dict(mutated_entity.properties.items())
+    assert len(prop_dict) == 4
+    assert not prop_dict["foo"].exclude_from_indexes
+    assert prop_dict["baz"].exclude_from_indexes
+    assert not prop_dict["spam"].exclude_from_indexes
+
+    spam_values = prop_dict["spam"].array_value.values
+    assert spam_values[0].exclude_from_indexes
+    assert spam_values[1].exclude_from_indexes
+    assert spam_values[2].exclude_from_indexes
+    assert "frotz" in prop_dict
+
+
+def test_batch_delete_w_wrong_status():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    key = _Key(project=project)
+    key._id = None
+
+    assert batch._status == batch._INITIAL
+
+    with pytest.raises(ValueError):
         batch.delete(key)
 
-        mutated_key = _mutated_pb(self, batch.mutations, "delete")
-        self.assertEqual(mutated_key, key._key)
 
-    def test_begin(self):
-        project = "PROJECT"
-        client = _Client(project, None)
-        batch = self._make_one(client)
-        self.assertEqual(batch._status, batch._INITIAL)
-        batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
-
-    def test_begin_fail(self):
-        project = "PROJECT"
-        client = _Client(project, None)
-        batch = self._make_one(client)
-        batch._status = batch._IN_PROGRESS
-        with self.assertRaises(ValueError):
-            batch.begin()
-
-    def test_rollback(self):
-        project = "PROJECT"
-        client = _Client(project, None)
-        batch = self._make_one(client)
+def test_batch_delete_w_partial_key():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    key = _Key(project=project)
+    key._id = None
+
+    batch.begin()
+
+    with pytest.raises(ValueError):
+        batch.delete(key)
+
+
+def test_batch_delete_w_key_wrong_project():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    key = _Key(project="OTHER")
+
+    batch.begin()
+
+    with pytest.raises(ValueError):
+        batch.delete(key)
+
+
+def test_batch_delete_w_completed_key():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    key = _Key(project)
+
+    batch.begin()
+    batch.delete(key)
+
+    mutated_key = _mutated_pb(batch.mutations, "delete")
+    assert mutated_key == key._key
+
+
+def test_batch_begin_w_wrong_status():
+    project = "PROJECT"
+    client = _Client(project, None)
+    batch = _make_batch(client)
+    batch._status = batch._IN_PROGRESS
+
+    with pytest.raises(ValueError):
         batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
+
+
+def test_batch_begin():
+    project = "PROJECT"
+    client = _Client(project, None)
+    batch = _make_batch(client)
+    assert batch._status == batch._INITIAL
+
+    batch.begin()
+
+    assert batch._status == batch._IN_PROGRESS
+
+
+def test_batch_rollback_w_wrong_status():
+    project = "PROJECT"
+    client = _Client(project, None)
+    batch = _make_batch(client)
+    assert batch._status == batch._INITIAL
+
+    with pytest.raises(ValueError):
         batch.rollback()
-        self.assertEqual(batch._status, batch._ABORTED)
 
-    def test_rollback_wrong_status(self):
-        project = "PROJECT"
-        client = _Client(project, None)
-        batch = self._make_one(client)
 
-        self.assertEqual(batch._status, batch._INITIAL)
-        self.assertRaises(ValueError, batch.rollback)
+def test_batch_rollback():
+    project = "PROJECT"
+    client = _Client(project, None)
+    batch = _make_batch(client)
+    batch.begin()
+    assert batch._status == batch._IN_PROGRESS
 
-    def test_commit(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    batch.rollback()
 
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
+    assert batch._status == batch._ABORTED
 
-        self.assertEqual(batch._status, batch._INITIAL)
-        batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
-        batch.commit()
-        self.assertEqual(batch._status, batch._FINISHED)
-
-        commit_method = client._datastore_api.commit
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": None,
-            }
-        )
-
-    def test_commit_w_timeout(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        timeout = 100000
-
-        self.assertEqual(batch._status, batch._INITIAL)
-        batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
-        batch.commit(timeout=timeout)
-        self.assertEqual(batch._status, batch._FINISHED)
-
-        commit_method = client._datastore_api.commit
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": None,
-            },
-            timeout=timeout,
-        )
-
-    def test_commit_w_retry(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-        retry = mock.Mock()
-
-        self.assertEqual(batch._status, batch._INITIAL)
-        batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
-        batch.commit(retry=retry)
-        self.assertEqual(batch._status, batch._FINISHED)
-
-        commit_method = client._datastore_api.commit
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": None,
-            },
-            retry=retry,
-        )
-
-    def test_commit_wrong_status(self):
-        project = "PROJECT"
-        client = _Client(project)
-        batch = self._make_one(client)
-
-        self.assertEqual(batch._status, batch._INITIAL)
-        self.assertRaises(ValueError, batch.commit)
-
-    def test_commit_w_partial_key_entities(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        new_id = 1234
-        ds_api = _make_datastore_api(new_id)
-        client = _Client(project, datastore_api=ds_api)
-        batch = self._make_one(client)
-        entity = _Entity({})
-        key = entity.key = _Key(project)
-        key._id = None
-        batch._partial_key_entities.append(entity)
-
-        self.assertEqual(batch._status, batch._INITIAL)
-        batch.begin()
-        self.assertEqual(batch._status, batch._IN_PROGRESS)
+
+def test_batch_commit_wrong_status():
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    assert batch._status == batch._INITIAL
+
+    with pytest.raises(ValueError):
         batch.commit()
-        self.assertEqual(batch._status, batch._FINISHED)
-
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        ds_api.commit.assert_called_once_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": None,
-            }
-        )
-        self.assertFalse(entity.key.is_partial)
-        self.assertEqual(entity.key._id, new_id)
-
-    def test_as_context_mgr_wo_error(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        properties = {"foo": "bar"}
-        entity = _Entity(properties)
-        key = entity.key = _Key(project)
-
-        client = _Client(project)
-        self.assertEqual(list(client._batches), [])
-
-        with self._make_one(client) as batch:
-            self.assertEqual(list(client._batches), [batch])
+
+
+def _batch_commit_helper(timeout=None, retry=None):
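+    # Shared helper: drives a batch through begin() -> commit() and asserts that
+    # the datastore API's commit call receives the NON_TRANSACTIONAL request,
+    # along with whichever of timeout/retry the test supplied.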
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    client = _Client(project)
+    batch = _make_batch(client)
+    assert batch._status == batch._INITIAL
+
+    batch.begin()
+    assert batch._status == batch._IN_PROGRESS
+
+    kwargs = {}
+
+    if timeout is not None:
+        kwargs["timeout"] = timeout
+
+    if retry is not None:
+        kwargs["retry"] = retry
+
+    batch.commit(**kwargs)
+    assert batch._status == batch._FINISHED
+
+    commit_method = client._datastore_api.commit
+    mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
+    commit_method.assert_called_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": [],
+            "transaction": None,
+        },
+        **kwargs
+    )
+
+
+def test_batch_commit():
+    _batch_commit_helper()
+
+
+def test_batch_commit_w_timeout():
+    timeout = 100000
+    _batch_commit_helper(timeout=timeout)
+
+
+def test_batch_commit_w_retry():
+    retry = mock.Mock(spec=[])
+    _batch_commit_helper(retry=retry)
+
+
+def test_batch_commit_w_partial_key_entity():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    new_id = 1234
+    ds_api = _make_datastore_api(new_id)
+    client = _Client(project, datastore_api=ds_api)
+    batch = _make_batch(client)
+    entity = _Entity({})
+    key = entity.key = _Key(project)
+    key._id = None
+    batch._partial_key_entities.append(entity)
+    assert batch._status == batch._INITIAL
+
+    batch.begin()
+    assert batch._status == batch._IN_PROGRESS
+
+    batch.commit()
+    assert batch._status == batch._FINISHED
+
+    mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
+    ds_api.commit.assert_called_once_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": [],
+            "transaction": None,
+        }
+    )
+    assert not entity.key.is_partial
+    assert entity.key._id == new_id
+
+
+def test_batch_as_context_mgr_wo_error():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    properties = {"foo": "bar"}
+    entity = _Entity(properties)
+    key = entity.key = _Key(project)
+
+    client = _Client(project)
+    assert list(client._batches) == []
+
+    with _make_batch(client) as batch:
+        assert list(client._batches) == [batch]
+        batch.put(entity)
+
+    assert list(client._batches) == []
+
+    mutated_entity = _mutated_pb(batch.mutations, "upsert")
+    assert mutated_entity.key == key._key
+
+    commit_method = client._datastore_api.commit
+    mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
+    commit_method.assert_called_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": batch.mutations,
+            "transaction": None,
+        }
+    )
+
+
+def test_batch_as_context_mgr_nested():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    properties = {"foo": "bar"}
+    entity1 = _Entity(properties)
+    key1 = entity1.key = _Key(project)
+    entity2 = _Entity(properties)
+    key2 = entity2.key = _Key(project)
+
+    client = _Client(project)
+    assert list(client._batches) == []
+
+    with _make_batch(client) as batch1:
+        assert list(client._batches) == [batch1]
+        batch1.put(entity1)
+
+        with _make_batch(client) as batch2:
+            assert list(client._batches) == [batch2, batch1]
+            batch2.put(entity2)
+
+        assert list(client._batches) == [batch1]
+
+    assert list(client._batches) == []
+
+    mutated_entity1 = _mutated_pb(batch1.mutations, "upsert")
+    assert mutated_entity1.key == key1._key
+
+    mutated_entity2 = _mutated_pb(batch2.mutations, "upsert")
+    assert mutated_entity2.key == key2._key
+
+    commit_method = client._datastore_api.commit
+    assert commit_method.call_count == 2
+
+    mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
+    commit_method.assert_called_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": batch1.mutations,
+            "transaction": None,
+        }
+    )
+    commit_method.assert_called_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": batch2.mutations,
+            "transaction": None,
+        }
+    )
+
+
+def test_batch_as_context_mgr_w_error():
+    project = "PROJECT"
+    properties = {"foo": "bar"}
+    entity = _Entity(properties)
+    key = entity.key = _Key(project)
+
+    client = _Client(project)
+    assert list(client._batches) == []
+
+    try:
+        with _make_batch(client) as batch:
+            assert list(client._batches) == [batch]
             batch.put(entity)
 
-        self.assertEqual(list(client._batches), [])
-
-        mutated_entity = _mutated_pb(self, batch.mutations, "upsert")
-        self.assertEqual(mutated_entity.key, key._key)
-        commit_method = client._datastore_api.commit
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": batch.mutations,
-                "transaction": None,
-            }
-        )
-
-    def test_as_context_mgr_nested(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        properties = {"foo": "bar"}
-        entity1 = _Entity(properties)
-        key1 = entity1.key = _Key(project)
-        entity2 = _Entity(properties)
-        key2 = entity2.key = _Key(project)
-
-        client = _Client(project)
-        self.assertEqual(list(client._batches), [])
-
-        with self._make_one(client) as batch1:
-            self.assertEqual(list(client._batches), [batch1])
-            batch1.put(entity1)
-            with self._make_one(client) as batch2:
-                self.assertEqual(list(client._batches), [batch2, batch1])
-                batch2.put(entity2)
-
-            self.assertEqual(list(client._batches), [batch1])
-
-        self.assertEqual(list(client._batches), [])
-
-        mutated_entity1 = _mutated_pb(self, batch1.mutations, "upsert")
-        self.assertEqual(mutated_entity1.key, key1._key)
-
-        mutated_entity2 = _mutated_pb(self, batch2.mutations, "upsert")
-        self.assertEqual(mutated_entity2.key, key2._key)
-
-        commit_method = client._datastore_api.commit
-        self.assertEqual(commit_method.call_count, 2)
-        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": batch1.mutations,
-                "transaction": None,
-            }
-        )
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": batch2.mutations,
-                "transaction": None,
-            }
-        )
-
-    def test_as_context_mgr_w_error(self):
-        project = "PROJECT"
-        properties = {"foo": "bar"}
-        entity = _Entity(properties)
-        key = entity.key = _Key(project)
-
-        client = _Client(project)
-        self.assertEqual(list(client._batches), [])
-
-        try:
-            with self._make_one(client) as batch:
-                self.assertEqual(list(client._batches), [batch])
-                batch.put(entity)
-                raise ValueError("testing")
-        except ValueError:
-            pass
+            raise ValueError("testing")
 
-        self.assertEqual(list(client._batches), [])
+    except ValueError:
+        pass
 
-        mutated_entity = _mutated_pb(self, batch.mutations, "upsert")
-        self.assertEqual(mutated_entity.key, key._key)
+    assert list(client._batches) == []
 
-    def test_as_context_mgr_enter_fails(self):
-        klass = self._get_target_class()
+    mutated_entity = _mutated_pb(batch.mutations, "upsert")
+    assert mutated_entity.key == key._key
 
-        class FailedBegin(klass):
-            def begin(self):
-                raise RuntimeError
+    client._datastore_api.commit.assert_not_called()
 
-        client = _Client(None, None)
-        self.assertEqual(client._batches, [])
 
-        batch = FailedBegin(client)
-        with self.assertRaises(RuntimeError):
-            # The context manager will never be entered because
-            # of the failure.
-            with batch:  # pragma: NO COVER
-                pass
-        # Make sure no batch was added.
-        self.assertEqual(client._batches, [])
+def test_batch_as_context_mgr_w_enter_fails():
+    from google.cloud.datastore.batch import Batch
 
+    class FailedBegin(Batch):
+        def begin(self):
+            raise RuntimeError
 
-class Test__parse_commit_response(unittest.TestCase):
-    def _call_fut(self, commit_response_pb):
-        from google.cloud.datastore.batch import _parse_commit_response
+    client = _Client(None, None)
+    assert list(client._batches) == []
 
-        return _parse_commit_response(commit_response_pb)
+    batch = FailedBegin(client)
 
-    def test_it(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
+    with pytest.raises(RuntimeError):
+        # The context manager will never be entered because
+        # of the failure.
+        with batch:  # pragma: NO COVER
+            pass
+
+    # Make sure no batch was added.
+    assert list(client._batches) == []
 
-        index_updates = 1337
-        keys = [
-            entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Foo", id=1234)]),
-            entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Bar", name="baz")]),
-        ]
-        response = datastore_pb2.CommitResponse(
-            mutation_results=[datastore_pb2.MutationResult(key=key) for key in keys],
-            index_updates=index_updates,
-        )
-        result = self._call_fut(response)
-        self.assertEqual(result, (index_updates, [i._pb for i in keys]))
+
+def test__parse_commit_response():
+    from google.cloud.datastore.batch import _parse_commit_response
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+
+    index_updates = 1337
+    keys = [
+        entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Foo", id=1234)]),
+        entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="Bar", name="baz")]),
+    ]
+    response = datastore_pb2.CommitResponse(
+        mutation_results=[datastore_pb2.MutationResult(key=key) for key in keys],
+        index_updates=index_updates,
+    )
+
+    result = _parse_commit_response(response)
+
+    assert result == (index_updates, [i._pb for i in keys])
 
 
 class _Entity(dict):
@@ -539,18 +557,14 @@ def current_batch(self):
             return self._batches[0]
 
 
-def _assert_num_mutations(test_case, mutation_pb_list, num_mutations):
-    test_case.assertEqual(len(mutation_pb_list), num_mutations)
-
-
-def _mutated_pb(test_case, mutation_pb_list, mutation_type):
+def _mutated_pb(mutation_pb_list, mutation_type):
     # Make sure there is only one mutation.
-    _assert_num_mutations(test_case, mutation_pb_list, 1)
+    assert len(mutation_pb_list) == 1
 
     # We grab the only mutation.
     mutated_pb = mutation_pb_list[0]
     # Then check if it is the correct type.
-    test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type)
+    assert mutated_pb._pb.WhichOneof("operation") == mutation_type
 
     return getattr(mutated_pb, mutation_type)
 
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index f4c27cf4..7f38a5ad 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -12,1483 +12,1477 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 import mock
+import pytest
 
+PROJECT = "dummy-project-123"
 
-def _make_credentials():
-    import google.auth.credentials
 
-    return mock.Mock(spec=google.auth.credentials.Credentials)
+def test__get_gcd_project_wo_value_set():
+    from google.cloud.datastore.client import _get_gcd_project
 
+    environ = {}
 
-def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
-    from google.cloud.datastore_v1.types import entity as entity_pb2
-    from google.cloud.datastore.helpers import _new_value_pb
+    with mock.patch("os.getenv", new=environ.get):
+        project = _get_gcd_project()
+        assert project is None
 
-    entity_pb = entity_pb2.Entity()
-    entity_pb.key.partition_id.project_id = project
-    path_element = entity_pb._pb.key.path.add()
-    path_element.kind = kind
-    path_element.id = integer_id
-    if name is not None and str_val is not None:
-        value_pb = _new_value_pb(entity_pb, name)
-        value_pb.string_value = str_val
 
-    return entity_pb
+def test__get_gcd_project_w_value_set():
+    from google.cloud.datastore.client import _get_gcd_project
+    from google.cloud.datastore.client import DATASTORE_DATASET
 
+    environ = {DATASTORE_DATASET: PROJECT}
 
-class Test__get_gcd_project(unittest.TestCase):
-    def _call_fut(self):
-        from google.cloud.datastore.client import _get_gcd_project
+    with mock.patch("os.getenv", new=environ.get):
+        project = _get_gcd_project()
+        assert project == PROJECT
 
-        return _get_gcd_project()
 
-    def test_no_value(self):
-        environ = {}
-        with mock.patch("os.getenv", new=environ.get):
-            project = self._call_fut()
-            self.assertIsNone(project)
+def _determine_default_helper(gcd=None, fallback=None, project_called=None):
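+    # Shared helper: patches both project-resolution hooks (_get_gcd_project and
+    # _base_default_project), runs _determine_default_project, and records which
+    # hooks were consulted so tests can assert on the lookup order.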
+    from google.cloud.datastore.client import _determine_default_project
 
-    def test_value_set(self):
-        from google.cloud.datastore.client import DATASTORE_DATASET
+    _callers = []
 
-        MOCK_PROJECT = object()
-        environ = {DATASTORE_DATASET: MOCK_PROJECT}
-        with mock.patch("os.getenv", new=environ.get):
-            project = self._call_fut()
-            self.assertEqual(project, MOCK_PROJECT)
+    def gcd_mock():
+        _callers.append("gcd_mock")
+        return gcd
 
+    def fallback_mock(project=None):
+        _callers.append(("fallback_mock", project))
+        return fallback
 
-class Test__determine_default_project(unittest.TestCase):
-    def _call_fut(self, project=None):
-        from google.cloud.datastore.client import _determine_default_project
+    patch = mock.patch.multiple(
+        "google.cloud.datastore.client",
+        _get_gcd_project=gcd_mock,
+        _base_default_project=fallback_mock,
+    )
+    with patch:
+        returned_project = _determine_default_project(project_called)
 
-        return _determine_default_project(project=project)
+    return returned_project, _callers
 
-    def _determine_default_helper(self, gcd=None, fallback=None, project_called=None):
-        _callers = []
 
-        def gcd_mock():
-            _callers.append("gcd_mock")
-            return gcd
+def test__determine_default_project_wo_value():
+    project, callers = _determine_default_helper()
+    assert project is None
+    assert callers == ["gcd_mock", ("fallback_mock", None)]
 
-        def fallback_mock(project=None):
-            _callers.append(("fallback_mock", project))
-            return fallback
 
-        patch = mock.patch.multiple(
-            "google.cloud.datastore.client",
-            _get_gcd_project=gcd_mock,
-            _base_default_project=fallback_mock,
-        )
-        with patch:
-            returned_project = self._call_fut(project_called)
-
-        return returned_project, _callers
-
-    def test_no_value(self):
-        project, callers = self._determine_default_helper()
-        self.assertIsNone(project)
-        self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)])
-
-    def test_explicit(self):
-        PROJECT = object()
-        project, callers = self._determine_default_helper(project_called=PROJECT)
-        self.assertEqual(project, PROJECT)
-        self.assertEqual(callers, [])
-
-    def test_gcd(self):
-        PROJECT = object()
-        project, callers = self._determine_default_helper(gcd=PROJECT)
-        self.assertEqual(project, PROJECT)
-        self.assertEqual(callers, ["gcd_mock"])
-
-    def test_fallback(self):
-        PROJECT = object()
-        project, callers = self._determine_default_helper(fallback=PROJECT)
-        self.assertEqual(project, PROJECT)
-        self.assertEqual(callers, ["gcd_mock", ("fallback_mock", None)])
-
-
-class TestClient(unittest.TestCase):
-
-    PROJECT = "PROJECT"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.client import Client
-
-        return Client
-
-    def _make_one(
-        self,
-        project=PROJECT,
-        namespace=None,
-        credentials=None,
-        client_info=None,
-        client_options=None,
-        _http=None,
-        _use_grpc=None,
-    ):
-        return self._get_target_class()(
-            project=project,
-            namespace=namespace,
-            credentials=credentials,
-            client_info=client_info,
-            client_options=client_options,
-            _http=_http,
-            _use_grpc=_use_grpc,
-        )
+def test__determine_default_project_w_explicit():
+    project, callers = _determine_default_helper(project_called=PROJECT)
+    assert project == PROJECT
+    assert callers == []
 
-    def test_constructor_w_project_no_environ(self):
-        # Some environments (e.g. AppVeyor CI) run in GCE, so
-        # this test would fail artificially.
-        patch = mock.patch(
-            "google.cloud.datastore.client._base_default_project", return_value=None
-        )
-        with patch:
-            self.assertRaises(EnvironmentError, self._make_one, None)
 
-    def test_constructor_w_implicit_inputs(self):
-        from google.cloud.datastore.client import _CLIENT_INFO
-        from google.cloud.datastore.client import _DATASTORE_BASE_URL
+def test__determine_default_project_w_gcd():
+    project, callers = _determine_default_helper(gcd=PROJECT)
+    assert project == PROJECT
+    assert callers == ["gcd_mock"]
 
-        klass = self._get_target_class()
-        other = "other"
-        creds = _make_credentials()
 
-        klass = self._get_target_class()
-        patch1 = mock.patch(
-            "google.cloud.datastore.client._determine_default_project",
-            return_value=other,
-        )
-        patch2 = mock.patch("google.auth.default", return_value=(creds, None))
-
-        with patch1 as _determine_default_project:
-            with patch2 as default:
-                client = klass()
-
-        self.assertEqual(client.project, other)
-        self.assertIsNone(client.namespace)
-        self.assertIs(client._credentials, creds)
-        self.assertIs(client._client_info, _CLIENT_INFO)
-        self.assertIsNone(client._http_internal)
-        self.assertIsNone(client._client_options)
-        self.assertEqual(client.base_url, _DATASTORE_BASE_URL)
-
-        self.assertIsNone(client.current_batch)
-        self.assertIsNone(client.current_transaction)
-
-        default.assert_called_once_with(scopes=klass.SCOPE,)
-        _determine_default_project.assert_called_once_with(None)
-
-    def test_constructor_w_explicit_inputs(self):
-        from google.api_core.client_options import ClientOptions
-
-        other = "other"
-        namespace = "namespace"
-        creds = _make_credentials()
-        client_info = mock.Mock()
-        client_options = ClientOptions("endpoint")
-        http = object()
-        client = self._make_one(
-            project=other,
-            namespace=namespace,
-            credentials=creds,
-            client_info=client_info,
-            client_options=client_options,
-            _http=http,
-        )
-        self.assertEqual(client.project, other)
-        self.assertEqual(client.namespace, namespace)
-        self.assertIs(client._credentials, creds)
-        self.assertIs(client._client_info, client_info)
-        self.assertIs(client._http_internal, http)
-        self.assertIsNone(client.current_batch)
-        self.assertIs(client._base_url, "endpoint")
-        self.assertEqual(list(client._batch_stack), [])
-
-    def test_constructor_use_grpc_default(self):
-        import google.cloud.datastore.client as MUT
-
-        project = "PROJECT"
-        creds = _make_credentials()
-        http = object()
-
-        with mock.patch.object(MUT, "_USE_GRPC", new=True):
-            client1 = self._make_one(project=project, credentials=creds, _http=http)
-            self.assertTrue(client1._use_grpc)
-            # Explicitly over-ride the environment.
-            client2 = self._make_one(
-                project=project, credentials=creds, _http=http, _use_grpc=False
-            )
-            self.assertFalse(client2._use_grpc)
-
-        with mock.patch.object(MUT, "_USE_GRPC", new=False):
-            client3 = self._make_one(project=project, credentials=creds, _http=http)
-            self.assertFalse(client3._use_grpc)
-            # Explicitly over-ride the environment.
-            client4 = self._make_one(
-                project=project, credentials=creds, _http=http, _use_grpc=True
-            )
-            self.assertTrue(client4._use_grpc)
-
-    def test_constructor_w_emulator_w_creds(self):
-        from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST
-
-        host = "localhost:1234"
-        fake_environ = {DATASTORE_EMULATOR_HOST: host}
-        project = "PROJECT"
-        creds = _make_credentials()
-        http = object()
-
-        with mock.patch("os.environ", new=fake_environ):
-            with self.assertRaises(ValueError):
-                self._make_one(project=project, credentials=creds, _http=http)
-
-    def test_constructor_w_emulator_wo_creds(self):
-        from google.auth.credentials import AnonymousCredentials
-        from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST
-
-        host = "localhost:1234"
-        fake_environ = {DATASTORE_EMULATOR_HOST: host}
-        project = "PROJECT"
-        http = object()
-
-        with mock.patch("os.environ", new=fake_environ):
-            client = self._make_one(project=project, _http=http)
-
-        self.assertEqual(client.base_url, "http://" + host)
-        self.assertIsInstance(client._credentials, AnonymousCredentials)
-
-    def test_base_url_property(self):
-        from google.cloud.datastore.client import _DATASTORE_BASE_URL
-        from google.api_core.client_options import ClientOptions
-
-        alternate_url = "https://alias.example.com/"
-        project = "PROJECT"
-        creds = _make_credentials()
-        http = object()
-        client_options = ClientOptions()
-
-        client = self._make_one(
-            project=project,
-            credentials=creds,
-            _http=http,
-            client_options=client_options,
-        )
-        self.assertEqual(client.base_url, _DATASTORE_BASE_URL)
-        client.base_url = alternate_url
-        self.assertEqual(client.base_url, alternate_url)
-
-    def test_base_url_property_w_client_options(self):
-        alternate_url = "https://alias.example.com/"
-        project = "PROJECT"
-        creds = _make_credentials()
-        http = object()
-        client_options = {"api_endpoint": "endpoint"}
-
-        client = self._make_one(
-            project=project,
-            credentials=creds,
-            _http=http,
-            client_options=client_options,
-        )
-        self.assertEqual(client.base_url, "endpoint")
-        client.base_url = alternate_url
-        self.assertEqual(client.base_url, alternate_url)
+def test__determine_default_project_w_fallback():
+    project, callers = _determine_default_helper(fallback=PROJECT)
+    assert project == PROJECT
+    assert callers == ["gcd_mock", ("fallback_mock", None)]
 
-    def test__datastore_api_property_already_set(self):
-        client = self._make_one(
-            project="prahj-ekt", credentials=_make_credentials(), _use_grpc=True
-        )
-        already = client._datastore_api_internal = object()
-        self.assertIs(client._datastore_api, already)
-
-    def test__datastore_api_property_gapic(self):
-        client_info = mock.Mock()
-        client = self._make_one(
-            project="prahj-ekt",
-            credentials=_make_credentials(),
-            client_info=client_info,
-            _http=object(),
-            _use_grpc=True,
-        )
 
-        self.assertIsNone(client._datastore_api_internal)
-        patch = mock.patch(
-            "google.cloud.datastore.client.make_datastore_api",
-            return_value=mock.sentinel.ds_api,
+def _make_client(
+    project=PROJECT,
+    namespace=None,
+    credentials=None,
+    client_info=None,
+    client_options=None,
+    _http=None,
+    _use_grpc=None,
+):
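+    # Shared client factory: every Client constructor argument is exposed so
+    # individual tests override only what they exercise.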
+    from google.cloud.datastore.client import Client
+
+    return Client(
+        project=project,
+        namespace=namespace,
+        credentials=credentials,
+        client_info=client_info,
+        client_options=client_options,
+        _http=_http,
+        _use_grpc=_use_grpc,
+    )
+
+
+def test_client_ctor_w_project_no_environ():
+    # Some environments (e.g. AppVeyor CI) run in GCE, so
+    # this test would fail artificially.
+    patch = mock.patch(
+        "google.cloud.datastore.client._base_default_project", return_value=None
+    )
+    with patch:
+        with pytest.raises(EnvironmentError):
+            _make_client(project=None)
+
+
+def test_client_ctor_w_implicit_inputs():
+    from google.cloud.datastore.client import Client
+    from google.cloud.datastore.client import _CLIENT_INFO
+    from google.cloud.datastore.client import _DATASTORE_BASE_URL
+
+    other = "other"
+    patch1 = mock.patch(
+        "google.cloud.datastore.client._determine_default_project", return_value=other,
+    )
+
+    creds = _make_credentials()
+    patch2 = mock.patch("google.auth.default", return_value=(creds, None))
+
+    with patch1 as _determine_default_project:
+        with patch2 as default:
+            client = Client()
+
+    assert client.project == other
+    assert client.namespace is None
+    assert client._credentials is creds
+    assert client._client_info is _CLIENT_INFO
+    assert client._http_internal is None
+    assert client._client_options is None
+    assert client.base_url == _DATASTORE_BASE_URL
+
+    assert client.current_batch is None
+    assert client.current_transaction is None
+
+    default.assert_called_once_with(scopes=Client.SCOPE,)
+    _determine_default_project.assert_called_once_with(None)
+
+
+def test_client_ctor_w_explicit_inputs():
+    from google.api_core.client_options import ClientOptions
+
+    other = "other"
+    namespace = "namespace"
+    creds = _make_credentials()
+    client_info = mock.Mock()
+    client_options = ClientOptions("endpoint")
+    http = object()
+    client = _make_client(
+        project=other,
+        namespace=namespace,
+        credentials=creds,
+        client_info=client_info,
+        client_options=client_options,
+        _http=http,
+    )
+    assert client.project == other
+    assert client.namespace == namespace
+    assert client._credentials is creds
+    assert client._client_info is client_info
+    assert client._http_internal is http
+    assert client.current_batch is None
+    assert client._base_url == "endpoint"
+    assert list(client._batch_stack) == []
+
+
+def test_client_ctor_use_grpc_default():
+    import google.cloud.datastore.client as MUT
+
+    project = "PROJECT"
+    creds = _make_credentials()
+    http = object()
+
+    with mock.patch.object(MUT, "_USE_GRPC", new=True):
+        client1 = _make_client(project=PROJECT, credentials=creds, _http=http)
+        assert client1._use_grpc
+        # Explicitly over-ride the environment.
+        client2 = _make_client(
+            project=PROJECT, credentials=creds, _http=http, _use_grpc=False
         )
-        with patch as make_api:
-            ds_api = client._datastore_api
-
-        self.assertIs(ds_api, mock.sentinel.ds_api)
-        self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api)
-        make_api.assert_called_once_with(client)
-
-    def test__datastore_api_property_http(self):
-        client_info = mock.Mock()
-        client = self._make_one(
-            project="prahj-ekt",
-            credentials=_make_credentials(),
-            client_info=client_info,
-            _http=object(),
-            _use_grpc=False,
+        assert not client2._use_grpc
+
+    with mock.patch.object(MUT, "_USE_GRPC", new=False):
+        client3 = _make_client(project=PROJECT, credentials=creds, _http=http)
+        assert not client3._use_grpc
+        # Explicitly over-ride the environment.
+        client4 = _make_client(
+            project=PROJECT, credentials=creds, _http=http, _use_grpc=True
         )
+        assert client4._use_grpc
 
-        self.assertIsNone(client._datastore_api_internal)
-        patch = mock.patch(
-            "google.cloud.datastore.client.HTTPDatastoreAPI",
-            return_value=mock.sentinel.ds_api,
-        )
-        with patch as make_api:
-            ds_api = client._datastore_api
 
-        self.assertIs(ds_api, mock.sentinel.ds_api)
-        self.assertIs(client._datastore_api_internal, mock.sentinel.ds_api)
-        make_api.assert_called_once_with(client)
+def test_client_ctor_w_emulator_w_creds():
+    from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST
 
-    def test__push_batch_and__pop_batch(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        batch = client.batch()
-        xact = client.transaction()
-        client._push_batch(batch)
-        self.assertEqual(list(client._batch_stack), [batch])
-        self.assertIs(client.current_batch, batch)
-        self.assertIsNone(client.current_transaction)
-        client._push_batch(xact)
-        self.assertIs(client.current_batch, xact)
-        self.assertIs(client.current_transaction, xact)
-        # list(_LocalStack) returns in reverse order.
-        self.assertEqual(list(client._batch_stack), [xact, batch])
-        self.assertIs(client._pop_batch(), xact)
-        self.assertEqual(list(client._batch_stack), [batch])
-        self.assertIs(client._pop_batch(), batch)
-        self.assertEqual(list(client._batch_stack), [])
-
-    def test_get_miss(self):
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        get_multi = client.get_multi = mock.Mock(return_value=[])
-
-        key = object()
-
-        self.assertIsNone(client.get(key))
-
-        get_multi.assert_called_once_with(
-            keys=[key],
-            missing=None,
-            deferred=None,
-            transaction=None,
-            eventual=False,
-            retry=None,
-            timeout=None,
-        )
+    host = "localhost:1234"
+    fake_environ = {DATASTORE_EMULATOR_HOST: host}
+    project = "PROJECT"
+    creds = _make_credentials()
+    http = object()
 
-    def test_get_hit(self):
-        TXN_ID = "123"
-        _called_with = []
-        _entity = object()
-
-        def _get_multi(*args, **kw):
-            _called_with.append((args, kw))
-            return [_entity]
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        client.get_multi = _get_multi
-
-        key, missing, deferred = object(), [], []
-
-        self.assertIs(client.get(key, missing, deferred, TXN_ID), _entity)
-
-        self.assertEqual(_called_with[0][0], ())
-        self.assertEqual(_called_with[0][1]["keys"], [key])
-        self.assertIs(_called_with[0][1]["missing"], missing)
-        self.assertIs(_called_with[0][1]["deferred"], deferred)
-        self.assertEqual(_called_with[0][1]["transaction"], TXN_ID)
-
-    def test_get_multi_no_keys(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        results = client.get_multi([])
-        self.assertEqual(results, [])
-
-    def test_get_multi_miss(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.key import Key
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        ds_api = _make_datastore_api()
-        client._datastore_api_internal = ds_api
-
-        key = Key("Kind", 1234, project=self.PROJECT)
-        results = client.get_multi([key])
-        self.assertEqual(results, [])
-
-        read_options = datastore_pb2.ReadOptions()
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key.to_protobuf()],
-                "read_options": read_options,
-            }
-        )
+    with mock.patch("os.environ", new=fake_environ):
+        with pytest.raises(ValueError):
+            _make_client(project=project, credentials=creds, _http=http)
 
-    def test_get_multi_miss_w_missing(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.key import Key
-
-        KIND = "Kind"
-        ID = 1234
-
-        # Make a missing entity pb to be returned from mock backend.
-        missed = entity_pb2.Entity()
-        missed.key.partition_id.project_id = self.PROJECT
-        path_element = missed._pb.key.path.add()
-        path_element.kind = KIND
-        path_element.id = ID
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        # Set missing entity on mock connection.
-        lookup_response = _make_lookup_response(missing=[missed._pb])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
-
-        key = Key(KIND, ID, project=self.PROJECT)
-        missing = []
-        entities = client.get_multi([key], missing=missing)
-        self.assertEqual(entities, [])
-        key_pb = key.to_protobuf()
-        self.assertEqual([missed.key.to_protobuf() for missed in missing], [key_pb._pb])
-
-        read_options = datastore_pb2.ReadOptions()
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key_pb],
-                "read_options": read_options,
-            }
-        )
 
-    def test_get_multi_w_missing_non_empty(self):
-        from google.cloud.datastore.key import Key
+def test_client_ctor_w_emulator_wo_creds():
+    from google.auth.credentials import AnonymousCredentials
+    from google.cloud.datastore.client import DATASTORE_EMULATOR_HOST
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        key = Key("Kind", 1234, project=self.PROJECT)
+    host = "localhost:1234"
+    fake_environ = {DATASTORE_EMULATOR_HOST: host}
+    project = "PROJECT"
+    http = object()
 
-        missing = ["this", "list", "is", "not", "empty"]
-        self.assertRaises(ValueError, client.get_multi, [key], missing=missing)
+    with mock.patch("os.environ", new=fake_environ):
+        client = _make_client(project=project, _http=http)
 
-    def test_get_multi_w_deferred_non_empty(self):
-        from google.cloud.datastore.key import Key
+    assert client.base_url == "http://" + host
+    assert isinstance(client._credentials, AnonymousCredentials)
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        key = Key("Kind", 1234, project=self.PROJECT)
 
-        deferred = ["this", "list", "is", "not", "empty"]
-        self.assertRaises(ValueError, client.get_multi, [key], deferred=deferred)
+def test_client_base_url_property():
+    from google.api_core.client_options import ClientOptions
+    from google.cloud.datastore.client import _DATASTORE_BASE_URL
 
-    def test_get_multi_miss_w_deferred(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.key import Key
+    alternate_url = "https://alias.example.com/"
+    creds = _make_credentials()
+    client_options = ClientOptions()
 
-        key = Key("Kind", 1234, project=self.PROJECT)
-        key_pb = key.to_protobuf()
+    client = _make_client(credentials=creds, client_options=client_options)
+    assert client.base_url == _DATASTORE_BASE_URL
 
-        # Set deferred entity on mock connection.
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        lookup_response = _make_lookup_response(deferred=[key_pb])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
+    client.base_url = alternate_url
+    assert client.base_url == alternate_url
 
-        deferred = []
-        entities = client.get_multi([key], deferred=deferred)
-        self.assertEqual(entities, [])
-        self.assertEqual([def_key.to_protobuf() for def_key in deferred], [key_pb])
 
-        read_options = datastore_pb2.ReadOptions()
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key_pb],
-                "read_options": read_options,
-            }
-        )
+def test_client_base_url_property_w_client_options():
+    alternate_url = "https://alias.example.com/"
+    creds = _make_credentials()
+    client_options = {"api_endpoint": "endpoint"}
 
-    def test_get_multi_w_deferred_from_backend_but_not_passed(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.key import Key
-
-        key1 = Key("Kind", project=self.PROJECT)
-        key1_pb = key1.to_protobuf()
-        key2 = Key("Kind", 2345, project=self.PROJECT)
-        key2_pb = key2.to_protobuf()
-
-        entity1_pb = entity_pb2.Entity()
-        entity1_pb._pb.key.CopyFrom(key1_pb._pb)
-        entity2_pb = entity_pb2.Entity()
-        entity2_pb._pb.key.CopyFrom(key2_pb._pb)
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        # Mock up two separate requests. Using an iterable as side_effect
-        # allows multiple return values.
-        lookup_response1 = _make_lookup_response(
-            results=[entity1_pb], deferred=[key2_pb]
-        )
-        lookup_response2 = _make_lookup_response(results=[entity2_pb])
-        ds_api = _make_datastore_api()
-        ds_api.lookup = mock.Mock(
-            side_effect=[lookup_response1, lookup_response2], spec=[]
-        )
-        client._datastore_api_internal = ds_api
-
-        missing = []
-        found = client.get_multi([key1, key2], missing=missing)
-        self.assertEqual(len(found), 2)
-        self.assertEqual(len(missing), 0)
-
-        # Check the actual contents on the response.
-        self.assertIsInstance(found[0], Entity)
-        self.assertEqual(found[0].key.path, key1.path)
-        self.assertEqual(found[0].key.project, key1.project)
-
-        self.assertIsInstance(found[1], Entity)
-        self.assertEqual(found[1].key.path, key2.path)
-        self.assertEqual(found[1].key.project, key2.project)
-
-        self.assertEqual(ds_api.lookup.call_count, 2)
-        read_options = datastore_pb2.ReadOptions()
-
-        ds_api.lookup.assert_any_call(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key2_pb],
-                "read_options": read_options,
-            },
-        )
+    client = _make_client(credentials=creds, client_options=client_options)
+    assert client.base_url == "endpoint"
 
-        ds_api.lookup.assert_any_call(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key1_pb, key2_pb],
-                "read_options": read_options,
-            },
-        )
+    client.base_url = alternate_url
+    assert client.base_url == alternate_url
 
-    def test_get_multi_hit_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.key import Key
-
-        kind = "Kind"
-        id_ = 1234
-        path = [{"kind": kind, "id": id_}]
-        retry = mock.Mock()
-        timeout = 100000
-
-        # Make a found entity pb to be returned from mock backend.
-        entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo")
-
-        # Make a connection to return the entity pb.
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        lookup_response = _make_lookup_response(results=[entity_pb])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
-
-        key = Key(kind, id_, project=self.PROJECT)
-        (result,) = client.get_multi([key], retry=retry, timeout=timeout)
-        new_key = result.key
-
-        # Check the returned value is as expected.
-        self.assertIsNot(new_key, key)
-        self.assertEqual(new_key.project, self.PROJECT)
-        self.assertEqual(new_key.path, path)
-        self.assertEqual(list(result), ["foo"])
-        self.assertEqual(result["foo"], "Foo")
-
-        read_options = datastore_pb2.ReadOptions()
-
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key.to_protobuf()],
-                "read_options": read_options,
-            },
-            retry=retry,
-            timeout=timeout,
-        )
 
-    def test_get_multi_hit_w_transaction(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.key import Key
-
-        txn_id = b"123"
-        kind = "Kind"
-        id_ = 1234
-        path = [{"kind": kind, "id": id_}]
-
-        # Make a found entity pb to be returned from mock backend.
-        entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo")
-
-        # Make a connection to return the entity pb.
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        lookup_response = _make_lookup_response(results=[entity_pb])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
-
-        key = Key(kind, id_, project=self.PROJECT)
-        txn = client.transaction()
-        txn._id = txn_id
-        (result,) = client.get_multi([key], transaction=txn)
-        new_key = result.key
-
-        # Check the returned value is as expected.
-        self.assertIsNot(new_key, key)
-        self.assertEqual(new_key.project, self.PROJECT)
-        self.assertEqual(new_key.path, path)
-        self.assertEqual(list(result), ["foo"])
-        self.assertEqual(result["foo"], "Foo")
-
-        read_options = datastore_pb2.ReadOptions(transaction=txn_id)
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key.to_protobuf()],
-                "read_options": read_options,
-            }
-        )
+def test_client__datastore_api_property_already_set():
+    client = _make_client(credentials=_make_credentials(), _use_grpc=True)
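+    # Pre-seed the internal cache; the property should return it unchanged.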
+    already = client._datastore_api_internal = object()
+    assert client._datastore_api is already
 
-    def test_get_multi_hit_multiple_keys_same_project(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore.key import Key
-
-        kind = "Kind"
-        id1 = 1234
-        id2 = 2345
-
-        # Make a found entity pb to be returned from mock backend.
-        entity_pb1 = _make_entity_pb(self.PROJECT, kind, id1)
-        entity_pb2 = _make_entity_pb(self.PROJECT, kind, id2)
-
-        # Make a connection to return the entity pbs.
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
-
-        key1 = Key(kind, id1, project=self.PROJECT)
-        key2 = Key(kind, id2, project=self.PROJECT)
-        retrieved1, retrieved2 = client.get_multi([key1, key2])
-
-        # Check values match.
-        self.assertEqual(retrieved1.key.path, key1.path)
-        self.assertEqual(dict(retrieved1), {})
-        self.assertEqual(retrieved2.key.path, key2.path)
-        self.assertEqual(dict(retrieved2), {})
-
-        read_options = datastore_pb2.ReadOptions()
-        ds_api.lookup.assert_called_once_with(
-            request={
-                "project_id": self.PROJECT,
-                "keys": [key1.to_protobuf(), key2.to_protobuf()],
-                "read_options": read_options,
-            }
-        )
 
-    def test_get_multi_hit_multiple_keys_different_project(self):
-        from google.cloud.datastore.key import Key
+def test_client__datastore_api_property_gapic():
+    client_info = mock.Mock()
+    client = _make_client(
+        project="prahj-ekt",
+        credentials=_make_credentials(),
+        client_info=client_info,
+        _http=object(),
+        _use_grpc=True,
+    )
 
-        PROJECT1 = "PROJECT"
-        PROJECT2 = "PROJECT-ALT"
+    assert client._datastore_api_internal is None
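+    # First access should build the API via make_datastore_api and cache it.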
+    patch = mock.patch(
+        "google.cloud.datastore.client.make_datastore_api",
+        return_value=mock.sentinel.ds_api,
+    )
+    with patch as make_api:
+        ds_api = client._datastore_api
+
+    assert ds_api is mock.sentinel.ds_api
+    assert client._datastore_api_internal is mock.sentinel.ds_api
+    make_api.assert_called_once_with(client)
+
+
+def test_client__datastore_api_property_http():
+    client_info = mock.Mock()
+    client = _make_client(
+        project="prahj-ekt",
+        credentials=_make_credentials(),
+        client_info=client_info,
+        _http=object(),
+        _use_grpc=False,
+    )
 
-        # Make sure our IDs are actually different.
-        self.assertNotEqual(PROJECT1, PROJECT2)
+    assert client._datastore_api_internal is None
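+    # With _use_grpc=False, first access should build HTTPDatastoreAPI and cache it.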
+    patch = mock.patch(
+        "google.cloud.datastore.client.HTTPDatastoreAPI",
+        return_value=mock.sentinel.ds_api,
+    )
+    with patch as make_api:
+        ds_api = client._datastore_api
 
-        key1 = Key("KIND", 1234, project=PROJECT1)
-        key2 = Key("KIND", 1234, project=PROJECT2)
+    assert ds_api is mock.sentinel.ds_api
+    assert client._datastore_api_internal is mock.sentinel.ds_api
+    make_api.assert_called_once_with(client)
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
 
-        with self.assertRaises(ValueError):
-            client.get_multi([key1, key2])
+def test_client__push_batch_and__pop_batch():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    batch = client.batch()
+    xact = client.transaction()
 
-    def test_get_multi_max_loops(self):
-        from google.cloud.datastore.key import Key
+    client._push_batch(batch)
+    assert list(client._batch_stack) == [batch]
+    assert client.current_batch is batch
+    assert client.current_transaction is None
 
-        kind = "Kind"
-        id_ = 1234
+    client._push_batch(xact)
+    assert client.current_batch is xact
+    assert client.current_transaction is xact
+    # list(_LocalStack) returns in reverse order.
+    assert list(client._batch_stack) == [xact, batch]
 
-        # Make a found entity pb to be returned from mock backend.
-        entity_pb = _make_entity_pb(self.PROJECT, kind, id_, "foo", "Foo")
+    assert client._pop_batch() is xact
+    assert list(client._batch_stack) == [batch]
+    assert client.current_batch is batch
+    assert client.current_transaction is None
 
-        # Make a connection to return the entity pb.
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        lookup_response = _make_lookup_response(results=[entity_pb])
-        ds_api = _make_datastore_api(lookup_response=lookup_response)
-        client._datastore_api_internal = ds_api
+    assert client._pop_batch() is batch
+    assert list(client._batch_stack) == []
 
-        key = Key(kind, id_, project=self.PROJECT)
-        deferred = []
-        missing = []
 
-        patch = mock.patch("google.cloud.datastore.client._MAX_LOOPS", new=-1)
-        with patch:
-            result = client.get_multi([key], missing=missing, deferred=deferred)
+def test_client_get_miss():
 
-        # Make sure we have no results, even though the connection has been
-        # set up as in `test_hit` to return a single result.
-        self.assertEqual(result, [])
-        self.assertEqual(missing, [])
-        self.assertEqual(deferred, [])
-        ds_api.lookup.assert_not_called()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    get_multi = client.get_multi = mock.Mock(return_value=[])
 
-    def test_put(self):
+    key = object()
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        put_multi = client.put_multi = mock.Mock()
-        entity = mock.Mock()
+    assert client.get(key) is None
 
-        client.put(entity)
+    get_multi.assert_called_once_with(
+        keys=[key],
+        missing=None,
+        deferred=None,
+        transaction=None,
+        eventual=False,
+        retry=None,
+        timeout=None,
+    )
 
-        put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None)
 
-    def test_put_w_retry_w_timeout(self):
+def test_client_get_hit():
+    txn_id = "123"
+    _entity = object()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    get_multi = client.get_multi = mock.Mock(return_value=[_entity])
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        put_multi = client.put_multi = mock.Mock()
-        entity = mock.Mock()
-        retry = mock.Mock()
-        timeout = 100000
+    key, missing, deferred = object(), [], []
 
-        client.put(entity, retry=retry, timeout=timeout)
+    assert client.get(key, missing, deferred, txn_id) is _entity
 
-        put_multi.assert_called_once_with(
-            entities=[entity], retry=retry, timeout=timeout
-        )
+    get_multi.assert_called_once_with(
+        keys=[key],
+        missing=missing,
+        deferred=deferred,
+        transaction=txn_id,
+        eventual=False,
+        retry=None,
+        timeout=None,
+    )
 
-    def test_put_multi_no_entities(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        self.assertIsNone(client.put_multi([]))
 
-    def test_put_multi_w_single_empty_entity(self):
-        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649
-        from google.cloud.datastore.entity import Entity
+def test_client_get_multi_no_keys():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    ds_api = _make_datastore_api()
+    client._datastore_api_internal = ds_api
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        self.assertRaises(ValueError, client.put_multi, Entity())
+    results = client.get_multi([])
 
-    def test_put_multi_no_batch_w_partial_key_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    assert results == []
 
-        entity = _Entity(foo=u"bar")
-        key = entity.key = _Key(_Key.kind, None)
-        retry = mock.Mock()
-        timeout = 100000
+    ds_api.lookup.assert_not_called()
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        key_pb = _make_key(234)
-        ds_api = _make_datastore_api(key_pb)
-        client._datastore_api_internal = ds_api
 
-        result = client.put_multi([entity], retry=retry, timeout=timeout)
-        self.assertIsNone(result)
+def test_client_get_multi_miss():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.key import Key
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    ds_api = _make_datastore_api()
+    client._datastore_api_internal = ds_api
+
+    key = Key("Kind", 1234, project=PROJECT)
+    results = client.get_multi([key])
+    assert results == []
+
+    read_options = datastore_pb2.ReadOptions()
+    ds_api.lookup.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "keys": [key.to_protobuf()],
+            "read_options": read_options,
+        }
+    )
 
-        self.assertEqual(ds_api.commit.call_count, 1)
-        _, positional, keyword = ds_api.commit.mock_calls[0]
 
-        self.assertEqual(len(positional), 0)
+def test_client_get_multi_miss_w_missing():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.key import Key
+
+    KIND = "Kind"
+    ID = 1234
+
+    # Make a missing entity pb to be returned from mock backend.
+    missed = entity_pb2.Entity()
+    missed.key.partition_id.project_id = PROJECT
+    path_element = missed._pb.key.path.add()
+    path_element.kind = KIND
+    path_element.id = ID
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    # Set missing entity on mock connection.
+    lookup_response = _make_lookup_response(missing=[missed._pb])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
+
+    key = Key(KIND, ID, project=PROJECT)
+    missing = []
+    entities = client.get_multi([key], missing=missing)
+    assert entities == []
+    key_pb = key.to_protobuf()
+    assert [missed.key.to_protobuf() for missed in missing] == [key_pb._pb]
+
+    read_options = datastore_pb2.ReadOptions()
+    ds_api.lookup.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options}
+    )
 
-        self.assertEqual(len(keyword), 3)
-        self.assertEqual(keyword["retry"], retry)
-        self.assertEqual(keyword["timeout"], timeout)
 
-        self.assertEqual(len(keyword["request"]), 4)
-        self.assertEqual(keyword["request"]["project_id"], self.PROJECT)
-        self.assertEqual(
-            keyword["request"]["mode"],
-            datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
-        )
-        self.assertEqual(keyword["request"]["transaction"], None)
-        mutations = keyword["request"]["mutations"]
-        mutated_entity = _mutated_pb(self, mutations, "insert")
-        self.assertEqual(mutated_entity.key, key.to_protobuf())
-
-        prop_list = list(mutated_entity.properties.items())
-        self.assertTrue(len(prop_list), 1)
-        name, value_pb = prop_list[0]
-        self.assertEqual(name, "foo")
-        self.assertEqual(value_pb.string_value, u"bar")
-
-    def test_put_multi_existing_batch_w_completed_key(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        entity = _Entity(foo=u"bar")
-        key = entity.key = _Key()
-
-        with _NoCommitBatch(client) as CURR_BATCH:
-            result = client.put_multi([entity])
-
-        self.assertIsNone(result)
-        mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, "upsert")
-        self.assertEqual(mutated_entity.key, key.to_protobuf())
-
-        prop_list = list(mutated_entity.properties.items())
-        self.assertTrue(len(prop_list), 1)
-        name, value_pb = prop_list[0]
-        self.assertEqual(name, "foo")
-        self.assertEqual(value_pb.string_value, u"bar")
-
-    def test_delete(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        delete_multi = client.delete_multi = mock.Mock()
-        key = mock.Mock()
-
-        client.delete(key)
-
-        delete_multi.assert_called_once_with(keys=[key], retry=None, timeout=None)
-
-    def test_delete_w_retry_w_timeout(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        delete_multi = client.delete_multi = mock.Mock()
-        key = mock.Mock()
-        retry = mock.Mock()
-        timeout = 100000
-
-        client.delete(key, retry=retry, timeout=timeout)
-
-        delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout)
-
-    def test_delete_multi_no_keys(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        client._datastore_api_internal = _make_datastore_api()
-
-        result = client.delete_multi([])
-        self.assertIsNone(result)
-        client._datastore_api_internal.commit.assert_not_called()
-
-    def test_delete_multi_no_batch_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        key = _Key()
-        retry = mock.Mock()
-        timeout = 100000
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        ds_api = _make_datastore_api()
-        client._datastore_api_internal = ds_api
-
-        result = client.delete_multi([key], retry=retry, timeout=timeout)
-        self.assertIsNone(result)
-
-        self.assertEqual(ds_api.commit.call_count, 1)
-        _, positional, keyword = ds_api.commit.mock_calls[0]
-
-        self.assertEqual(len(positional), 0)
-
-        self.assertEqual(len(keyword), 3)
-        self.assertEqual(keyword["retry"], retry)
-        self.assertEqual(keyword["timeout"], timeout)
-
-        self.assertEqual(len(keyword["request"]), 4)
-        self.assertEqual(keyword["request"]["project_id"], self.PROJECT)
-        self.assertEqual(
-            keyword["request"]["mode"],
-            datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
-        )
-        self.assertEqual(keyword["request"]["transaction"], None)
-        mutations = keyword["request"]["mutations"]
-        mutated_key = _mutated_pb(self, mutations, "delete")
-        self.assertEqual(mutated_key, key.to_protobuf())
+def test_client_get_multi_w_missing_non_empty():
+    from google.cloud.datastore.key import Key
 
-    def test_delete_multi_w_existing_batch(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        client._datastore_api_internal = _make_datastore_api()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    key = Key("Kind", 1234, project=PROJECT)
 
-        key = _Key()
+    missing = ["this", "list", "is", "not", "empty"]
+    with pytest.raises(ValueError):
+        client.get_multi([key], missing=missing)
 
-        with _NoCommitBatch(client) as CURR_BATCH:
-            result = client.delete_multi([key])
 
-        self.assertIsNone(result)
-        mutated_key = _mutated_pb(self, CURR_BATCH.mutations, "delete")
-        self.assertEqual(mutated_key, key._key)
-        client._datastore_api_internal.commit.assert_not_called()
+def test_client_get_multi_w_deferred_non_empty():
+    from google.cloud.datastore.key import Key
 
-    def test_delete_multi_w_existing_transaction(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        client._datastore_api_internal = _make_datastore_api()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    key = Key("Kind", 1234, project=PROJECT)
 
-        key = _Key()
+    deferred = ["this", "list", "is", "not", "empty"]
+    with pytest.raises(ValueError):
+        client.get_multi([key], deferred=deferred)
 
-        with _NoCommitTransaction(client) as CURR_XACT:
-            result = client.delete_multi([key])
 
-        self.assertIsNone(result)
-        mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete")
-        self.assertEqual(mutated_key, key._key)
-        client._datastore_api_internal.commit.assert_not_called()
+def test_client_get_multi_miss_w_deferred():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.key import Key
+
+    key = Key("Kind", 1234, project=PROJECT)
+    key_pb = key.to_protobuf()
+
+    # Set deferred entity on mock connection.
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    lookup_response = _make_lookup_response(deferred=[key_pb])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
+
+    deferred = []
+    entities = client.get_multi([key], deferred=deferred)
+    assert entities == []
+    assert [def_key.to_protobuf() for def_key in deferred] == [key_pb]
+
+    read_options = datastore_pb2.ReadOptions()
+    ds_api.lookup.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": [key_pb], "read_options": read_options}
+    )
 
-    def test_delete_multi_w_existing_transaction_entity(self):
-        from google.cloud.datastore.entity import Entity
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        client._datastore_api_internal = _make_datastore_api()
+def test_client_get_multi_w_deferred_from_backend_but_not_passed():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.key import Key
+
+    key1 = Key("Kind", project=PROJECT)
+    key1_pb = key1.to_protobuf()
+    key2 = Key("Kind", 2345, project=PROJECT)
+    key2_pb = key2.to_protobuf()
+
+    entity1_pb = entity_pb2.Entity()
+    entity1_pb._pb.key.CopyFrom(key1_pb._pb)
+    entity2_pb = entity_pb2.Entity()
+    entity2_pb._pb.key.CopyFrom(key2_pb._pb)
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    # Mock up two separate requests. Using an iterable as side_effect
+    # allows multiple return values.
+    lookup_response1 = _make_lookup_response(results=[entity1_pb], deferred=[key2_pb])
+    lookup_response2 = _make_lookup_response(results=[entity2_pb])
+    ds_api = _make_datastore_api()
+    ds_api.lookup = mock.Mock(side_effect=[lookup_response1, lookup_response2], spec=[])
+    client._datastore_api_internal = ds_api
+
+    missing = []
+    found = client.get_multi([key1, key2], missing=missing)
+    assert len(found) == 2
+    assert len(missing) == 0
+
+    # Check the actual contents of the response.
+    assert isinstance(found[0], Entity)
+    assert found[0].key.path == key1.path
+    assert found[0].key.project == key1.project
+
+    assert isinstance(found[1], Entity)
+    assert found[1].key.path == key2.path
+    assert found[1].key.project == key2.project
+
+    assert ds_api.lookup.call_count == 2
+    read_options = datastore_pb2.ReadOptions()
+
+    ds_api.lookup.assert_any_call(
+        request={
+            "project_id": PROJECT,
+            "keys": [key2_pb],
+            "read_options": read_options,
+        },
+    )
 
-        key = _Key()
-        entity = Entity(key=key)
+    ds_api.lookup.assert_any_call(
+        request={
+            "project_id": PROJECT,
+            "keys": [key1_pb, key2_pb],
+            "read_options": read_options,
+        },
+    )
 
-        with _NoCommitTransaction(client) as CURR_XACT:
-            result = client.delete_multi([entity])
 
-        self.assertIsNone(result)
-        mutated_key = _mutated_pb(self, CURR_XACT.mutations, "delete")
-        self.assertEqual(mutated_key, key._key)
-        client._datastore_api_internal.commit.assert_not_called()
+def test_client_get_multi_hit_w_retry_w_timeout():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.key import Key
+
+    kind = "Kind"
+    id_ = 1234
+    path = [{"kind": kind, "id": id_}]
+    retry = mock.Mock()
+    timeout = 100000
+
+    # Make a found entity pb to be returned from mock backend.
+    entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo")
+
+    # Make a connection to return the entity pb.
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    lookup_response = _make_lookup_response(results=[entity_pb])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
+
+    key = Key(kind, id_, project=PROJECT)
+    (result,) = client.get_multi([key], retry=retry, timeout=timeout)
+    new_key = result.key
+
+    # Check the returned value is as expected.
+    assert new_key is not key
+    assert new_key.project == PROJECT
+    assert new_key.path == path
+    assert list(result) == ["foo"]
+    assert result["foo"] == "Foo"
+
+    read_options = datastore_pb2.ReadOptions()
+
+    ds_api.lookup.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "keys": [key.to_protobuf()],
+            "read_options": read_options,
+        },
+        retry=retry,
+        timeout=timeout,
+    )
 
-    def test_allocate_ids_w_partial_key(self):
-        num_ids = 2
 
-        incomplete_key = _Key(_Key.kind, None)
+def test_client_get_multi_hit_w_transaction():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.key import Key
+
+    txn_id = b"123"
+    kind = "Kind"
+    id_ = 1234
+    path = [{"kind": kind, "id": id_}]
+
+    # Make a found entity pb to be returned from mock backend.
+    entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo")
+
+    # Make a connection to return the entity pb.
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    lookup_response = _make_lookup_response(results=[entity_pb])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
+
+    key = Key(kind, id_, project=PROJECT)
+    txn = client.transaction()
+    txn._id = txn_id
+    (result,) = client.get_multi([key], transaction=txn)
+    new_key = result.key
+
+    # Check the returned value is as expected.
+    assert new_key is not key
+    assert new_key.project == PROJECT
+    assert new_key.path == path
+    assert list(result) == ["foo"]
+    assert result["foo"] == "Foo"
+
+    read_options = datastore_pb2.ReadOptions(transaction=txn_id)
+    ds_api.lookup.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "keys": [key.to_protobuf()],
+            "read_options": read_options,
+        }
+    )
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"])
-        alloc_ids = mock.Mock(return_value=allocated, spec=[])
-        ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"])
-        client._datastore_api_internal = ds_api
 
-        result = client.allocate_ids(incomplete_key, num_ids)
+def test_client_get_multi_hit_multiple_keys_same_project():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.key import Key
+
+    kind = "Kind"
+    id1 = 1234
+    id2 = 2345
+
+    # Make a found entity pb to be returned from mock backend.
+    entity_pb1 = _make_entity_pb(PROJECT, kind, id1)
+    entity_pb2 = _make_entity_pb(PROJECT, kind, id2)
+
+    # Make a connection to return the entity pbs.
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    lookup_response = _make_lookup_response(results=[entity_pb1, entity_pb2])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
+
+    key1 = Key(kind, id1, project=PROJECT)
+    key2 = Key(kind, id2, project=PROJECT)
+    retrieved1, retrieved2 = client.get_multi([key1, key2])
+
+    # Check values match.
+    assert retrieved1.key.path == key1.path
+    assert dict(retrieved1) == {}
+    assert retrieved2.key.path == key2.path
+    assert dict(retrieved2) == {}
+
+    read_options = datastore_pb2.ReadOptions()
+    ds_api.lookup.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "keys": [key1.to_protobuf(), key2.to_protobuf()],
+            "read_options": read_options,
+        }
+    )
 
-        # Check the IDs returned.
-        self.assertEqual([key.id for key in result], list(range(num_ids)))
 
-        expected_keys = [incomplete_key.to_protobuf()] * num_ids
-        alloc_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
-        )
+def test_client_get_multi_hit_multiple_keys_different_project():
+    from google.cloud.datastore.key import Key
 
-    def test_allocate_ids_w_partial_key_w_retry_w_timeout(self):
-        num_ids = 2
+    PROJECT1 = "PROJECT"
+    PROJECT2 = "PROJECT-ALT"
 
-        incomplete_key = _Key(_Key.kind, None)
-        retry = mock.Mock()
-        timeout = 100000
+    key1 = Key("KIND", 1234, project=PROJECT1)
+    key2 = Key("KIND", 1234, project=PROJECT2)
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"])
-        alloc_ids = mock.Mock(return_value=allocated, spec=[])
-        ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"])
-        client._datastore_api_internal = ds_api
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
 
-        result = client.allocate_ids(
-            incomplete_key, num_ids, retry=retry, timeout=timeout
-        )
+    with pytest.raises(ValueError):
+        client.get_multi([key1, key2])
 
-        # Check the IDs returned.
-        self.assertEqual([key.id for key in result], list(range(num_ids)))
 
-        expected_keys = [incomplete_key.to_protobuf()] * num_ids
-        alloc_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys},
-            retry=retry,
-            timeout=timeout,
-        )
+def test_client_get_multi_max_loops():
+    from google.cloud.datastore.key import Key
 
-    def test_allocate_ids_w_completed_key(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
+    kind = "Kind"
+    id_ = 1234
 
-        complete_key = _Key()
-        self.assertRaises(ValueError, client.allocate_ids, complete_key, 2)
+    # Make a found entity pb to be returned from mock backend.
+    entity_pb = _make_entity_pb(PROJECT, kind, id_, "foo", "Foo")
 
-    def test_reserve_ids_sequential_w_completed_key(self):
-        num_ids = 2
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key()
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-        self.assertTrue(not complete_key.is_partial)
+    # Make a connection to return the entity pb.
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    lookup_response = _make_lookup_response(results=[entity_pb])
+    ds_api = _make_datastore_api(lookup_response=lookup_response)
+    client._datastore_api_internal = ds_api
 
-        client.reserve_ids_sequential(complete_key, num_ids)
+    key = Key(kind, id_, project=PROJECT)
+    deferred = []
+    missing = []
 
-        reserved_keys = (
-            _Key(_Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
-        )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
-        )
+    patch = mock.patch("google.cloud.datastore.client._MAX_LOOPS", new=-1)
+    with patch:
+        result = client.get_multi([key], missing=missing, deferred=deferred)
 
-    def test_reserve_ids_sequential_w_completed_key_w_retry_w_timeout(self):
-        num_ids = 2
-        retry = mock.Mock()
-        timeout = 100000
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key()
-        self.assertTrue(not complete_key.is_partial)
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-
-        client.reserve_ids_sequential(
-            complete_key, num_ids, retry=retry, timeout=timeout
-        )
+    # Make sure we have no results, even though the connection has been
+    # set up, as in the "hit" tests above, to return a single result.
+    assert result == []
+    assert missing == []
+    assert deferred == []
+    ds_api.lookup.assert_not_called()
 
-        reserved_keys = (
-            _Key(_Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
-        )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys},
-            retry=retry,
-            timeout=timeout,
-        )
 
-    def test_reserve_ids_sequential_w_completed_key_w_ancestor(self):
-        num_ids = 2
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234)
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-        self.assertTrue(not complete_key.is_partial)
+def test_client_put():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    put_multi = client.put_multi = mock.Mock()
+    entity = mock.Mock()
+
+    client.put(entity)
+
+    put_multi.assert_called_once_with(entities=[entity], retry=None, timeout=None)
+
+
+def test_client_put_w_retry_w_timeout():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    put_multi = client.put_multi = mock.Mock()
+    entity = mock.Mock()
+    retry = mock.Mock()
+    timeout = 100000
+
+    client.put(entity, retry=retry, timeout=timeout)
+
+    put_multi.assert_called_once_with(entities=[entity], retry=retry, timeout=timeout)
+
+
+def test_client_put_multi_no_entities():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    assert client.put_multi([]) is None
+
+
+def test_client_put_multi_w_single_empty_entity():
+    # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/649
+    from google.cloud.datastore.entity import Entity
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        client.put_multi(Entity())
+
+
+def test_client_put_multi_no_batch_w_partial_key_w_retry_w_timeout():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    entity = _Entity(foo=u"bar")
+    key = entity.key = _Key(_Key.kind, None)
+    retry = mock.Mock()
+    timeout = 100000
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    key_pb = _make_key(234)
+    ds_api = _make_datastore_api(key_pb)
+    client._datastore_api_internal = ds_api
+
+    result = client.put_multi([entity], retry=retry, timeout=timeout)
+    assert result is None
+
+    ds_api.commit.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
+            "mutations": mock.ANY,
+            "transaction": None,
+        },
+        retry=retry,
+        timeout=timeout,
+    )
+
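+    # The mutations were matched with mock.ANY above; inspect them explicitly here.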
+    mutations = ds_api.commit.call_args[1]["request"]["mutations"]
+    mutated_entity = _mutated_pb(mutations, "insert")
+    assert mutated_entity.key == key.to_protobuf()
+
+    prop_list = list(mutated_entity.properties.items())
+    assert len(prop_list) == 1
+    name, value_pb = prop_list[0]
+    assert name == "foo"
+    assert value_pb.string_value == u"bar"
+
+
+def test_client_put_multi_existing_batch_w_completed_key():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    entity = _Entity(foo=u"bar")
+    key = entity.key = _Key()
+
+    with _NoCommitBatch(client) as CURR_BATCH:
+        result = client.put_multi([entity])
+
+    assert result is None
+    mutated_entity = _mutated_pb(CURR_BATCH.mutations, "upsert")
+    assert mutated_entity.key == key.to_protobuf()
+
+    prop_list = list(mutated_entity.properties.items())
+    assert len(prop_list) == 1
+    name, value_pb = prop_list[0]
+    assert name == "foo"
+    assert value_pb.string_value == u"bar"
+
+
+def test_client_delete():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    delete_multi = client.delete_multi = mock.Mock()
+    key = mock.Mock()
+
+    client.delete(key)
+
+    delete_multi.assert_called_once_with(keys=[key], retry=None, timeout=None)
+
+
+def test_client_delete_w_retry_w_timeout():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    delete_multi = client.delete_multi = mock.Mock()
+    key = mock.Mock()
+    retry = mock.Mock()
+    timeout = 100000
+
+    client.delete(key, retry=retry, timeout=timeout)
+
+    delete_multi.assert_called_once_with(keys=[key], retry=retry, timeout=timeout)
+
+
+def test_client_delete_multi_no_keys():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    client._datastore_api_internal = _make_datastore_api()
+
+    result = client.delete_multi([])
+    assert result is None
+    client._datastore_api_internal.commit.assert_not_called()
+
+
+def test_client_delete_multi_no_batch_w_retry_w_timeout():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    key = _Key()
+    retry = mock.Mock()
+    timeout = 100000
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    ds_api = _make_datastore_api()
+    client._datastore_api_internal = ds_api
+
+    result = client.delete_multi([key], retry=retry, timeout=timeout)
+    assert result is None
+
+    ds_api.commit.assert_called_once_with(
+        request={
+            "project_id": PROJECT,
+            "mode": datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL,
+            "mutations": mock.ANY,
+            "transaction": None,
+        },
+        retry=retry,
+        timeout=timeout,
+    )
+
+    mutations = ds_api.commit.call_args[1]["request"]["mutations"]
+    mutated_key = _mutated_pb(mutations, "delete")
+    assert mutated_key == key.to_protobuf()
+
+
+def test_client_delete_multi_w_existing_batch():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    client._datastore_api_internal = _make_datastore_api()
+
+    key = _Key()
+
+    with _NoCommitBatch(client) as CURR_BATCH:
+        result = client.delete_multi([key])
+
+    assert result is None
+    mutated_key = _mutated_pb(CURR_BATCH.mutations, "delete")
+    assert mutated_key == key._key
+    client._datastore_api_internal.commit.assert_not_called()
+
+
+def test_client_delete_multi_w_existing_transaction():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    client._datastore_api_internal = _make_datastore_api()
+
+    key = _Key()
+
+    with _NoCommitTransaction(client) as CURR_XACT:
+        result = client.delete_multi([key])
+
+    assert result is None
+    mutated_key = _mutated_pb(CURR_XACT.mutations, "delete")
+    assert mutated_key == key._key
+    client._datastore_api_internal.commit.assert_not_called()
+
+
+def test_client_delete_multi_w_existing_transaction_entity():
+    from google.cloud.datastore.entity import Entity
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    client._datastore_api_internal = _make_datastore_api()
+
+    key = _Key()
+    entity = Entity(key=key)
+
+    with _NoCommitTransaction(client) as CURR_XACT:
+        result = client.delete_multi([entity])
+
+    assert result is None
+    mutated_key = _mutated_pb(CURR_XACT.mutations, "delete")
+    assert mutated_key == key._key
+    client._datastore_api_internal.commit.assert_not_called()
+
+
+def test_client_allocate_ids_w_completed_key():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    complete_key = _Key()
+    with pytest.raises(ValueError):
+        client.allocate_ids(complete_key, 2)
+
+
+def test_client_allocate_ids_w_partial_key():
+    num_ids = 2
+
+    incomplete_key = _Key(_Key.kind, None)
 
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"])
+    alloc_ids = mock.Mock(return_value=allocated, spec=[])
+    ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"])
+    client._datastore_api_internal = ds_api
+
+    result = client.allocate_ids(incomplete_key, num_ids)
+
+    # Check the IDs returned.
+    assert [key.id for key in result] == list(range(num_ids))
+
+    expected_keys = [incomplete_key.to_protobuf()] * num_ids
+    alloc_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
+
+
+def test_client_allocate_ids_w_partial_key_w_retry_w_timeout():
+    num_ids = 2
+
+    incomplete_key = _Key(_Key.kind, None)
+    retry = mock.Mock()
+    timeout = 100000
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    allocated = mock.Mock(keys=[_KeyPB(i) for i in range(num_ids)], spec=["keys"])
+    alloc_ids = mock.Mock(return_value=allocated, spec=[])
+    ds_api = mock.Mock(allocate_ids=alloc_ids, spec=["allocate_ids"])
+    client._datastore_api_internal = ds_api
+
+    result = client.allocate_ids(incomplete_key, num_ids, retry=retry, timeout=timeout)
+
+    # Check the IDs returned.
+    assert [key.id for key in result] == list(range(num_ids))
+
+    expected_keys = [incomplete_key.to_protobuf()] * num_ids
+    alloc_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys},
+        retry=retry,
+        timeout=timeout,
+    )
+
+
+def test_client_reserve_ids_sequential_w_completed_key():
+    num_ids = 2
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key()
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+    assert not complete_key.is_partial
+
+    client.reserve_ids_sequential(complete_key, num_ids)
+
+    reserved_keys = (
+        _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
+
+
+def test_client_reserve_ids_sequential_w_completed_key_w_retry_w_timeout():
+    num_ids = 2
+    retry = mock.Mock()
+    timeout = 100000
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key()
+    assert not complete_key.is_partial
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+
+    client.reserve_ids_sequential(complete_key, num_ids, retry=retry, timeout=timeout)
+
+    reserved_keys = (
+        _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys},
+        retry=retry,
+        timeout=timeout,
+    )
+
+
+def test_client_reserve_ids_sequential_w_completed_key_w_ancestor():
+    num_ids = 2
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234)
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+    assert not complete_key.is_partial
+
+    client.reserve_ids_sequential(complete_key, num_ids)
+
+    reserved_keys = (
+        _Key("PARENT", "SINGLETON", _Key.kind, id)
+        for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
+
+
+def test_client_reserve_ids_sequential_w_partial_key():
+    num_ids = 2
+    incomplete_key = _Key(_Key.kind, None)
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        client.reserve_ids_sequential(incomplete_key, num_ids)
+
+
+def test_client_reserve_ids_sequential_w_wrong_num_ids():
+    num_ids = "2"
+    complete_key = _Key()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
         client.reserve_ids_sequential(complete_key, num_ids)
 
-        reserved_keys = (
-            _Key("PARENT", "SINGLETON", _Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
-        )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
-        )
 
-    def test_reserve_ids_sequential_w_partial_key(self):
-        num_ids = 2
-        incomplete_key = _Key(_Key.kind, None)
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            client.reserve_ids_sequential(incomplete_key, num_ids)
-
-    def test_reserve_ids_sequential_w_wrong_num_ids(self):
-        num_ids = "2"
-        complete_key = _Key()
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            client.reserve_ids_sequential(complete_key, num_ids)
-
-    def test_reserve_ids_sequential_w_non_numeric_key_name(self):
-        num_ids = 2
-        complete_key = _Key(_Key.kind, "batman")
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            client.reserve_ids_sequential(complete_key, num_ids)
-
-    def test_reserve_ids_w_completed_key(self):
-        import warnings
-
-        num_ids = 2
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key()
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-        self.assertTrue(not complete_key.is_partial)
+def test_client_reserve_ids_sequential_w_non_numeric_key_name():
+    num_ids = 2
+    complete_key = _Key(_Key.kind, "batman")
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        client.reserve_ids_sequential(complete_key, num_ids)
+
 
+def _assert_reserve_ids_warning(warned):
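+    # Exactly one deprecation warning is expected per reserve_ids call.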
+    assert len(warned) == 1
+    assert "Client.reserve_ids is deprecated." in str(warned[0].message)
+
+
+def test_client_reserve_ids_w_partial_key():
+    import warnings
+
+    num_ids = 2
+    incomplete_key = _Key(_Key.kind, None)
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        with warnings.catch_warnings(record=True) as warned:
+            client.reserve_ids(incomplete_key, num_ids)
+
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_reserve_ids_w_wrong_num_ids():
+    import warnings
+
+    num_ids = "2"
+    complete_key = _Key()
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        with warnings.catch_warnings(record=True) as warned:
+            client.reserve_ids(complete_key, num_ids)
+
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_reserve_ids_w_non_numeric_key_name():
+    import warnings
+
+    num_ids = 2
+    complete_key = _Key(_Key.kind, "batman")
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
         with warnings.catch_warnings(record=True) as warned:
             client.reserve_ids(complete_key, num_ids)
 
-        reserved_keys = (
-            _Key(_Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_reserve_ids_w_completed_key():
+    import warnings
+
+    num_ids = 2
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key()
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+    assert not complete_key.is_partial
+
+    with warnings.catch_warnings(record=True) as warned:
+        client.reserve_ids(complete_key, num_ids)
+
+    reserved_keys = (
+        _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_reserve_ids_w_completed_key_w_retry_w_timeout():
+    import warnings
+
+    num_ids = 2
+    retry = mock.Mock()
+    timeout = 100000
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key()
+    assert not complete_key.is_partial
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+
+    with warnings.catch_warnings(record=True) as warned:
+        client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout)
+
+    reserved_keys = (
+        _Key(_Key.kind, id) for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys},
+        retry=retry,
+        timeout=timeout,
+    )
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_reserve_ids_w_completed_key_w_ancestor():
+    import warnings
+
+    num_ids = 2
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234)
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+    assert not complete_key.is_partial
+
+    with warnings.catch_warnings(record=True) as warned:
+        client.reserve_ids(complete_key, num_ids)
+
+    reserved_keys = (
+        _Key("PARENT", "SINGLETON", _Key.kind, id)
+        for id in range(complete_key.id, complete_key.id + num_ids)
+    )
+    expected_keys = [key.to_protobuf() for key in reserved_keys]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
+
+    _assert_reserve_ids_warning(warned)
+
+
+def test_client_key_w_project():
+    KIND = "KIND"
+    ID = 1234
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    with pytest.raises(TypeError):
+        client.key(KIND, ID, project=PROJECT)
+
+
+def test_client_key_wo_project():
+    kind = "KIND"
+    id_ = 1234
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
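+    # Key construction is patched so only the forwarded arguments are verified.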
+    patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
+    with patch as mock_klass:
+        key = client.key(kind, id_)
+        assert key is mock_klass.return_value
+        mock_klass.assert_called_once_with(kind, id_, project=PROJECT, namespace=None)
+
+
+def test_client_key_w_namespace():
+    kind = "KIND"
+    id_ = 1234
+    namespace = object()
+
+    creds = _make_credentials()
+    client = _make_client(namespace=namespace, credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
+    with patch as mock_klass:
+        key = client.key(kind, id_)
+        assert key is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            kind, id_, project=PROJECT, namespace=namespace
         )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
+
+
+def test_client_key_w_namespace_collision():
+    kind = "KIND"
+    id_ = 1234
+    namespace1 = object()
+    namespace2 = object()
+
+    creds = _make_credentials()
+    client = _make_client(namespace=namespace1, credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
+    with patch as mock_klass:
+        key = client.key(kind, id_, namespace=namespace2)
+        assert key is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            kind, id_, project=PROJECT, namespace=namespace2
         )
 
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
 
-    def test_reserve_ids_w_completed_key_w_retry_w_timeout(self):
-        import warnings
+def test_client_entity_w_defaults():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
 
-        num_ids = 2
-        retry = mock.Mock()
-        timeout = 100000
+    patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"])
+    with patch as mock_klass:
+        entity = client.entity()
+        assert entity is mock_klass.return_value
+        mock_klass.assert_called_once_with(key=None, exclude_from_indexes=())
 
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key()
-        self.assertTrue(not complete_key.is_partial)
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
 
-        with warnings.catch_warnings(record=True) as warned:
-            client.reserve_ids(complete_key, num_ids, retry=retry, timeout=timeout)
+def test_client_entity_w_explicit():
+    key = mock.Mock(spec=[])
+    exclude_from_indexes = ["foo", "bar"]
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
 
-        reserved_keys = (
-            _Key(_Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
-        )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys},
-            retry=retry,
-            timeout=timeout,
+    patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"])
+    with patch as mock_klass:
+        entity = client.entity(key, exclude_from_indexes)
+        assert entity is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            key=key, exclude_from_indexes=exclude_from_indexes
         )
 
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
 
-    def test_reserve_ids_w_completed_key_w_ancestor(self):
-        import warnings
+def test_client_batch():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
 
-        num_ids = 2
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        complete_key = _Key("PARENT", "SINGLETON", _Key.kind, 1234)
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-        self.assertTrue(not complete_key.is_partial)
+    patch = mock.patch("google.cloud.datastore.client.Batch", spec=["__call__"])
+    with patch as mock_klass:
+        batch = client.batch()
+        assert batch is mock_klass.return_value
+        mock_klass.assert_called_once_with(client)
 
-        with warnings.catch_warnings(record=True) as warned:
-            client.reserve_ids(complete_key, num_ids)
 
-        reserved_keys = (
-            _Key("PARENT", "SINGLETON", _Key.kind, id)
-            for id in range(complete_key.id, complete_key.id + num_ids)
+def test_client_transaction_w_defaults():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Transaction", spec=["__call__"])
+    with patch as mock_klass:
+        xact = client.transaction()
+        assert xact is mock_klass.return_value
+        mock_klass.assert_called_once_with(client)
+
+
+def test_client_transaction_w_read_only():
+    from google.cloud.datastore_v1.types import TransactionOptions
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    xact = client.transaction(read_only=True)
+    options = TransactionOptions(read_only=TransactionOptions.ReadOnly())
+    assert xact._options == options
+    assert not xact._options._pb.HasField("read_write")
+    assert xact._options._pb.HasField("read_only")
+    assert xact._options._pb.read_only == TransactionOptions.ReadOnly()._pb
+
+
+def test_client_query_w_other_client():
+    KIND = "KIND"
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    other = _make_client(credentials=_make_credentials())
+
+    with pytest.raises(TypeError):
+        client.query(kind=KIND, client=other)
+
+
+def test_client_query_w_project():
+    KIND = "KIND"
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    with pytest.raises(TypeError):
+        client.query(kind=KIND, project=PROJECT)
+
+
+def test_client_query_w_defaults():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
+    with patch as mock_klass:
+        query = client.query()
+        assert query is mock_klass.return_value
+        mock_klass.assert_called_once_with(client, project=PROJECT, namespace=None)
+
+
+def test_client_query_w_explicit():
+    kind = "KIND"
+    namespace = "NAMESPACE"
+    ancestor = object()
+    filters = [("PROPERTY", "==", "VALUE")]
+    projection = ["__key__"]
+    order = ["PROPERTY"]
+    distinct_on = ["DISTINCT_ON"]
+
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
+    with patch as mock_klass:
+        query = client.query(
+            kind=kind,
+            namespace=namespace,
+            ancestor=ancestor,
+            filters=filters,
+            projection=projection,
+            order=order,
+            distinct_on=distinct_on,
         )
-        expected_keys = [key.to_protobuf() for key in reserved_keys]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
+        assert query is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            client,
+            project=PROJECT,
+            kind=kind,
+            namespace=namespace,
+            ancestor=ancestor,
+            filters=filters,
+            projection=projection,
+            order=order,
+            distinct_on=distinct_on,
         )
 
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
-
-    def test_reserve_ids_w_partial_key(self):
-        import warnings
-
-        num_ids = 2
-        incomplete_key = _Key(_Key.kind, None)
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            with warnings.catch_warnings(record=True) as warned:
-                client.reserve_ids(incomplete_key, num_ids)
-
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
-
-    def test_reserve_ids_w_wrong_num_ids(self):
-        import warnings
-
-        num_ids = "2"
-        complete_key = _Key()
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            with warnings.catch_warnings(record=True) as warned:
-                client.reserve_ids(complete_key, num_ids)
-
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
-
-    def test_reserve_ids_w_non_numeric_key_name(self):
-        import warnings
-
-        num_ids = 2
-        complete_key = _Key(_Key.kind, "batman")
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            with warnings.catch_warnings(record=True) as warned:
-                client.reserve_ids(complete_key, num_ids)
-
-        self.assertEqual(len(warned), 1)
-        self.assertIn("Client.reserve_ids is deprecated.", str(warned[0].message))
-
-    def test_reserve_ids_multi(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds, _use_grpc=False)
-        key1 = _Key(_Key.kind, "one")
-        key2 = _Key(_Key.kind, "two")
-        reserve_ids = mock.Mock()
-        ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
-        client._datastore_api_internal = ds_api
-
-        client.reserve_ids_multi([key1, key2])
-
-        expected_keys = [key1.to_protobuf(), key2.to_protobuf()]
-        reserve_ids.assert_called_once_with(
-            request={"project_id": self.PROJECT, "keys": expected_keys}
-        )
 
-    def test_reserve_ids_multi_w_partial_key(self):
-        incomplete_key = _Key(_Key.kind, None)
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        with self.assertRaises(ValueError):
-            client.reserve_ids_multi([incomplete_key])
-
-    def test_key_w_project(self):
-        KIND = "KIND"
-        ID = 1234
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        self.assertRaises(TypeError, client.key, KIND, ID, project=self.PROJECT)
-
-    def test_key_wo_project(self):
-        kind = "KIND"
-        id_ = 1234
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
-        with patch as mock_klass:
-            key = client.key(kind, id_)
-            self.assertIs(key, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                kind, id_, project=self.PROJECT, namespace=None
-            )
-
-    def test_key_w_namespace(self):
-        kind = "KIND"
-        id_ = 1234
-        namespace = object()
-
-        creds = _make_credentials()
-        client = self._make_one(namespace=namespace, credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
-        with patch as mock_klass:
-            key = client.key(kind, id_)
-            self.assertIs(key, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                kind, id_, project=self.PROJECT, namespace=namespace
-            )
-
-    def test_key_w_namespace_collision(self):
-        kind = "KIND"
-        id_ = 1234
-        namespace1 = object()
-        namespace2 = object()
-
-        creds = _make_credentials()
-        client = self._make_one(namespace=namespace1, credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Key", spec=["__call__"])
-        with patch as mock_klass:
-            key = client.key(kind, id_, namespace=namespace2)
-            self.assertIs(key, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                kind, id_, project=self.PROJECT, namespace=namespace2
-            )
-
-    def test_entity_w_defaults(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"])
-        with patch as mock_klass:
-            entity = client.entity()
-            self.assertIs(entity, mock_klass.return_value)
-            mock_klass.assert_called_once_with(key=None, exclude_from_indexes=())
-
-    def test_entity_w_explicit(self):
-        key = mock.Mock(spec=[])
-        exclude_from_indexes = ["foo", "bar"]
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Entity", spec=["__call__"])
-        with patch as mock_klass:
-            entity = client.entity(key, exclude_from_indexes)
-            self.assertIs(entity, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                key=key, exclude_from_indexes=exclude_from_indexes
-            )
-
-    def test_batch(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Batch", spec=["__call__"])
-        with patch as mock_klass:
-            batch = client.batch()
-            self.assertIs(batch, mock_klass.return_value)
-            mock_klass.assert_called_once_with(client)
-
-    def test_transaction_defaults(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch(
-            "google.cloud.datastore.client.Transaction", spec=["__call__"]
+def test_client_query_w_namespace():
+    kind = "KIND"
+    namespace = object()
+
+    creds = _make_credentials()
+    client = _make_client(namespace=namespace, credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
+    with patch as mock_klass:
+        query = client.query(kind=kind)
+        assert query is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            client, project=PROJECT, namespace=namespace, kind=kind
         )
-        with patch as mock_klass:
-            xact = client.transaction()
-            self.assertIs(xact, mock_klass.return_value)
-            mock_klass.assert_called_once_with(client)
-
-    def test_read_only_transaction_defaults(self):
-        from google.cloud.datastore_v1.types import TransactionOptions
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        xact = client.transaction(read_only=True)
-        self.assertEqual(
-            xact._options, TransactionOptions(read_only=TransactionOptions.ReadOnly())
+
+
+def test_client_query_w_namespace_collision():
+    kind = "KIND"
+    namespace1 = object()
+    namespace2 = object()
+
+    creds = _make_credentials()
+    client = _make_client(namespace=namespace1, credentials=creds)
+
+    patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
+    with patch as mock_klass:
+        query = client.query(kind=kind, namespace=namespace2)
+        assert query is mock_klass.return_value
+        mock_klass.assert_called_once_with(
+            client, project=PROJECT, namespace=namespace2, kind=kind
         )
-        self.assertFalse(xact._options._pb.HasField("read_write"))
-        self.assertTrue(xact._options._pb.HasField("read_only"))
-        self.assertEqual(xact._options._pb.read_only, TransactionOptions.ReadOnly()._pb)
-
-    def test_query_w_client(self):
-        KIND = "KIND"
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-        other = self._make_one(credentials=_make_credentials())
-
-        self.assertRaises(TypeError, client.query, kind=KIND, client=other)
-
-    def test_query_w_project(self):
-        KIND = "KIND"
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        self.assertRaises(TypeError, client.query, kind=KIND, project=self.PROJECT)
-
-    def test_query_w_defaults(self):
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
-        with patch as mock_klass:
-            query = client.query()
-            self.assertIs(query, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                client, project=self.PROJECT, namespace=None
-            )
-
-    def test_query_explicit(self):
-        kind = "KIND"
-        namespace = "NAMESPACE"
-        ancestor = object()
-        filters = [("PROPERTY", "==", "VALUE")]
-        projection = ["__key__"]
-        order = ["PROPERTY"]
-        distinct_on = ["DISTINCT_ON"]
-
-        creds = _make_credentials()
-        client = self._make_one(credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
-        with patch as mock_klass:
-            query = client.query(
-                kind=kind,
-                namespace=namespace,
-                ancestor=ancestor,
-                filters=filters,
-                projection=projection,
-                order=order,
-                distinct_on=distinct_on,
-            )
-            self.assertIs(query, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                client,
-                project=self.PROJECT,
-                kind=kind,
-                namespace=namespace,
-                ancestor=ancestor,
-                filters=filters,
-                projection=projection,
-                order=order,
-                distinct_on=distinct_on,
-            )
-
-    def test_query_w_namespace(self):
-        kind = "KIND"
-        namespace = object()
-
-        creds = _make_credentials()
-        client = self._make_one(namespace=namespace, credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
-        with patch as mock_klass:
-            query = client.query(kind=kind)
-            self.assertIs(query, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                client, project=self.PROJECT, namespace=namespace, kind=kind
-            )
-
-    def test_query_w_namespace_collision(self):
-        kind = "KIND"
-        namespace1 = object()
-        namespace2 = object()
-
-        creds = _make_credentials()
-        client = self._make_one(namespace=namespace1, credentials=creds)
-
-        patch = mock.patch("google.cloud.datastore.client.Query", spec=["__call__"])
-        with patch as mock_klass:
-            query = client.query(kind=kind, namespace=namespace2)
-            self.assertIs(query, mock_klass.return_value)
-            mock_klass.assert_called_once_with(
-                client, project=self.PROJECT, namespace=namespace2, kind=kind
-            )
+
+
+def test_client_reserve_ids_multi_w_partial_key():
+    incomplete_key = _Key(_Key.kind, None)
+    creds = _make_credentials()
+    client = _make_client(credentials=creds)
+    with pytest.raises(ValueError):
+        client.reserve_ids_multi([incomplete_key])
+
+
+def test_client_reserve_ids_multi():
+    creds = _make_credentials()
+    client = _make_client(credentials=creds, _use_grpc=False)
+    key1 = _Key(_Key.kind, "one")
+    key2 = _Key(_Key.kind, "two")
+    reserve_ids = mock.Mock()
+    ds_api = mock.Mock(reserve_ids=reserve_ids, spec=["reserve_ids"])
+    client._datastore_api_internal = ds_api
+
+    client.reserve_ids_multi([key1, key2])
+
+    expected_keys = [key1.to_protobuf(), key2.to_protobuf()]
+    reserve_ids.assert_called_once_with(
+        request={"project_id": PROJECT, "keys": expected_keys}
+    )
 
 
 class _NoCommitBatch(object):
@@ -1535,7 +1529,7 @@ class _Key(object):
     kind = "KIND"
     id = 1234
     name = None
-    _project = project = "PROJECT"
+    _project = project = PROJECT
     _namespace = None
 
     _key = "KEY"
@@ -1603,18 +1597,13 @@ def __init__(self, id_):
         self.path = [_PathElementPB(id_)]
 
 
-def _assert_num_mutations(test_case, mutation_pb_list, num_mutations):
-    test_case.assertEqual(len(mutation_pb_list), num_mutations)
-
-
-def _mutated_pb(test_case, mutation_pb_list, mutation_type):
-    # Make sure there is only one mutation.
-    _assert_num_mutations(test_case, mutation_pb_list, 1)
+def _mutated_pb(mutation_pb_list, mutation_type):
+    assert len(mutation_pb_list) == 1
 
     # We grab the only mutation.
     mutated_pb = mutation_pb_list[0]
     # Then check if it is the correct type.
-    test_case.assertEqual(mutated_pb._pb.WhichOneof("operation"), mutation_type)
+    assert mutated_pb._pb.WhichOneof("operation") == mutation_type
 
     return getattr(mutated_pb, mutation_type)
 
@@ -1657,3 +1646,25 @@ def _make_datastore_api(*keys, **kwargs):
     return mock.Mock(
         commit=commit_method, lookup=lookup_method, spec=["commit", "lookup"]
     )
+
+
+def _make_credentials():
+    import google.auth.credentials
+
+    return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+
+    entity_pb = entity_pb2.Entity()
+    entity_pb.key.partition_id.project_id = project
+    path_element = entity_pb._pb.key.path.add()
+    path_element.kind = kind
+    path_element.id = integer_id
+    if name is not None and str_val is not None:
+        value_pb = _new_value_pb(entity_pb, name)
+        value_pb.string_value = str_val
+
+    return entity_pb
diff --git a/tests/unit/test_entity.py b/tests/unit/test_entity.py
index c65541a4..faa862e4 100644
--- a/tests/unit/test_entity.py
+++ b/tests/unit/test_entity.py
@@ -12,214 +12,222 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
+import pytest
 
 _PROJECT = "PROJECT"
 _KIND = "KIND"
 _ID = 1234
 
 
-class TestEntity(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.entity import Entity
-
-        return Entity
-
-    def _make_one(self, key=None, exclude_from_indexes=()):
-        klass = self._get_target_class()
-        return klass(key=key, exclude_from_indexes=exclude_from_indexes)
-
-    def test_ctor_defaults(self):
-        klass = self._get_target_class()
-        entity = klass()
-        self.assertIsNone(entity.key)
-        self.assertIsNone(entity.kind)
-        self.assertEqual(sorted(entity.exclude_from_indexes), [])
-
-    def test_ctor_explicit(self):
-        _EXCLUDE_FROM_INDEXES = ["foo", "bar"]
-        key = _Key()
-        entity = self._make_one(key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES)
-        self.assertEqual(
-            sorted(entity.exclude_from_indexes), sorted(_EXCLUDE_FROM_INDEXES)
-        )
-
-    def test_ctor_bad_exclude_from_indexes(self):
-        BAD_EXCLUDE_FROM_INDEXES = object()
-        key = _Key()
-        self.assertRaises(
-            TypeError,
-            self._make_one,
-            key=key,
-            exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES,
-        )
-
-    def test___eq_____ne___w_non_entity(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key(_KIND, _ID, project=_PROJECT)
-        entity = self._make_one(key=key)
-        self.assertFalse(entity == object())
-        self.assertTrue(entity != object())
-
-    def test___eq_____ne___w_different_keys(self):
-        from google.cloud.datastore.key import Key
-
-        _ID1 = 1234
-        _ID2 = 2345
-        key1 = Key(_KIND, _ID1, project=_PROJECT)
-        entity1 = self._make_one(key=key1)
-        key2 = Key(_KIND, _ID2, project=_PROJECT)
-        entity2 = self._make_one(key=key2)
-        self.assertFalse(entity1 == entity2)
-        self.assertTrue(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys(self):
-        from google.cloud.datastore.key import Key
-
-        name = "foo"
-        value = 42
-        meaning = 9
-
-        key1 = Key(_KIND, _ID, project=_PROJECT)
-        entity1 = self._make_one(key=key1, exclude_from_indexes=(name,))
-        entity1[name] = value
-        entity1._meanings[name] = (meaning, value)
-
-        key2 = Key(_KIND, _ID, project=_PROJECT)
-        entity2 = self._make_one(key=key2, exclude_from_indexes=(name,))
-        entity2[name] = value
-        entity2._meanings[name] = (meaning, value)
-
-        self.assertTrue(entity1 == entity2)
-        self.assertFalse(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys_different_props(self):
-        from google.cloud.datastore.key import Key
-
-        key1 = Key(_KIND, _ID, project=_PROJECT)
-        entity1 = self._make_one(key=key1)
-        entity1["foo"] = "Foo"
-        key2 = Key(_KIND, _ID, project=_PROJECT)
-        entity2 = self._make_one(key=key2)
-        entity1["bar"] = "Bar"
-        self.assertFalse(entity1 == entity2)
-        self.assertTrue(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self):
-        from google.cloud.datastore.key import Key
-
-        key1 = Key(_KIND, _ID, project=_PROJECT)
-        key2 = Key(_KIND, _ID, project=_PROJECT)
-        entity1 = self._make_one(key=key1)
-        entity1["some_key"] = key1
-        entity2 = self._make_one(key=key1)
-        entity2["some_key"] = key2
-        self.assertTrue(entity1 == entity2)
-        self.assertFalse(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self):
-        from google.cloud.datastore.key import Key
-
-        _ID1 = 1234
-        _ID2 = 2345
-        key1 = Key(_KIND, _ID1, project=_PROJECT)
-        key2 = Key(_KIND, _ID2, project=_PROJECT)
-        entity1 = self._make_one(key=key1)
-        entity1["some_key"] = key1
-        entity2 = self._make_one(key=key1)
-        entity2["some_key"] = key2
-        self.assertFalse(entity1 == entity2)
-        self.assertTrue(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key(_KIND, _ID, project=_PROJECT)
-        entity1 = self._make_one(key=key)
-        sub1 = self._make_one()
-        sub1.update({"foo": "Foo"})
-        entity1["some_entity"] = sub1
-        entity2 = self._make_one(key=key)
-        sub2 = self._make_one()
-        sub2.update({"foo": "Foo"})
-        entity2["some_entity"] = sub2
-        self.assertTrue(entity1 == entity2)
-        self.assertFalse(entity1 != entity2)
-
-    def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key(_KIND, _ID, project=_PROJECT)
-        entity1 = self._make_one(key=key)
-        sub1 = self._make_one()
-        sub1.update({"foo": "Foo"})
-        entity1["some_entity"] = sub1
-        entity2 = self._make_one(key=key)
-        sub2 = self._make_one()
-        sub2.update({"foo": "Bar"})
-        entity2["some_entity"] = sub2
-        self.assertFalse(entity1 == entity2)
-        self.assertTrue(entity1 != entity2)
-
-    def test__eq__same_value_different_exclude(self):
-        from google.cloud.datastore.key import Key
-
-        name = "foo"
-        value = 42
-        key = Key(_KIND, _ID, project=_PROJECT)
-
-        entity1 = self._make_one(key=key, exclude_from_indexes=(name,))
-        entity1[name] = value
-
-        entity2 = self._make_one(key=key, exclude_from_indexes=())
-        entity2[name] = value
-
-        self.assertFalse(entity1 == entity2)
-
-    def test__eq__same_value_different_meanings(self):
-        from google.cloud.datastore.key import Key
-
-        name = "foo"
-        value = 42
-        meaning = 9
-        key = Key(_KIND, _ID, project=_PROJECT)
-
-        entity1 = self._make_one(key=key, exclude_from_indexes=(name,))
-        entity1[name] = value
-
-        entity2 = self._make_one(key=key, exclude_from_indexes=(name,))
-        entity2[name] = value
-        entity2._meanings[name] = (meaning, value)
-
-        self.assertFalse(entity1 == entity2)
-
-    def test_id(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key(_KIND, _ID, project=_PROJECT)
-        entity = self._make_one(key=key)
-        self.assertEqual(entity.id, _ID)
-
-    def test_id_none(self):
-
-        entity = self._make_one(key=None)
-        self.assertEqual(entity.id, None)
-
-    def test___repr___no_key_empty(self):
-        entity = self._make_one()
-        self.assertEqual(repr(entity), "<Entity {}>")
-
-    def test___repr___w_key_non_empty(self):
-        key = _Key()
-        flat_path = ("bar", 12, "baz", "himom")
-        key._flat_path = flat_path
-        entity = self._make_one(key=key)
-        entity_vals = {"foo": "Foo"}
-        entity.update(entity_vals)
-        expected = "<Entity%s %s>" % (flat_path, entity_vals)
-        self.assertEqual(repr(entity), expected)
+def _make_entity(key=None, exclude_from_indexes=()):
+    from google.cloud.datastore.entity import Entity
+
+    return Entity(key=key, exclude_from_indexes=exclude_from_indexes)
+
+
+def test_entity_ctor_defaults():
+    from google.cloud.datastore.entity import Entity
+
+    entity = Entity()
+    assert entity.key is None
+    assert entity.kind is None
+    assert sorted(entity.exclude_from_indexes) == []
+
+
+def test_entity_ctor_explicit():
+    _EXCLUDE_FROM_INDEXES = ["foo", "bar"]
+    key = _Key()
+    entity = _make_entity(key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES)
+    assert sorted(entity.exclude_from_indexes) == sorted(_EXCLUDE_FROM_INDEXES)
+
+
+def test_entity_ctor_bad_exclude_from_indexes():
+    BAD_EXCLUDE_FROM_INDEXES = object()
+    key = _Key()
+    with pytest.raises(TypeError):
+        _make_entity(key=key, exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES)
+
+
+def test_entity___eq_____ne___w_non_entity():
+    from google.cloud.datastore.key import Key
+
+    key = Key(_KIND, _ID, project=_PROJECT)
+    entity = _make_entity(key=key)
+    assert not entity == object()
+    assert entity != object()
+
+
+def test_entity___eq_____ne___w_different_keys():
+    from google.cloud.datastore.key import Key
+
+    _ID1 = 1234
+    _ID2 = 2345
+    key1 = Key(_KIND, _ID1, project=_PROJECT)
+    entity1 = _make_entity(key=key1)
+    key2 = Key(_KIND, _ID2, project=_PROJECT)
+    entity2 = _make_entity(key=key2)
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys():
+    from google.cloud.datastore.key import Key
+
+    name = "foo"
+    value = 42
+    meaning = 9
+
+    key1 = Key(_KIND, _ID, project=_PROJECT)
+    entity1 = _make_entity(key=key1, exclude_from_indexes=(name,))
+    entity1[name] = value
+    entity1._meanings[name] = (meaning, value)
+
+    key2 = Key(_KIND, _ID, project=_PROJECT)
+    entity2 = _make_entity(key=key2, exclude_from_indexes=(name,))
+    entity2[name] = value
+    entity2._meanings[name] = (meaning, value)
+
+    assert entity1 == entity2
+    assert not entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys_different_props():
+    from google.cloud.datastore.key import Key
+
+    key1 = Key(_KIND, _ID, project=_PROJECT)
+    entity1 = _make_entity(key=key1)
+    entity1["foo"] = "Foo"
+    key2 = Key(_KIND, _ID, project=_PROJECT)
+    entity2 = _make_entity(key=key2)
+    entity1["bar"] = "Bar"
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys_props_w_equiv_keys_as_value():
+    from google.cloud.datastore.key import Key
+
+    key1 = Key(_KIND, _ID, project=_PROJECT)
+    key2 = Key(_KIND, _ID, project=_PROJECT)
+    entity1 = _make_entity(key=key1)
+    entity1["some_key"] = key1
+    entity2 = _make_entity(key=key1)
+    entity2["some_key"] = key2
+    assert entity1 == entity2
+    assert not entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys_props_w_diff_keys_as_value():
+    from google.cloud.datastore.key import Key
+
+    _ID1 = 1234
+    _ID2 = 2345
+    key1 = Key(_KIND, _ID1, project=_PROJECT)
+    key2 = Key(_KIND, _ID2, project=_PROJECT)
+    entity1 = _make_entity(key=key1)
+    entity1["some_key"] = key1
+    entity2 = _make_entity(key=key1)
+    entity2["some_key"] = key2
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys_props_w_equiv_entities_as_value():
+    from google.cloud.datastore.key import Key
+
+    key = Key(_KIND, _ID, project=_PROJECT)
+    entity1 = _make_entity(key=key)
+    sub1 = _make_entity()
+    sub1.update({"foo": "Foo"})
+    entity1["some_entity"] = sub1
+    entity2 = _make_entity(key=key)
+    sub2 = _make_entity()
+    sub2.update({"foo": "Foo"})
+    entity2["some_entity"] = sub2
+    assert entity1 == entity2
+    assert not entity1 != entity2
+
+
+def test_entity___eq_____ne___w_same_keys_props_w_diff_entities_as_value():
+    from google.cloud.datastore.key import Key
+
+    key = Key(_KIND, _ID, project=_PROJECT)
+    entity1 = _make_entity(key=key)
+    sub1 = _make_entity()
+    sub1.update({"foo": "Foo"})
+    entity1["some_entity"] = sub1
+    entity2 = _make_entity(key=key)
+    sub2 = _make_entity()
+    sub2.update({"foo": "Bar"})
+    entity2["some_entity"] = sub2
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity___eq__same_value_different_exclude():
+    from google.cloud.datastore.key import Key
+
+    name = "foo"
+    value = 42
+    key = Key(_KIND, _ID, project=_PROJECT)
+
+    entity1 = _make_entity(key=key, exclude_from_indexes=(name,))
+    entity1[name] = value
+
+    entity2 = _make_entity(key=key, exclude_from_indexes=())
+    entity2[name] = value
+
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity___eq__same_value_different_meanings():
+    from google.cloud.datastore.key import Key
+
+    name = "foo"
+    value = 42
+    meaning = 9
+    key = Key(_KIND, _ID, project=_PROJECT)
+
+    entity1 = _make_entity(key=key, exclude_from_indexes=(name,))
+    entity1[name] = value
+
+    entity2 = _make_entity(key=key, exclude_from_indexes=(name,))
+    entity2[name] = value
+    entity2._meanings[name] = (meaning, value)
+
+    assert not entity1 == entity2
+    assert entity1 != entity2
+
+
+def test_entity_id():
+    from google.cloud.datastore.key import Key
+
+    key = Key(_KIND, _ID, project=_PROJECT)
+    entity = _make_entity(key=key)
+    assert entity.id == _ID
+
+
+def test_entity_id_none():
+
+    entity = _make_entity(key=None)
+    assert entity.id is None
+
+
+def test_entity___repr___no_key_empty():
+    entity = _make_entity()
+    assert repr(entity) == "<Entity {}>"
+
+
+def test_entity___repr___w_key_non_empty():
+    key = _Key()
+    flat_path = ("bar", 12, "baz", "himom")
+    key._flat_path = flat_path
+    entity = _make_entity(key=key)
+    entity_vals = {"foo": "Foo"}
+    entity.update(entity_vals)
+    expected = "<Entity%s %s>" % (flat_path, entity_vals)
+    assert repr(entity) == expected
 
 
 class _Key(object):
diff --git a/tests/unit/test_helpers.py b/tests/unit/test_helpers.py
index c37499ca..4c1861a2 100644
--- a/tests/unit/test_helpers.py
+++ b/tests/unit/test_helpers.py
@@ -12,1010 +12,1123 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
+import pytest
 
 
-class Test__new_value_pb(unittest.TestCase):
-    def _call_fut(self, entity_pb, name):
-        from google.cloud.datastore.helpers import _new_value_pb
+def test__new_value_pb():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
 
-        return _new_value_pb(entity_pb, name)
+    entity_pb = entity_pb2.Entity()
+    name = "foo"
+    result = _new_value_pb(entity_pb, name)
 
-    def test_it(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
+    assert isinstance(result, type(entity_pb2.Value()._pb))
+    assert len(entity_pb._pb.properties) == 1
+    assert entity_pb._pb.properties[name] == result
 
-        entity_pb = entity_pb2.Entity()
-        name = "foo"
-        result = self._call_fut(entity_pb, name)
 
-        self.assertIsInstance(result, type(entity_pb2.Value()._pb))
-        self.assertEqual(len(entity_pb._pb.properties), 1)
-        self.assertEqual(entity_pb._pb.properties[name], result)
+def test_entity_from_protobuf_w_defaults():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_from_protobuf
 
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    entity_pb = entity_pb2.Entity()
+    entity_pb.key.partition_id.project_id = _PROJECT
+    entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
 
-class Test_entity_from_protobuf(unittest.TestCase):
-    def _call_fut(self, val):
-        from google.cloud.datastore.helpers import entity_from_protobuf
+    value_pb = _new_value_pb(entity_pb, "foo")
+    value_pb.string_value = "Foo"
 
-        return entity_from_protobuf(val)
+    unindexed_val_pb = _new_value_pb(entity_pb, "bar")
+    unindexed_val_pb.integer_value = 10
+    unindexed_val_pb.exclude_from_indexes = True
 
-    def test_it(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
+    array_val_pb1 = _new_value_pb(entity_pb, "baz")
+    array_pb1 = array_val_pb1.array_value.values
 
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        entity_pb = entity_pb2.Entity()
-        entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
+    unindexed_array_val_pb = array_pb1.add()
+    unindexed_array_val_pb.integer_value = 11
+    unindexed_array_val_pb.exclude_from_indexes = True
 
-        value_pb = _new_value_pb(entity_pb, "foo")
-        value_pb.string_value = "Foo"
+    array_val_pb2 = _new_value_pb(entity_pb, "qux")
+    array_pb2 = array_val_pb2.array_value.values
 
-        unindexed_val_pb = _new_value_pb(entity_pb, "bar")
-        unindexed_val_pb.integer_value = 10
-        unindexed_val_pb.exclude_from_indexes = True
+    indexed_array_val_pb = array_pb2.add()
+    indexed_array_val_pb.integer_value = 12
 
-        array_val_pb1 = _new_value_pb(entity_pb, "baz")
-        array_pb1 = array_val_pb1.array_value.values
+    entity = entity_from_protobuf(entity_pb._pb)
+    assert entity.kind == _KIND
+    assert entity.exclude_from_indexes == frozenset(["bar", "baz"])
+    entity_props = dict(entity)
+    assert entity_props == {"foo": "Foo", "bar": 10, "baz": [11], "qux": [12]}
 
-        unindexed_array_val_pb = array_pb1.add()
-        unindexed_array_val_pb.integer_value = 11
-        unindexed_array_val_pb.exclude_from_indexes = True
+    # Also check the key.
+    key = entity.key
+    assert key.project == _PROJECT
+    assert key.namespace is None
+    assert key.kind == _KIND
+    assert key.id == _ID
 
-        array_val_pb2 = _new_value_pb(entity_pb, "qux")
-        array_pb2 = array_val_pb2.array_value.values
 
-        indexed_array_val_pb = array_pb2.add()
-        indexed_array_val_pb.integer_value = 12
+def test_entity_from_protobuf_w_mismatched_value_indexed():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_from_protobuf
 
-        entity = self._call_fut(entity_pb._pb)
-        self.assertEqual(entity.kind, _KIND)
-        self.assertEqual(entity.exclude_from_indexes, frozenset(["bar", "baz"]))
-        entity_props = dict(entity)
-        self.assertEqual(
-            entity_props, {"foo": "Foo", "bar": 10, "baz": [11], "qux": [12]}
-        )
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    entity_pb = entity_pb2.Entity()
+    entity_pb.key.partition_id.project_id = _PROJECT
+    entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
 
-        # Also check the key.
-        key = entity.key
-        self.assertEqual(key.project, _PROJECT)
-        self.assertIsNone(key.namespace)
-        self.assertEqual(key.kind, _KIND)
-        self.assertEqual(key.id, _ID)
+    array_val_pb = _new_value_pb(entity_pb, "baz")
+    array_pb = array_val_pb.array_value.values
 
-    def test_mismatched_value_indexed(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
+    unindexed_value_pb1 = array_pb.add()
+    unindexed_value_pb1.integer_value = 10
+    unindexed_value_pb1.exclude_from_indexes = True
 
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        entity_pb = entity_pb2.Entity()
-        entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb._pb.key.path.add(kind=_KIND, id=_ID)
+    unindexed_value_pb2 = array_pb.add()
+    unindexed_value_pb2.integer_value = 11
 
-        array_val_pb = _new_value_pb(entity_pb, "baz")
-        array_pb = array_val_pb.array_value.values
+    with pytest.raises(ValueError):
+        entity_from_protobuf(entity_pb._pb)
 
-        unindexed_value_pb1 = array_pb.add()
-        unindexed_value_pb1.integer_value = 10
-        unindexed_value_pb1.exclude_from_indexes = True
 
-        unindexed_value_pb2 = array_pb.add()
-        unindexed_value_pb2.integer_value = 11
+def test_entity_from_protobuf_w_entity_no_key():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import entity_from_protobuf
 
-        with self.assertRaises(ValueError):
-            self._call_fut(entity_pb._pb)
+    entity_pb = entity_pb2.Entity()
+    entity = entity_from_protobuf(entity_pb._pb)
 
-    def test_entity_no_key(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
+    assert entity.key is None
+    assert dict(entity) == {}
 
-        entity_pb = entity_pb2.Entity()
-        entity = self._call_fut(entity_pb._pb)
 
-        self.assertIsNone(entity.key)
-        self.assertEqual(dict(entity), {})
+def test_entity_from_protobuf_w_pb2_entity_no_key():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import entity_from_protobuf
 
-    def test_pb2_entity_no_key(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
+    entity_pb = entity_pb2.Entity()
+    entity = entity_from_protobuf(entity_pb)
 
-        entity_pb = entity_pb2.Entity()
-        entity = self._call_fut(entity_pb)
-
-        self.assertIsNone(entity.key)
-        self.assertEqual(dict(entity), {})
-
-    def test_entity_with_meaning(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        entity_pb = entity_pb2.Entity()
-        name = "hello"
-        value_pb = _new_value_pb(entity_pb, name)
-        value_pb.meaning = meaning = 9
-        value_pb.string_value = val = u"something"
-
-        entity = self._call_fut(entity_pb)
-        self.assertIsNone(entity.key)
-        self.assertEqual(dict(entity), {name: val})
-        self.assertEqual(entity._meanings, {name: (meaning, val)})
-
-    def test_nested_entity_no_key(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        PROJECT = "FOO"
-        KIND = "KIND"
-        INSIDE_NAME = "IFOO"
-        OUTSIDE_NAME = "OBAR"
-        INSIDE_VALUE = 1337
-
-        entity_inside = entity_pb2.Entity()
-        inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME)
-        inside_val_pb.integer_value = INSIDE_VALUE
-
-        entity_pb = entity_pb2.Entity()
-        entity_pb.key.partition_id.project_id = PROJECT
-        element = entity_pb._pb.key.path.add()
-        element.kind = KIND
-
-        outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME)
-        outside_val_pb.entity_value.CopyFrom(entity_inside._pb)
-
-        entity = self._call_fut(entity_pb._pb)
-        self.assertEqual(entity.key.project, PROJECT)
-        self.assertEqual(entity.key.flat_path, (KIND,))
-        self.assertEqual(len(entity), 1)
-
-        inside_entity = entity[OUTSIDE_NAME]
-        self.assertIsNone(inside_entity.key)
-        self.assertEqual(len(inside_entity), 1)
-        self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE)
-
-    def test_index_mismatch_ignores_empty_list(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-
-        array_val_pb = entity_pb2.Value(array_value=entity_pb2.ArrayValue(values=[]))
-
-        entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb})
-        entity_pb.key.partition_id.project_id = _PROJECT
-        entity_pb.key._pb.path.add(kind=_KIND, id=_ID)
-
-        entity = self._call_fut(entity_pb._pb)
-        entity_dict = dict(entity)
-        self.assertEqual(entity_dict["baz"], [])
-
-
-class Test_entity_to_protobuf(unittest.TestCase):
-    def _call_fut(self, entity):
-        from google.cloud.datastore.helpers import entity_to_protobuf
-
-        return entity_to_protobuf(entity)
-
-    def _compare_entity_proto(self, entity_pb1, entity_pb2):
-        self.assertEqual(entity_pb1.key, entity_pb2.key)
-        value_list1 = sorted(entity_pb1.properties.items())
-        value_list2 = sorted(entity_pb2.properties.items())
-        self.assertEqual(len(value_list1), len(value_list2))
-        for pair1, pair2 in zip(value_list1, value_list2):
-            name1, val1 = pair1
-            name2, val2 = pair2
-            self.assertEqual(name1, name2)
-            if val1._pb.HasField("entity_value"):  # Message field (Entity)
-                self.assertEqual(val1.meaning, val2.meaning)
-                self._compare_entity_proto(val1.entity_value, val2.entity_value)
-            else:
-                self.assertEqual(val1, val2)
-
-    def test_empty(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-
-        entity = Entity()
-        entity_pb = self._call_fut(entity)
-        self._compare_entity_proto(entity_pb, entity_pb2.Entity())
-
-    def test_key_only(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.key import Key
-
-        kind, name = "PATH", "NAME"
-        project = "PROJECT"
-        key = Key(kind, name, project=project)
-        entity = Entity(key=key)
-        entity_pb = self._call_fut(entity)
-
-        expected_pb = entity_pb2.Entity()
-        expected_pb.key.partition_id.project_id = project
-        path_elt = expected_pb._pb.key.path.add()
-        path_elt.kind = kind
-        path_elt.name = name
-
-        self._compare_entity_proto(entity_pb, expected_pb)
-
-    def test_simple_fields(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        entity = Entity()
-        name1 = "foo"
-        entity[name1] = value1 = 42
-        name2 = "bar"
-        entity[name2] = value2 = u"some-string"
-        entity_pb = self._call_fut(entity)
-
-        expected_pb = entity_pb2.Entity()
-        val_pb1 = _new_value_pb(expected_pb, name1)
-        val_pb1.integer_value = value1
-        val_pb2 = _new_value_pb(expected_pb, name2)
-        val_pb2.string_value = value2
-
-        self._compare_entity_proto(entity_pb, expected_pb)
-
-    def test_with_empty_list(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-
-        entity = Entity()
-        entity["foo"] = []
-        entity_pb = self._call_fut(entity)
-
-        expected_pb = entity_pb2.Entity()
-        prop = expected_pb._pb.properties.get_or_create("foo")
-        prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])._pb)
-
-        self._compare_entity_proto(entity_pb, expected_pb)
-
-    def test_inverts_to_protobuf(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import _new_value_pb
-        from google.cloud.datastore.helpers import entity_from_protobuf
-
-        original_pb = entity_pb2.Entity()
-        # Add a key.
-        original_pb.key.partition_id.project_id = project = "PROJECT"
-        elem1 = original_pb._pb.key.path.add()
-        elem1.kind = "Family"
-        elem1.id = 1234
-        elem2 = original_pb._pb.key.path.add()
-        elem2.kind = "King"
-        elem2.name = "Spades"
-
-        # Add an integer property.
-        val_pb1 = _new_value_pb(original_pb, "foo")
-        val_pb1.integer_value = 1337
-        val_pb1.exclude_from_indexes = True
-        # Add a string property.
-        val_pb2 = _new_value_pb(original_pb, "bar")
-        val_pb2.string_value = u"hello"
-
-        # Add a nested (entity) property.
-        val_pb3 = _new_value_pb(original_pb, "entity-baz")
-        sub_pb = entity_pb2.Entity()
-        sub_val_pb1 = _new_value_pb(sub_pb, "x")
-        sub_val_pb1.double_value = 3.14
-        sub_val_pb2 = _new_value_pb(sub_pb, "y")
-        sub_val_pb2.double_value = 2.718281828
-        val_pb3.meaning = 9
-        val_pb3.entity_value.CopyFrom(sub_pb._pb)
-
-        # Add a list property.
-        val_pb4 = _new_value_pb(original_pb, "list-quux")
-        array_val1 = val_pb4.array_value.values.add()
-        array_val1.exclude_from_indexes = False
-        array_val1.meaning = meaning = 22
-        array_val1.blob_value = b"\xe2\x98\x83"
-        array_val2 = val_pb4.array_value.values.add()
-        array_val2.exclude_from_indexes = False
-        array_val2.meaning = meaning
-        array_val2.blob_value = b"\xe2\x98\x85"
-
-        # Convert to the user-space Entity.
-        entity = entity_from_protobuf(original_pb)
-        # Convert the user-space Entity back to a protobuf.
-        new_pb = self._call_fut(entity)
-
-        # NOTE: entity_to_protobuf() strips the project so we "cheat".
-        new_pb.key.partition_id.project_id = project
-        self._compare_entity_proto(original_pb, new_pb)
-
-    def test_meaning_with_change(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        entity = Entity()
-        name = "foo"
-        entity[name] = value = 42
-        entity._meanings[name] = (9, 1337)
-        entity_pb = self._call_fut(entity)
-
-        expected_pb = entity_pb2.Entity()
-        value_pb = _new_value_pb(expected_pb, name)
-        value_pb.integer_value = value
-        # NOTE: No meaning is used since the value differs from the
-        #       value stored.
-        self._compare_entity_proto(entity_pb, expected_pb)
-
-    def test_variable_meanings(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        entity = Entity()
-        name = "quux"
-        entity[name] = values = [1, 20, 300]
-        meaning = 9
-        entity._meanings[name] = ([None, meaning, None], values)
-        entity_pb = self._call_fut(entity)
-
-        # Construct the expected protobuf.
-        expected_pb = entity_pb2.Entity()
-        value_pb = _new_value_pb(expected_pb, name)
-        value0 = value_pb.array_value.values.add()
-        value0.integer_value = values[0]
-        # The only array entry with a meaning is the middle one.
-        value1 = value_pb.array_value.values.add()
-        value1.integer_value = values[1]
-        value1.meaning = meaning
-        value2 = value_pb.array_value.values.add()
-        value2.integer_value = values[2]
-
-        self._compare_entity_proto(entity_pb, expected_pb)
-
-    def test_dict_to_entity(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-
-        entity = Entity()
-        entity["a"] = {"b": u"c"}
-        entity_pb = self._call_fut(entity)
-
-        expected_pb = entity_pb2.Entity(
-            properties={
-                "a": entity_pb2.Value(
-                    entity_value=entity_pb2.Entity(
-                        properties={"b": entity_pb2.Value(string_value="c")}
-                    )
+    assert entity.key is None
+    assert dict(entity) == {}
+
+
+def test_entity_from_protobuf_w_entity_with_meaning():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_from_protobuf
+
+    entity_pb = entity_pb2.Entity()
+    name = "hello"
+    value_pb = _new_value_pb(entity_pb, name)
+    value_pb.meaning = meaning = 9
+    value_pb.string_value = val = u"something"
+
+    entity = entity_from_protobuf(entity_pb)
+    assert entity.key is None
+    assert dict(entity) == {name: val}
+    assert entity._meanings == {name: (meaning, val)}
+
+
+def test_entity_from_protobuf_w_nested_entity_no_key():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_from_protobuf
+
+    PROJECT = "FOO"
+    KIND = "KIND"
+    INSIDE_NAME = "IFOO"
+    OUTSIDE_NAME = "OBAR"
+    INSIDE_VALUE = 1337
+
+    entity_inside = entity_pb2.Entity()
+    inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME)
+    inside_val_pb.integer_value = INSIDE_VALUE
+
+    entity_pb = entity_pb2.Entity()
+    entity_pb.key.partition_id.project_id = PROJECT
+    element = entity_pb._pb.key.path.add()
+    element.kind = KIND
+
+    outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME)
+    outside_val_pb.entity_value.CopyFrom(entity_inside._pb)
+
+    entity = entity_from_protobuf(entity_pb._pb)
+    assert entity.key.project == PROJECT
+    assert entity.key.flat_path == (KIND,)
+    assert len(entity) == 1
+
+    inside_entity = entity[OUTSIDE_NAME]
+    assert inside_entity.key is None
+    assert len(inside_entity) == 1
+    assert inside_entity[INSIDE_NAME] == INSIDE_VALUE
+
+
+def test_entity_from_protobuf_w_index_mismatch_w_empty_list():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import entity_from_protobuf
+
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+
+    array_val_pb = entity_pb2.Value(array_value=entity_pb2.ArrayValue(values=[]))
+
+    entity_pb = entity_pb2.Entity(properties={"baz": array_val_pb})
+    entity_pb.key.partition_id.project_id = _PROJECT
+    entity_pb.key._pb.path.add(kind=_KIND, id=_ID)
+
+    entity = entity_from_protobuf(entity_pb._pb)
+    entity_dict = dict(entity)
+    assert entity_dict["baz"] == []
+
+
+def _compare_entity_proto(entity_pb1, entity_pb2):
+    assert entity_pb1.key == entity_pb2.key
+    value_list1 = sorted(entity_pb1.properties.items())
+    value_list2 = sorted(entity_pb2.properties.items())
+    assert len(value_list1) == len(value_list2)
+    for pair1, pair2 in zip(value_list1, value_list2):
+        name1, val1 = pair1
+        name2, val2 = pair2
+        assert name1 == name2
+        if val1._pb.HasField("entity_value"):  # Message field (Entity)
+            assert val1.meaning == val2.meaning
+            _compare_entity_proto(val1.entity_value, val2.entity_value)
+        else:
+            assert val1 == val2
+
+
+def test_entity_to_protobuf_w_empty():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    entity_pb = entity_to_protobuf(entity)
+    _compare_entity_proto(entity_pb, entity_pb2.Entity())
+
+
+def test_entity_to_protobuf_w_key_only():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import entity_to_protobuf
+    from google.cloud.datastore.key import Key
+
+    kind, name = "PATH", "NAME"
+    project = "PROJECT"
+    key = Key(kind, name, project=project)
+    entity = Entity(key=key)
+    entity_pb = entity_to_protobuf(entity)
+
+    expected_pb = entity_pb2.Entity()
+    expected_pb.key.partition_id.project_id = project
+    path_elt = expected_pb._pb.key.path.add()
+    path_elt.kind = kind
+    path_elt.name = name
+
+    _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_simple_fields():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    name1 = "foo"
+    entity[name1] = value1 = 42
+    name2 = "bar"
+    entity[name2] = value2 = u"some-string"
+    entity_pb = entity_to_protobuf(entity)
+
+    expected_pb = entity_pb2.Entity()
+    val_pb1 = _new_value_pb(expected_pb, name1)
+    val_pb1.integer_value = value1
+    val_pb2 = _new_value_pb(expected_pb, name2)
+    val_pb2.string_value = value2
+
+    _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_empty_list():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    entity["foo"] = []
+    entity_pb = entity_to_protobuf(entity)
+
+    expected_pb = entity_pb2.Entity()
+    prop = expected_pb._pb.properties.get_or_create("foo")
+    prop.array_value.CopyFrom(entity_pb2.ArrayValue(values=[])._pb)
+
+    _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_inverts_to_protobuf():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_from_protobuf
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    original_pb = entity_pb2.Entity()
+    # Add a key.
+    original_pb.key.partition_id.project_id = project = "PROJECT"
+    elem1 = original_pb._pb.key.path.add()
+    elem1.kind = "Family"
+    elem1.id = 1234
+    elem2 = original_pb._pb.key.path.add()
+    elem2.kind = "King"
+    elem2.name = "Spades"
+
+    # Add an integer property.
+    val_pb1 = _new_value_pb(original_pb, "foo")
+    val_pb1.integer_value = 1337
+    val_pb1.exclude_from_indexes = True
+    # Add a string property.
+    val_pb2 = _new_value_pb(original_pb, "bar")
+    val_pb2.string_value = u"hello"
+
+    # Add a nested (entity) property.
+    val_pb3 = _new_value_pb(original_pb, "entity-baz")
+    sub_pb = entity_pb2.Entity()
+    sub_val_pb1 = _new_value_pb(sub_pb, "x")
+    sub_val_pb1.double_value = 3.14
+    sub_val_pb2 = _new_value_pb(sub_pb, "y")
+    sub_val_pb2.double_value = 2.718281828
+    val_pb3.meaning = 9
+    val_pb3.entity_value.CopyFrom(sub_pb._pb)
+
+    # Add a list property.
+    val_pb4 = _new_value_pb(original_pb, "list-quux")
+    array_val1 = val_pb4.array_value.values.add()
+    array_val1.exclude_from_indexes = False
+    array_val1.meaning = meaning = 22
+    array_val1.blob_value = b"\xe2\x98\x83"
+    array_val2 = val_pb4.array_value.values.add()
+    array_val2.exclude_from_indexes = False
+    array_val2.meaning = meaning
+    array_val2.blob_value = b"\xe2\x98\x85"
+
+    # Convert to the user-space Entity.
+    entity = entity_from_protobuf(original_pb)
+    # Convert the user-space Entity back to a protobuf.
+    new_pb = entity_to_protobuf(entity)
+
+    # NOTE: entity_to_protobuf() strips the project so we "cheat".
+    new_pb.key.partition_id.project_id = project
+    _compare_entity_proto(original_pb, new_pb)
+
+
+def test_entity_to_protobuf_w_meaning_with_change():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    name = "foo"
+    entity[name] = value = 42
+    entity._meanings[name] = (9, 1337)
+    entity_pb = entity_to_protobuf(entity)
+
+    expected_pb = entity_pb2.Entity()
+    value_pb = _new_value_pb(expected_pb, name)
+    value_pb.integer_value = value
+    # NOTE: No meaning is used since the value differs from the
+    #       value stored.
+    _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_variable_meanings():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    name = "quux"
+    entity[name] = values = [1, 20, 300]
+    meaning = 9
+    entity._meanings[name] = ([None, meaning, None], values)
+    entity_pb = entity_to_protobuf(entity)
+
+    # Construct the expected protobuf.
+    expected_pb = entity_pb2.Entity()
+    value_pb = _new_value_pb(expected_pb, name)
+    value0 = value_pb.array_value.values.add()
+    value0.integer_value = values[0]
+    # The only array entry with a meaning is the middle one.
+    value1 = value_pb.array_value.values.add()
+    value1.integer_value = values[1]
+    value1.meaning = meaning
+    value2 = value_pb.array_value.values.add()
+    value2.integer_value = values[2]
+
+    _compare_entity_proto(entity_pb, expected_pb)
+
+
+def test_entity_to_protobuf_w_dict_to_entity():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    entity["a"] = {"b": u"c"}
+    entity_pb = entity_to_protobuf(entity)
+
+    expected_pb = entity_pb2.Entity(
+        properties={
+            "a": entity_pb2.Value(
+                entity_value=entity_pb2.Entity(
+                    properties={"b": entity_pb2.Value(string_value="c")}
+                )
+            )
+        }
+    )
+    assert entity_pb == expected_pb
+
+
+def test_entity_to_protobuf_w_dict_to_entity_recursive():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import entity_to_protobuf
+
+    entity = Entity()
+    entity["a"] = {"b": {"c": {"d": 1.25}, "e": True}, "f": 10}
+    entity_pb = entity_to_protobuf(entity)
+
+    b_entity_pb = entity_pb2.Entity(
+        properties={
+            "c": entity_pb2.Value(
+                entity_value=entity_pb2.Entity(
+                    properties={"d": entity_pb2.Value(double_value=1.25)}
                 )
-            }
-        )
-        self.assertEqual(entity_pb, expected_pb)
-
-    def test_dict_to_entity_recursive(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-
-        entity = Entity()
-        entity["a"] = {"b": {"c": {"d": 1.25}, "e": True}, "f": 10}
-        entity_pb = self._call_fut(entity)
-
-        b_entity_pb = entity_pb2.Entity(
-            properties={
-                "c": entity_pb2.Value(
-                    entity_value=entity_pb2.Entity(
-                        properties={"d": entity_pb2.Value(double_value=1.25)}
-                    )
-                ),
-                "e": entity_pb2.Value(boolean_value=True),
-            }
-        )
-        expected_pb = entity_pb2.Entity(
-            properties={
-                "a": entity_pb2.Value(
-                    entity_value=entity_pb2.Entity(
-                        properties={
-                            "b": entity_pb2.Value(entity_value=b_entity_pb),
-                            "f": entity_pb2.Value(integer_value=10),
-                        }
-                    )
+            ),
+            "e": entity_pb2.Value(boolean_value=True),
+        }
+    )
+    expected_pb = entity_pb2.Entity(
+        properties={
+            "a": entity_pb2.Value(
+                entity_value=entity_pb2.Entity(
+                    properties={
+                        "b": entity_pb2.Value(entity_value=b_entity_pb),
+                        "f": entity_pb2.Value(integer_value=10),
+                    }
                 )
-            }
-        )
-        self.assertEqual(entity_pb, expected_pb)
-
-
-class Test_key_from_protobuf(unittest.TestCase):
-    def _call_fut(self, val):
-        from google.cloud.datastore.helpers import key_from_protobuf
-
-        return key_from_protobuf(val)
-
-    def _makePB(self, project=None, namespace=None, path=()):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        pb = entity_pb2.Key()
-        if project is not None:
-            pb.partition_id.project_id = project
-        if namespace is not None:
-            pb.partition_id.namespace_id = namespace
-        for elem in path:
-            added = pb._pb.path.add()
-            added.kind = elem["kind"]
-            if "id" in elem:
-                added.id = elem["id"]
-            if "name" in elem:
-                added.name = elem["name"]
-        return pb
-
-    def test_wo_namespace_in_pb(self):
-        _PROJECT = "PROJECT"
-        pb = self._makePB(path=[{"kind": "KIND"}], project=_PROJECT)
-        key = self._call_fut(pb)
-        self.assertEqual(key.project, _PROJECT)
-        self.assertIsNone(key.namespace)
-
-    def test_w_namespace_in_pb(self):
-        _PROJECT = "PROJECT"
-        _NAMESPACE = "NAMESPACE"
-        pb = self._makePB(
-            path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT
-        )
-        key = self._call_fut(pb)
-        self.assertEqual(key.project, _PROJECT)
-        self.assertEqual(key.namespace, _NAMESPACE)
-
-    def test_w_nested_path_in_pb(self):
-        _PATH = [
-            {"kind": "PARENT", "name": "NAME"},
-            {"kind": "CHILD", "id": 1234},
-            {"kind": "GRANDCHILD", "id": 5678},
-        ]
-        pb = self._makePB(path=_PATH, project="PROJECT")
-        key = self._call_fut(pb)
-        self.assertEqual(key.path, _PATH)
-
-    def test_w_nothing_in_pb(self):
-        pb = self._makePB()
-        self.assertRaises(ValueError, self._call_fut, pb)
-
-
-class Test__get_read_options(unittest.TestCase):
-    def _call_fut(self, eventual, transaction_id):
-        from google.cloud.datastore.helpers import get_read_options
-
-        return get_read_options(eventual, transaction_id)
-
-    def test_eventual_w_transaction(self):
-        with self.assertRaises(ValueError):
-            self._call_fut(True, b"123")
-
-    def test_eventual_wo_transaction(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        read_options = self._call_fut(True, None)
-        expected = datastore_pb2.ReadOptions(
-            read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
-        )
-        self.assertEqual(read_options, expected)
-
-    def test_default_w_transaction(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        txn_id = b"123abc-easy-as"
-        read_options = self._call_fut(False, txn_id)
-        expected = datastore_pb2.ReadOptions(transaction=txn_id)
-        self.assertEqual(read_options, expected)
-
-    def test_default_wo_transaction(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        read_options = self._call_fut(False, None)
-        expected = datastore_pb2.ReadOptions()
-        self.assertEqual(read_options, expected)
-
-
-class Test__pb_attr_value(unittest.TestCase):
-    def _call_fut(self, val):
-        from google.cloud.datastore.helpers import _pb_attr_value
-
-        return _pb_attr_value(val)
-
-    def test_datetime_naive(self):
-        import calendar
-        import datetime
-        from google.cloud._helpers import UTC
-
-        micros = 4375
-        naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros)  # No zone.
-        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
-        name, value = self._call_fut(naive)
-        self.assertEqual(name, "timestamp_value")
-        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
-        self.assertEqual(value.nanos, 1000 * micros)
-
-    def test_datetime_w_zone(self):
-        import calendar
-        import datetime
-        from google.cloud._helpers import UTC
-
-        micros = 4375
-        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
-        name, value = self._call_fut(utc)
-        self.assertEqual(name, "timestamp_value")
-        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
-        self.assertEqual(value.nanos, 1000 * micros)
-
-    def test_key(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key("PATH", 1234, project="PROJECT")
-        name, value = self._call_fut(key)
-        self.assertEqual(name, "key_value")
-        self.assertEqual(value, key.to_protobuf())
-
-    def test_bool(self):
-        name, value = self._call_fut(False)
-        self.assertEqual(name, "boolean_value")
-        self.assertEqual(value, False)
-
-    def test_float(self):
-        name, value = self._call_fut(3.1415926)
-        self.assertEqual(name, "double_value")
-        self.assertEqual(value, 3.1415926)
-
-    def test_int(self):
-        name, value = self._call_fut(42)
-        self.assertEqual(name, "integer_value")
-        self.assertEqual(value, 42)
-
-    def test_long(self):
-        must_be_long = (1 << 63) - 1
-        name, value = self._call_fut(must_be_long)
-        self.assertEqual(name, "integer_value")
-        self.assertEqual(value, must_be_long)
-
-    def test_native_str(self):
-        name, value = self._call_fut("str")
-
-        self.assertEqual(name, "string_value")
-        self.assertEqual(value, "str")
-
-    def test_bytes(self):
-        name, value = self._call_fut(b"bytes")
-        self.assertEqual(name, "blob_value")
-        self.assertEqual(value, b"bytes")
-
-    def test_unicode(self):
-        name, value = self._call_fut(u"str")
-        self.assertEqual(name, "string_value")
-        self.assertEqual(value, u"str")
-
-    def test_entity(self):
-        from google.cloud.datastore.entity import Entity
-
-        entity = Entity()
-        name, value = self._call_fut(entity)
-        self.assertEqual(name, "entity_value")
-        self.assertIs(value, entity)
-
-    def test_dict(self):
-        from google.cloud.datastore.entity import Entity
-
-        orig_value = {"richard": b"feynman"}
-        name, value = self._call_fut(orig_value)
-        self.assertEqual(name, "entity_value")
-        self.assertIsInstance(value, Entity)
-        self.assertIsNone(value.key)
-        self.assertEqual(value._meanings, {})
-        self.assertEqual(value.exclude_from_indexes, set())
-        self.assertEqual(dict(value), orig_value)
-
-    def test_array(self):
-        values = ["a", 0, 3.14]
-        name, value = self._call_fut(values)
-        self.assertEqual(name, "array_value")
-        self.assertIs(value, values)
-
-    def test_geo_point(self):
-        from google.type import latlng_pb2
-        from google.cloud.datastore.helpers import GeoPoint
-
-        lat = 42.42
-        lng = 99.0007
-        geo_pt = GeoPoint(latitude=lat, longitude=lng)
-        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
-        name, value = self._call_fut(geo_pt)
-        self.assertEqual(name, "geo_point_value")
-        self.assertEqual(value, geo_pt_pb)
-
-    def test_null(self):
-        from google.protobuf import struct_pb2
-
-        name, value = self._call_fut(None)
-        self.assertEqual(name, "null_value")
-        self.assertEqual(value, struct_pb2.NULL_VALUE)
-
-    def test_object(self):
-        self.assertRaises(ValueError, self._call_fut, object())
-
-
-class Test__get_value_from_value_pb(unittest.TestCase):
-    def _call_fut(self, pb):
-        from google.cloud.datastore.helpers import _get_value_from_value_pb
-
-        return _get_value_from_value_pb(pb)
-
-    def _makePB(self, attr_name, attr_value):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value = entity_pb2.Value()
-        setattr(value._pb, attr_name, attr_value)
-        return value
-
-    def test_datetime(self):
-        import calendar
-        import datetime
-        from google.cloud._helpers import UTC
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        micros = 4375
-        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
-        value = entity_pb2.Value()
-        value._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
-        value._pb.timestamp_value.nanos = 1000 * micros
-        self.assertEqual(self._call_fut(value._pb), utc)
-
-    def test_key(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.key import Key
-
-        value = entity_pb2.Value()
-        expected = Key("KIND", 1234, project="PROJECT").to_protobuf()
-        value.key_value._pb.CopyFrom(expected._pb)
-        found = self._call_fut(value._pb)
-        self.assertEqual(found.to_protobuf(), expected)
-
-    def test_bool(self):
-        value = self._makePB("boolean_value", False)
-        self.assertEqual(self._call_fut(value._pb), False)
-
-    def test_float(self):
-        value = self._makePB("double_value", 3.1415926)
-        self.assertEqual(self._call_fut(value._pb), 3.1415926)
-
-    def test_int(self):
-        value = self._makePB("integer_value", 42)
-        self.assertEqual(self._call_fut(value._pb), 42)
-
-    def test_bytes(self):
-        value = self._makePB("blob_value", b"str")
-        self.assertEqual(self._call_fut(value._pb), b"str")
-
-    def test_unicode(self):
-        value = self._makePB("string_value", u"str")
-        self.assertEqual(self._call_fut(value._pb), u"str")
-
-    def test_entity(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.helpers import _new_value_pb
-
-        value = entity_pb2.Value()
-        entity_pb = value.entity_value
-        entity_pb._pb.key.path.add(kind="KIND")
-        entity_pb.key.partition_id.project_id = "PROJECT"
-
-        value_pb = _new_value_pb(entity_pb, "foo")
-        value_pb.string_value = "Foo"
-        entity = self._call_fut(value._pb)
-        self.assertIsInstance(entity, Entity)
-        self.assertEqual(entity["foo"], "Foo")
-
-    def test_array(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value = entity_pb2.Value()
-        array_pb = value.array_value.values
-        item_pb = array_pb._pb.add()
-        item_pb.string_value = "Foo"
-        item_pb = array_pb._pb.add()
-        item_pb.string_value = "Bar"
-        items = self._call_fut(value._pb)
-        self.assertEqual(items, ["Foo", "Bar"])
-
-    def test_geo_point(self):
-        from google.type import latlng_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore.helpers import GeoPoint
-
-        lat = -3.14
-        lng = 13.37
-        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
-        value = entity_pb2.Value(geo_point_value=geo_pt_pb)
-        result = self._call_fut(value._pb)
-        self.assertIsInstance(result, GeoPoint)
-        self.assertEqual(result.latitude, lat)
-        self.assertEqual(result.longitude, lng)
-
-    def test_null(self):
-        from google.protobuf import struct_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
-        result = self._call_fut(value._pb)
-        self.assertIsNone(result)
-
-    def test_unknown(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value = entity_pb2.Value()
-        with self.assertRaises(ValueError):
-            self._call_fut(value._pb)
-
-
-class Test_set_protobuf_value(unittest.TestCase):
-    def _call_fut(self, value_pb, val):
-        from google.cloud.datastore.helpers import _set_protobuf_value
-
-        return _set_protobuf_value(value_pb, val)
-
-    def _makePB(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        return entity_pb2.Value()._pb
-
-    def test_datetime(self):
-        import calendar
-        import datetime
-        from google.cloud._helpers import UTC
-
-        pb = self._makePB()
-        micros = 4375
-        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
-        self._call_fut(pb, utc)
-        value = pb.timestamp_value
-        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
-        self.assertEqual(value.nanos, 1000 * micros)
-
-    def test_key(self):
-        from google.cloud.datastore.key import Key
-
-        pb = self._makePB()
-        key = Key("KIND", 1234, project="PROJECT")
-        self._call_fut(pb, key)
-        value = pb.key_value
-        self.assertEqual(value, key.to_protobuf()._pb)
-
-    def test_none(self):
-        pb = self._makePB()
-        self._call_fut(pb, None)
-        self.assertEqual(pb.WhichOneof("value_type"), "null_value")
-
-    def test_bool(self):
-        pb = self._makePB()
-        self._call_fut(pb, False)
-        value = pb.boolean_value
-        self.assertEqual(value, False)
-
-    def test_float(self):
-        pb = self._makePB()
-        self._call_fut(pb, 3.1415926)
-        value = pb.double_value
-        self.assertEqual(value, 3.1415926)
-
-    def test_int(self):
-        pb = self._makePB()
-        self._call_fut(pb, 42)
-        value = pb.integer_value
-        self.assertEqual(value, 42)
-
-    def test_long(self):
-        pb = self._makePB()
-        must_be_long = (1 << 63) - 1
-        self._call_fut(pb, must_be_long)
-        value = pb.integer_value
-        self.assertEqual(value, must_be_long)
-
-    def test_native_str(self):
-        pb = self._makePB()
-        self._call_fut(pb, "str")
-
-        value = pb.string_value
-        self.assertEqual(value, "str")
-
-    def test_bytes(self):
-        pb = self._makePB()
-        self._call_fut(pb, b"str")
-        value = pb.blob_value
-        self.assertEqual(value, b"str")
-
-    def test_unicode(self):
-        pb = self._makePB()
-        self._call_fut(pb, u"str")
-        value = pb.string_value
-        self.assertEqual(value, u"str")
-
-    def test_entity_empty_wo_key(self):
-        from google.cloud.datastore.entity import Entity
-
-        pb = self._makePB()
-        entity = Entity()
-        self._call_fut(pb, entity)
-        value = pb.entity_value
-        self.assertEqual(value.key.SerializeToString(), b"")
-        self.assertEqual(len(list(value.properties.items())), 0)
-
-    def test_entity_w_key(self):
-        from google.cloud.datastore.entity import Entity
-        from google.cloud.datastore.key import Key
-
-        name = "foo"
-        value = u"Foo"
-        pb = self._makePB()
-        key = Key("KIND", 123, project="PROJECT")
-        entity = Entity(key=key)
-        entity[name] = value
-        self._call_fut(pb, entity)
-        entity_pb = pb.entity_value
-        self.assertEqual(entity_pb.key, key.to_protobuf()._pb)
-
-        prop_dict = dict(entity_pb.properties.items())
-        self.assertEqual(len(prop_dict), 1)
-        self.assertEqual(list(prop_dict.keys()), [name])
-        self.assertEqual(prop_dict[name].string_value, value)
-
-    def test_array(self):
-        pb = self._makePB()
-        values = [u"a", 0, 3.14]
-        self._call_fut(pb, values)
-        marshalled = pb.array_value.values
-        self.assertEqual(len(marshalled), len(values))
-        self.assertEqual(marshalled[0].string_value, values[0])
-        self.assertEqual(marshalled[1].integer_value, values[1])
-        self.assertEqual(marshalled[2].double_value, values[2])
-
-    def test_geo_point(self):
-        from google.type import latlng_pb2
-        from google.cloud.datastore.helpers import GeoPoint
-
-        pb = self._makePB()
-        lat = 9.11
-        lng = 3.337
-        geo_pt = GeoPoint(latitude=lat, longitude=lng)
-        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
-        self._call_fut(pb, geo_pt)
-        self.assertEqual(pb.geo_point_value, geo_pt_pb)
-
-
-class Test__get_meaning(unittest.TestCase):
-    def _call_fut(self, *args, **kwargs):
-        from google.cloud.datastore.helpers import _get_meaning
-
-        return _get_meaning(*args, **kwargs)
-
-    def test_no_meaning(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        result = self._call_fut(value_pb)
-        self.assertIsNone(result)
-
-    def test_single(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        value_pb.meaning = meaning = 22
-        value_pb.string_value = u"hi"
-        result = self._call_fut(value_pb)
-        self.assertEqual(meaning, result)
-
-    def test_empty_array_value(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        value_pb._pb.array_value.values.add()
-        value_pb._pb.array_value.values.pop()
-
-        result = self._call_fut(value_pb, is_list=True)
-        self.assertEqual(None, result)
-
-    def test_array_value(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        meaning = 9
-        sub_value_pb1 = value_pb._pb.array_value.values.add()
-        sub_value_pb2 = value_pb._pb.array_value.values.add()
-
-        sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
-        sub_value_pb1.string_value = u"hi"
-        sub_value_pb2.string_value = u"bye"
-
-        result = self._call_fut(value_pb, is_list=True)
-        self.assertEqual(meaning, result)
-
-    def test_array_value_multiple_meanings(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        meaning1 = 9
-        meaning2 = 10
-        sub_value_pb1 = value_pb._pb.array_value.values.add()
-        sub_value_pb2 = value_pb._pb.array_value.values.add()
-
-        sub_value_pb1.meaning = meaning1
-        sub_value_pb2.meaning = meaning2
-        sub_value_pb1.string_value = u"hi"
-        sub_value_pb2.string_value = u"bye"
-
-        result = self._call_fut(value_pb, is_list=True)
-        self.assertEqual(result, [meaning1, meaning2])
-
-    def test_array_value_meaning_partially_unset(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        value_pb = entity_pb2.Value()
-        meaning1 = 9
-        sub_value_pb1 = value_pb._pb.array_value.values.add()
-        sub_value_pb2 = value_pb._pb.array_value.values.add()
-
-        sub_value_pb1.meaning = meaning1
-        sub_value_pb1.string_value = u"hi"
-        sub_value_pb2.string_value = u"bye"
-
-        result = self._call_fut(value_pb, is_list=True)
-        self.assertEqual(result, [meaning1, None])
-
-
-class TestGeoPoint(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.helpers import GeoPoint
-
-        return GeoPoint
-
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-
-    def test_constructor(self):
-        lat = 81.2
-        lng = 359.9999
-        geo_pt = self._make_one(lat, lng)
-        self.assertEqual(geo_pt.latitude, lat)
-        self.assertEqual(geo_pt.longitude, lng)
-
-    def test_to_protobuf(self):
-        from google.type import latlng_pb2
-
-        lat = 0.0001
-        lng = 20.03
-        geo_pt = self._make_one(lat, lng)
-        result = geo_pt.to_protobuf()
-        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
-        self.assertEqual(result, geo_pt_pb)
-
-    def test___eq__(self):
-        lat = 0.0001
-        lng = 20.03
-        geo_pt1 = self._make_one(lat, lng)
-        geo_pt2 = self._make_one(lat, lng)
-        self.assertEqual(geo_pt1, geo_pt2)
-
-    def test___eq__type_differ(self):
-        lat = 0.0001
-        lng = 20.03
-        geo_pt1 = self._make_one(lat, lng)
-        geo_pt2 = object()
-        self.assertNotEqual(geo_pt1, geo_pt2)
-
-    def test___ne__same_value(self):
-        lat = 0.0001
-        lng = 20.03
-        geo_pt1 = self._make_one(lat, lng)
-        geo_pt2 = self._make_one(lat, lng)
-        comparison_val = geo_pt1 != geo_pt2
-        self.assertFalse(comparison_val)
-
-    def test___ne__(self):
-        geo_pt1 = self._make_one(0.0, 1.0)
-        geo_pt2 = self._make_one(2.0, 3.0)
-        self.assertNotEqual(geo_pt1, geo_pt2)
+            )
+        }
+    )
+    assert entity_pb == expected_pb
+
+
+def _make_key_pb(project=None, namespace=None, path=()):
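+    # Helper: build a Key protobuf from an optional project/namespace and a
+    # path given as dicts with a "kind" plus an optional "id" or "name".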
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+
+    pb = entity_pb2.Key()
+    if project is not None:
+        pb.partition_id.project_id = project
+    if namespace is not None:
+        pb.partition_id.namespace_id = namespace
+    for elem in path:
+        added = pb._pb.path.add()
+        added.kind = elem["kind"]
+        if "id" in elem:
+            added.id = elem["id"]
+        if "name" in elem:
+            added.name = elem["name"]
+    return pb
+
+
+def test_key_from_protobuf_wo_namespace_in_pb():
+    from google.cloud.datastore.helpers import key_from_protobuf
+
+    _PROJECT = "PROJECT"
+    pb = _make_key_pb(path=[{"kind": "KIND"}], project=_PROJECT)
+    key = key_from_protobuf(pb)
+    assert key.project == _PROJECT
+    assert key.namespace is None
+
+
+def test_key_from_protobuf_w_namespace_in_pb():
+    from google.cloud.datastore.helpers import key_from_protobuf
+
+    _PROJECT = "PROJECT"
+    _NAMESPACE = "NAMESPACE"
+    pb = _make_key_pb(path=[{"kind": "KIND"}], namespace=_NAMESPACE, project=_PROJECT)
+    key = key_from_protobuf(pb)
+    assert key.project == _PROJECT
+    assert key.namespace == _NAMESPACE
+
+
+def test_key_from_protobuf_w_nested_path_in_pb():
+    from google.cloud.datastore.helpers import key_from_protobuf
+
+    _PATH = [
+        {"kind": "PARENT", "name": "NAME"},
+        {"kind": "CHILD", "id": 1234},
+        {"kind": "GRANDCHILD", "id": 5678},
+    ]
+    pb = _make_key_pb(path=_PATH, project="PROJECT")
+    key = key_from_protobuf(pb)
+    assert key.path == _PATH
+
+
+def test_key_from_protobuf_w_nothing_in_pb():
+    from google.cloud.datastore.helpers import key_from_protobuf
+
+    pb = _make_key_pb()
+    with pytest.raises(ValueError):
+        key_from_protobuf(pb)
+
+
+def test__get_read_options_w_eventual_w_txn():
+    from google.cloud.datastore.helpers import get_read_options
+
+    with pytest.raises(ValueError):
+        get_read_options(True, b"123")
+
+
+def test__get_read_options_w_eventual_wo_txn():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.helpers import get_read_options
+
+    read_options = get_read_options(True, None)
+    expected = datastore_pb2.ReadOptions(
+        read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
+    )
+    assert read_options == expected
+
+
+def test__get_read_options_w_default_w_txn():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.helpers import get_read_options
+
+    txn_id = b"123abc-easy-as"
+    read_options = get_read_options(False, txn_id)
+    expected = datastore_pb2.ReadOptions(transaction=txn_id)
+    assert read_options == expected
+
+
+def test__get_read_options_w_default_wo_txn():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore.helpers import get_read_options
+
+    read_options = get_read_options(False, None)
+    expected = datastore_pb2.ReadOptions()
+    assert read_options == expected
+
+
+def test__pb_attr_value_w_datetime_naive():
+    import calendar
+    import datetime
+    from google.cloud._helpers import UTC
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    micros = 4375
+    naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros)  # No zone.
+    utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
+    name, value = _pb_attr_value(naive)
+    assert name == "timestamp_value"
+    assert value.seconds == calendar.timegm(utc.timetuple())
+    assert value.nanos == 1000 * micros
+
+
+def test__pb_attr_value_w_datetime_w_zone():
+    import calendar
+    import datetime
+    from google.cloud._helpers import UTC
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    micros = 4375
+    utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
+    name, value = _pb_attr_value(utc)
+    assert name == "timestamp_value"
+    assert value.seconds == calendar.timegm(utc.timetuple())
+    assert value.nanos == 1000 * micros
+
+
+def test__pb_attr_value_w_key():
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    key = Key("PATH", 1234, project="PROJECT")
+    name, value = _pb_attr_value(key)
+    assert name == "key_value"
+    assert value == key.to_protobuf()
+
+
+def test__pb_attr_value_w_bool():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(False)
+    assert name == "boolean_value"
+    assert not value
+
+
+def test__pb_attr_value_w_float():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(3.1415926)
+    assert name == "double_value"
+    assert value == 3.1415926
+
+
+def test__pb_attr_value_w_int():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(42)
+    assert name == "integer_value"
+    assert value == 42
+
+
+def test__pb_attr_value_w_long():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    must_be_long = (1 << 63) - 1
+    name, value = _pb_attr_value(must_be_long)
+    assert name == "integer_value"
+    assert value == must_be_long
+
+
+def test__pb_attr_value_w_native_str():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value("str")
+
+    assert name == "string_value"
+    assert value == "str"
+
+
+def test__pb_attr_value_w_bytes():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(b"bytes")
+    assert name == "blob_value"
+    assert value == b"bytes"
+
+
+def test__pb_attr_value_w_unicode():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(u"str")
+    assert name == "string_value"
+    assert value == u"str"
+
+
+def test__pb_attr_value_w_entity():
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    entity = Entity()
+    name, value = _pb_attr_value(entity)
+    assert name == "entity_value"
+    assert value is entity
+
+
+def test__pb_attr_value_w_dict():
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    orig_value = {"richard": b"feynman"}
+    name, value = _pb_attr_value(orig_value)
+    assert name == "entity_value"
+    assert isinstance(value, Entity)
+    assert value.key is None
+    assert value._meanings == {}
+    assert value.exclude_from_indexes == set()
+    assert dict(value) == orig_value
+
+
+def test__pb_attr_value_w_array():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    values = ["a", 0, 3.14]
+    name, value = _pb_attr_value(values)
+    assert name == "array_value"
+    assert value is values
+
+
+def test__pb_attr_value_w_geo_point():
+    from google.type import latlng_pb2
+    from google.cloud.datastore.helpers import GeoPoint
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    lat = 42.42
+    lng = 99.0007
+    geo_pt = GeoPoint(latitude=lat, longitude=lng)
+    geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+    name, value = _pb_attr_value(geo_pt)
+    assert name == "geo_point_value"
+    assert value == geo_pt_pb
+
+
+def test__pb_attr_value_w_null():
+    from google.protobuf import struct_pb2
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    name, value = _pb_attr_value(None)
+    assert name == "null_value"
+    assert value == struct_pb2.NULL_VALUE
+
+
+def test__pb_attr_value_w_object():
+    from google.cloud.datastore.helpers import _pb_attr_value
+
+    with pytest.raises(ValueError):
+        _pb_attr_value(object())
+
+
+def _make_value_pb(attr_name, attr_value):
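+    # Helper: build a Value protobuf with a single attribute set directly on
+    # the underlying raw protobuf message.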
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+
+    value = entity_pb2.Value()
+    setattr(value._pb, attr_name, attr_value)
+    return value
+
+
+def test__get_value_from_value_pb_w_datetime():
+    import calendar
+    import datetime
+    from google.cloud._helpers import UTC
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    micros = 4375
+    utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
+    value = entity_pb2.Value()
+    value._pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
+    value._pb.timestamp_value.nanos = 1000 * micros
+    assert _get_value_from_value_pb(value._pb) == utc
+
+
+def test__get_value_from_value_pb_w_key():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = entity_pb2.Value()
+    expected = Key("KIND", 1234, project="PROJECT").to_protobuf()
+    value.key_value._pb.CopyFrom(expected._pb)
+    found = _get_value_from_value_pb(value._pb)
+    assert found.to_protobuf() == expected
+
+
+def test__get_value_from_value_pb_w_bool():
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = _make_value_pb("boolean_value", False)
+    assert not _get_value_from_value_pb(value._pb)
+
+
+def test__get_value_from_value_pb_w_float():
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = _make_value_pb("double_value", 3.1415926)
+    assert _get_value_from_value_pb(value._pb) == 3.1415926
+
+
+def test__get_value_from_value_pb_w_int():
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = _make_value_pb("integer_value", 42)
+    assert _get_value_from_value_pb(value._pb) == 42
+
+
+def test__get_value_from_value_pb_w_bytes():
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = _make_value_pb("blob_value", b"str")
+    assert _get_value_from_value_pb(value._pb) == b"str"
+
+
+def test__get_value_from_value_pb_w_unicode():
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = _make_value_pb("string_value", u"str")
+    assert _get_value_from_value_pb(value._pb) == u"str"
+
+
+def test__get_value_from_value_pb_w_entity():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _new_value_pb
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = entity_pb2.Value()
+    entity_pb = value.entity_value
+    entity_pb._pb.key.path.add(kind="KIND")
+    entity_pb.key.partition_id.project_id = "PROJECT"
+
+    value_pb = _new_value_pb(entity_pb, "foo")
+    value_pb.string_value = "Foo"
+    entity = _get_value_from_value_pb(value._pb)
+    assert isinstance(entity, Entity)
+    assert entity["foo"] == "Foo"
+
+
+def test__get_value_from_value_pb_w_array():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = entity_pb2.Value()
+    array_pb = value.array_value.values
+    item_pb = array_pb._pb.add()
+    item_pb.string_value = "Foo"
+    item_pb = array_pb._pb.add()
+    item_pb.string_value = "Bar"
+    items = _get_value_from_value_pb(value._pb)
+    assert items == ["Foo", "Bar"]
+
+
+def test__get_value_from_value_pb_w_geo_point():
+    from google.type import latlng_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import GeoPoint
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    lat = -3.14
+    lng = 13.37
+    geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+    value = entity_pb2.Value(geo_point_value=geo_pt_pb)
+    result = _get_value_from_value_pb(value._pb)
+    assert isinstance(result, GeoPoint)
+    assert result.latitude == lat
+    assert result.longitude == lng
+
+
+def test__get_value_from_value_pb_w_null():
+    from google.protobuf import struct_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
+    result = _get_value_from_value_pb(value._pb)
+    assert result is None
+
+
+def test__get_value_from_value_pb_w_unknown():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_value_from_value_pb
+
+    value = entity_pb2.Value()
+    with pytest.raises(ValueError):
+        _get_value_from_value_pb(value._pb)
+
+
+def _make_empty_value_pb():
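+    # Helper: return the raw protobuf message backing an empty Value, which is
+    # the form these tests pass to _set_protobuf_value.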
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+
+    return entity_pb2.Value()._pb
+
+
+def test__set_protobuf_value_w_datetime():
+    import calendar
+    import datetime
+    from google.cloud._helpers import UTC
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    micros = 4375
+    utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
+    _set_protobuf_value(pb, utc)
+    value = pb.timestamp_value
+    assert value.seconds == calendar.timegm(utc.timetuple())
+    assert value.nanos == 1000 * micros
+
+
+def test__set_protobuf_value_w_key():
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    key = Key("KIND", 1234, project="PROJECT")
+    _set_protobuf_value(pb, key)
+    value = pb.key_value
+    assert value == key.to_protobuf()._pb
+
+
+def test__set_protobuf_value_w_none():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, None)
+    assert pb.WhichOneof("value_type") == "null_value"
+
+
+def test__set_protobuf_value_w_bool():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, False)
+    value = pb.boolean_value
+    assert not value
+
+
+def test__set_protobuf_value_w_float():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, 3.1415926)
+    value = pb.double_value
+    assert value == 3.1415926
+
+
+def test__set_protobuf_value_w_int():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, 42)
+    value = pb.integer_value
+    assert value == 42
+
+
+def test__set_protobuf_value_w_long():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    must_be_long = (1 << 63) - 1
+    _set_protobuf_value(pb, must_be_long)
+    value = pb.integer_value
+    assert value == must_be_long
+
+
+def test__set_protobuf_value_w_native_str():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, "str")
+
+    value = pb.string_value
+    assert value == "str"
+
+
+def test__set_protobuf_value_w_bytes():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, b"str")
+    value = pb.blob_value
+    assert value == b"str"
+
+
+def test__set_protobuf_value_w_unicode():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    _set_protobuf_value(pb, u"str")
+    value = pb.string_value
+    assert value == u"str"
+
+
+def test__set_protobuf_value_w_entity_empty_wo_key():
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    entity = Entity()
+    _set_protobuf_value(pb, entity)
+    value = pb.entity_value
+    assert value.key.SerializeToString() == b""
+    assert len(list(value.properties.items())) == 0
+
+
+def test__set_protobuf_value_w_entity_w_key():
+    from google.cloud.datastore.entity import Entity
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    name = "foo"
+    value = u"Foo"
+    pb = _make_empty_value_pb()
+    key = Key("KIND", 123, project="PROJECT")
+    entity = Entity(key=key)
+    entity[name] = value
+    _set_protobuf_value(pb, entity)
+    entity_pb = pb.entity_value
+    assert entity_pb.key == key.to_protobuf()._pb
+
+    prop_dict = dict(entity_pb.properties.items())
+    assert len(prop_dict) == 1
+    assert list(prop_dict.keys()) == [name]
+    assert prop_dict[name].string_value == value
+
+
+def test__set_protobuf_value_w_array():
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    values = [u"a", 0, 3.14]
+    _set_protobuf_value(pb, values)
+    marshalled = pb.array_value.values
+    assert len(marshalled) == len(values)
+    assert marshalled[0].string_value == values[0]
+    assert marshalled[1].integer_value == values[1]
+    assert marshalled[2].double_value == values[2]
+
+
+def test__set_protobuf_value_w_geo_point():
+    from google.type import latlng_pb2
+    from google.cloud.datastore.helpers import GeoPoint
+    from google.cloud.datastore.helpers import _set_protobuf_value
+
+    pb = _make_empty_value_pb()
+    lat = 9.11
+    lng = 3.337
+    geo_pt = GeoPoint(latitude=lat, longitude=lng)
+    geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+    _set_protobuf_value(pb, geo_pt)
+    assert pb.geo_point_value == geo_pt_pb
+
+
+def test__get_meaning_w_no_meaning():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    result = _get_meaning(value_pb)
+    assert result is None
+
+
+def test__get_meaning_w_single():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    value_pb.meaning = meaning = 22
+    value_pb.string_value = u"hi"
+    result = _get_meaning(value_pb)
+    assert meaning == result
+
+
+def test__get_meaning_w_empty_array_value():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    value_pb._pb.array_value.values.add()
+    value_pb._pb.array_value.values.pop()
+
+    result = _get_meaning(value_pb, is_list=True)
+    assert result is None
+
+
+def test__get_meaning_w_array_value():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    meaning = 9
+    sub_value_pb1 = value_pb._pb.array_value.values.add()
+    sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+    sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
+    sub_value_pb1.string_value = u"hi"
+    sub_value_pb2.string_value = u"bye"
+
+    result = _get_meaning(value_pb, is_list=True)
+    assert meaning == result
+
+
+def test__get_meaning_w_array_value_multiple_meanings():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    meaning1 = 9
+    meaning2 = 10
+    sub_value_pb1 = value_pb._pb.array_value.values.add()
+    sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+    sub_value_pb1.meaning = meaning1
+    sub_value_pb2.meaning = meaning2
+    sub_value_pb1.string_value = u"hi"
+    sub_value_pb2.string_value = u"bye"
+
+    result = _get_meaning(value_pb, is_list=True)
+    assert result == [meaning1, meaning2]
+
+
+def test__get_meaning_w_array_value_meaning_partially_unset():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore.helpers import _get_meaning
+
+    value_pb = entity_pb2.Value()
+    meaning1 = 9
+    sub_value_pb1 = value_pb._pb.array_value.values.add()
+    sub_value_pb2 = value_pb._pb.array_value.values.add()
+
+    sub_value_pb1.meaning = meaning1
+    sub_value_pb1.string_value = u"hi"
+    sub_value_pb2.string_value = u"bye"
+
+    result = _get_meaning(value_pb, is_list=True)
+    assert result == [meaning1, None]
+
+
+def _make_geopoint(*args, **kwargs):
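+    # Thin factory wrapper around GeoPoint used by the tests below.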
+    from google.cloud.datastore.helpers import GeoPoint
+
+    return GeoPoint(*args, **kwargs)
+
+
+def test_geopoint_ctor():
+    lat = 81.2
+    lng = 359.9999
+    geo_pt = _make_geopoint(lat, lng)
+    assert geo_pt.latitude == lat
+    assert geo_pt.longitude == lng
+
+
+def test_geopoint_to_protobuf():
+    from google.type import latlng_pb2
+
+    lat = 0.0001
+    lng = 20.03
+    geo_pt = _make_geopoint(lat, lng)
+    result = geo_pt.to_protobuf()
+    geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+    assert result == geo_pt_pb
+
+
+def test_geopoint___eq__():
+    lat = 0.0001
+    lng = 20.03
+    geo_pt1 = _make_geopoint(lat, lng)
+    geo_pt2 = _make_geopoint(lat, lng)
+    assert geo_pt1 == geo_pt2
+
+
+def test_geopoint___eq__type_differ():
+    lat = 0.0001
+    lng = 20.03
+    geo_pt1 = _make_geopoint(lat, lng)
+    geo_pt2 = object()
+    assert geo_pt1 != geo_pt2
+
+
+def test_geopoint___ne__same_value():
+    lat = 0.0001
+    lng = 20.03
+    geo_pt1 = _make_geopoint(lat, lng)
+    geo_pt2 = _make_geopoint(lat, lng)
+    assert not geo_pt1 != geo_pt2
+
+
+def test_geopoint___ne__():
+    geo_pt1 = _make_geopoint(0.0, 1.0)
+    geo_pt2 = _make_geopoint(2.0, 3.0)
+    assert geo_pt1 != geo_pt2
diff --git a/tests/unit/test_key.py b/tests/unit/test_key.py
index 9d130fb4..2d2a88e7 100644
--- a/tests/unit/test_key.py
+++ b/tests/unit/test_key.py
@@ -12,735 +12,772 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
-
-class TestKey(unittest.TestCase):
-
-    _DEFAULT_PROJECT = "PROJECT"
-    # NOTE: This comes directly from a running (in the dev appserver)
-    #       App Engine app. Created via:
-    #
-    #           from google.appengine.ext import ndb
-    #           key = ndb.Key(
-    #               'Parent', 59, 'Child', 'Feather',
-    #               namespace='space', app='s~sample-app')
-    #           urlsafe = key.urlsafe()
-    _URLSAFE_EXAMPLE1 = (
-        b"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" b"WF0aGVyDKIBBXNwYWNl"
+import pytest
+
+
+_DEFAULT_PROJECT = "PROJECT"
+PROJECT = "my-prahjekt"
+# NOTE: This comes directly from a running (in the dev appserver)
+#       App Engine app. Created via:
+#
+#           from google.appengine.ext import ndb
+#           key = ndb.Key(
+#               'Parent', 59, 'Child', 'Feather',
+#               namespace='space', app='s~sample-app')
+#           urlsafe = key.urlsafe()
+_URLSAFE_EXAMPLE1 = (
+    b"agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" b"WF0aGVyDKIBBXNwYWNl"
+)
+_URLSAFE_APP1 = "s~sample-app"
+_URLSAFE_NAMESPACE1 = "space"
+_URLSAFE_FLAT_PATH1 = ("Parent", 59, "Child", "Feather")
+_URLSAFE_EXAMPLE2 = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA"
+_URLSAFE_APP2 = "s~fire"
+_URLSAFE_FLAT_PATH2 = ("Kind", "Thing")
+_URLSAFE_EXAMPLE3 = b"ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw"
+_URLSAFE_APP3 = "sample-app-no-location"
+_URLSAFE_FLAT_PATH3 = ("Zorp", 88)
+
+
+def _make_key(*args, **kwargs):
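+    # Thin factory wrapper around Key used by the tests below.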
+    from google.cloud.datastore.key import Key
+
+    return Key(*args, **kwargs)
+
+
+def test_key_ctor_empty():
+    with pytest.raises(ValueError):
+        _make_key()
+
+
+def test_key_ctor_no_project():
+    with pytest.raises(ValueError):
+        _make_key("KIND")
+
+
+def test_key_ctor_w_explicit_project_empty_path():
+    with pytest.raises(ValueError):
+        _make_key(project=PROJECT)
+
+
+def test_key_ctor_parent():
+    _PARENT_KIND = "KIND1"
+    _PARENT_ID = 1234
+    _PARENT_PROJECT = "PROJECT-ALT"
+    _PARENT_NAMESPACE = "NAMESPACE"
+    _CHILD_KIND = "KIND2"
+    _CHILD_ID = 2345
+    _PATH = [
+        {"kind": _PARENT_KIND, "id": _PARENT_ID},
+        {"kind": _CHILD_KIND, "id": _CHILD_ID},
+    ]
+    parent_key = _make_key(
+        _PARENT_KIND, _PARENT_ID, project=_PARENT_PROJECT, namespace=_PARENT_NAMESPACE,
     )
-    _URLSAFE_APP1 = "s~sample-app"
-    _URLSAFE_NAMESPACE1 = "space"
-    _URLSAFE_FLAT_PATH1 = ("Parent", 59, "Child", "Feather")
-    _URLSAFE_EXAMPLE2 = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA"
-    _URLSAFE_APP2 = "s~fire"
-    _URLSAFE_FLAT_PATH2 = ("Kind", "Thing")
-    _URLSAFE_EXAMPLE3 = b"ahhzfnNhbXBsZS1hcHAtbm8tbG9jYXRpb25yCgsSBFpvcnAYWAw"
-    _URLSAFE_APP3 = "sample-app-no-location"
-    _URLSAFE_FLAT_PATH3 = ("Zorp", 88)
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.key import Key
-
-        return Key
-
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-
-    def test_ctor_empty(self):
-        self.assertRaises(ValueError, self._make_one)
-
-    def test_ctor_no_project(self):
-        klass = self._get_target_class()
-        self.assertRaises(ValueError, klass, "KIND")
-
-    def test_ctor_w_explicit_project_empty_path(self):
-        _PROJECT = "PROJECT"
-        self.assertRaises(ValueError, self._make_one, project=_PROJECT)
-
-    def test_ctor_parent(self):
-        _PARENT_KIND = "KIND1"
-        _PARENT_ID = 1234
-        _PARENT_PROJECT = "PROJECT-ALT"
-        _PARENT_NAMESPACE = "NAMESPACE"
-        _CHILD_KIND = "KIND2"
-        _CHILD_ID = 2345
-        _PATH = [
-            {"kind": _PARENT_KIND, "id": _PARENT_ID},
-            {"kind": _CHILD_KIND, "id": _CHILD_ID},
-        ]
-        parent_key = self._make_one(
-            _PARENT_KIND,
-            _PARENT_ID,
-            project=_PARENT_PROJECT,
-            namespace=_PARENT_NAMESPACE,
-        )
-        key = self._make_one(_CHILD_KIND, _CHILD_ID, parent=parent_key)
-        self.assertEqual(key.project, parent_key.project)
-        self.assertEqual(key.namespace, parent_key.namespace)
-        self.assertEqual(key.kind, _CHILD_KIND)
-        self.assertEqual(key.path, _PATH)
-        self.assertIs(key.parent, parent_key)
-
-    def test_ctor_partial_parent(self):
-        parent_key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        with self.assertRaises(ValueError):
-            self._make_one("KIND2", 1234, parent=parent_key)
-
-    def test_ctor_parent_bad_type(self):
-        with self.assertRaises(AttributeError):
-            self._make_one(
-                "KIND2", 1234, parent=("KIND1", 1234), project=self._DEFAULT_PROJECT
-            )
-
-    def test_ctor_parent_bad_namespace(self):
-        parent_key = self._make_one(
-            "KIND", 1234, namespace="FOO", project=self._DEFAULT_PROJECT
-        )
-        with self.assertRaises(ValueError):
-            self._make_one(
-                "KIND2",
-                1234,
-                namespace="BAR",
-                parent=parent_key,
-                PROJECT=self._DEFAULT_PROJECT,
-            )
-
-    def test_ctor_parent_bad_project(self):
-        parent_key = self._make_one("KIND", 1234, project="FOO")
-        with self.assertRaises(ValueError):
-            self._make_one("KIND2", 1234, parent=parent_key, project="BAR")
-
-    def test_ctor_parent_empty_path(self):
-        parent_key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
-        with self.assertRaises(ValueError):
-            self._make_one(parent=parent_key)
-
-    def test_ctor_explicit(self):
-        _PROJECT = "PROJECT-ALT"
-        _NAMESPACE = "NAMESPACE"
-        _KIND = "KIND"
-        _ID = 1234
-        _PATH = [{"kind": _KIND, "id": _ID}]
-        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
-        self.assertEqual(key.project, _PROJECT)
-        self.assertEqual(key.namespace, _NAMESPACE)
-        self.assertEqual(key.kind, _KIND)
-        self.assertEqual(key.path, _PATH)
-
-    def test_ctor_bad_kind(self):
-        self.assertRaises(
-            ValueError, self._make_one, object(), project=self._DEFAULT_PROJECT
-        )
+    key = _make_key(_CHILD_KIND, _CHILD_ID, parent=parent_key)
+    assert key.project == parent_key.project
+    assert key.namespace == parent_key.namespace
+    assert key.kind == _CHILD_KIND
+    assert key.path == _PATH
+    assert key.parent is parent_key
 
-    def test_ctor_bad_id_or_name(self):
-        self.assertRaises(
-            ValueError, self._make_one, "KIND", object(), project=self._DEFAULT_PROJECT
-        )
-        self.assertRaises(
-            ValueError, self._make_one, "KIND", None, project=self._DEFAULT_PROJECT
-        )
-        self.assertRaises(
-            ValueError,
-            self._make_one,
-            "KIND",
-            10,
-            "KIND2",
-            None,
-            project=self._DEFAULT_PROJECT,
-        )
 
-    def test__clone(self):
-        _PROJECT = "PROJECT-ALT"
-        _NAMESPACE = "NAMESPACE"
-        _KIND = "KIND"
-        _ID = 1234
-        _PATH = [{"kind": _KIND, "id": _ID}]
-        key = self._make_one(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
-        clone = key._clone()
-        self.assertEqual(clone.project, _PROJECT)
-        self.assertEqual(clone.namespace, _NAMESPACE)
-        self.assertEqual(clone.kind, _KIND)
-        self.assertEqual(clone.path, _PATH)
-
-    def test__clone_with_parent(self):
-        _PROJECT = "PROJECT-ALT"
-        _NAMESPACE = "NAMESPACE"
-        _KIND1 = "PARENT"
-        _KIND2 = "KIND"
-        _ID1 = 1234
-        _ID2 = 2345
-        _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}]
-
-        parent = self._make_one(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT)
-        key = self._make_one(_KIND2, _ID2, parent=parent)
-        self.assertIs(key.parent, parent)
-        clone = key._clone()
-        self.assertIs(clone.parent, key.parent)
-        self.assertEqual(clone.project, _PROJECT)
-        self.assertEqual(clone.namespace, _NAMESPACE)
-        self.assertEqual(clone.path, _PATH)
-
-    def test___eq_____ne___w_non_key(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _NAME = "one"
-        key = self._make_one(_KIND, _NAME, project=_PROJECT)
-        self.assertFalse(key == object())
-        self.assertTrue(key != object())
-
-    def test___eq_____ne___two_incomplete_keys_same_kind(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        key1 = self._make_one(_KIND, project=_PROJECT)
-        key2 = self._make_one(_KIND, project=_PROJECT)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        key1 = self._make_one(_KIND, project=_PROJECT)
-        key2 = self._make_one(_KIND, _ID, project=_PROJECT)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        key1 = self._make_one(_KIND, _ID, project=_PROJECT)
-        key2 = self._make_one(_KIND, project=_PROJECT)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_different_ids(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID1 = 1234
-        _ID2 = 2345
-        key1 = self._make_one(_KIND, _ID1, project=_PROJECT)
-        key2 = self._make_one(_KIND, _ID2, project=_PROJECT)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_id(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        key1 = self._make_one(_KIND, _ID, project=_PROJECT)
-        key2 = self._make_one(_KIND, _ID, project=_PROJECT)
-        self.assertTrue(key1 == key2)
-        self.assertFalse(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_id_different_project(self):
-        _PROJECT1 = "PROJECT1"
-        _PROJECT2 = "PROJECT2"
-        _KIND = "KIND"
-        _ID = 1234
-        key1 = self._make_one(_KIND, _ID, project=_PROJECT1)
-        key2 = self._make_one(_KIND, _ID, project=_PROJECT2)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_id_different_namespace(self):
-        _PROJECT = "PROJECT"
-        _NAMESPACE1 = "NAMESPACE1"
-        _NAMESPACE2 = "NAMESPACE2"
-        _KIND = "KIND"
-        _ID = 1234
-        key1 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1)
-        key2 = self._make_one(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_different_names(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _NAME1 = "one"
-        _NAME2 = "two"
-        key1 = self._make_one(_KIND, _NAME1, project=_PROJECT)
-        key2 = self._make_one(_KIND, _NAME2, project=_PROJECT)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_name(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _NAME = "one"
-        key1 = self._make_one(_KIND, _NAME, project=_PROJECT)
-        key2 = self._make_one(_KIND, _NAME, project=_PROJECT)
-        self.assertTrue(key1 == key2)
-        self.assertFalse(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_name_different_project(self):
-        _PROJECT1 = "PROJECT1"
-        _PROJECT2 = "PROJECT2"
-        _KIND = "KIND"
-        _NAME = "one"
-        key1 = self._make_one(_KIND, _NAME, project=_PROJECT1)
-        key2 = self._make_one(_KIND, _NAME, project=_PROJECT2)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___eq_____ne___same_kind_and_name_different_namespace(self):
-        _PROJECT = "PROJECT"
-        _NAMESPACE1 = "NAMESPACE1"
-        _NAMESPACE2 = "NAMESPACE2"
-        _KIND = "KIND"
-        _NAME = "one"
-        key1 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1)
-        key2 = self._make_one(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2)
-        self.assertFalse(key1 == key2)
-        self.assertTrue(key1 != key2)
-
-    def test___hash___incomplete(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        key = self._make_one(_KIND, project=_PROJECT)
-        self.assertNotEqual(hash(key), hash(_KIND) + hash(_PROJECT) + hash(None))
-
-    def test___hash___completed_w_id(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _ID = 1234
-        key = self._make_one(_KIND, _ID, project=_PROJECT)
-        self.assertNotEqual(
-            hash(key), hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None)
-        )
+def test_key_ctor_partial_parent():
+    parent_key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        _make_key("KIND2", 1234, parent=parent_key)
 
-    def test___hash___completed_w_name(self):
-        _PROJECT = "PROJECT"
-        _KIND = "KIND"
-        _NAME = "NAME"
-        key = self._make_one(_KIND, _NAME, project=_PROJECT)
-        self.assertNotEqual(
-            hash(key), hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None)
-        )
 
-    def test_completed_key_on_partial_w_id(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        _ID = 1234
-        new_key = key.completed_key(_ID)
-        self.assertIsNot(key, new_key)
-        self.assertEqual(new_key.id, _ID)
-        self.assertIsNone(new_key.name)
-
-    def test_completed_key_on_partial_w_name(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        _NAME = "NAME"
-        new_key = key.completed_key(_NAME)
-        self.assertIsNot(key, new_key)
-        self.assertIsNone(new_key.id)
-        self.assertEqual(new_key.name, _NAME)
-
-    def test_completed_key_on_partial_w_invalid(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertRaises(ValueError, key.completed_key, object())
-
-    def test_completed_key_on_complete(self):
-        key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
-        self.assertRaises(ValueError, key.completed_key, 5678)
-
-    def test_to_protobuf_defaults(self):
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-
-        _KIND = "KIND"
-        key = self._make_one(_KIND, project=self._DEFAULT_PROJECT)
-        pb = key.to_protobuf()
-        self.assertIsInstance(pb, entity_pb2.Key)
-
-        # Check partition ID.
-        self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT)
-        # Unset values are False-y.
-        self.assertEqual(pb.partition_id.namespace_id, "")
-
-        # Check the element PB matches the partial key and kind.
-        (elem,) = list(pb.path)
-        self.assertEqual(elem.kind, _KIND)
-        # Unset values are False-y.
-        self.assertEqual(elem.name, "")
-        # Unset values are False-y.
-        self.assertEqual(elem.id, 0)
-
-    def test_to_protobuf_w_explicit_project(self):
-        _PROJECT = "PROJECT-ALT"
-        key = self._make_one("KIND", project=_PROJECT)
-        pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.project_id, _PROJECT)
-
-    def test_to_protobuf_w_explicit_namespace(self):
-        _NAMESPACE = "NAMESPACE"
-        key = self._make_one(
-            "KIND", namespace=_NAMESPACE, project=self._DEFAULT_PROJECT
-        )
-        pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE)
-
-    def test_to_protobuf_w_explicit_path(self):
-        _PARENT = "PARENT"
-        _CHILD = "CHILD"
-        _ID = 1234
-        _NAME = "NAME"
-        key = self._make_one(_PARENT, _NAME, _CHILD, _ID, project=self._DEFAULT_PROJECT)
-        pb = key.to_protobuf()
-        elems = list(pb.path)
-        self.assertEqual(len(elems), 2)
-        self.assertEqual(elems[0].kind, _PARENT)
-        self.assertEqual(elems[0].name, _NAME)
-        self.assertEqual(elems[1].kind, _CHILD)
-        self.assertEqual(elems[1].id, _ID)
-
-    def test_to_protobuf_w_no_kind(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        # Force the 'kind' to be unset. Maybe `to_protobuf` should fail
-        # on this? The backend certainly will.
-        key._path[-1].pop("kind")
-        pb = key.to_protobuf()
-        # Unset values are False-y.
-        self.assertEqual(pb.path[0].kind, "")
-
-    def test_to_legacy_urlsafe(self):
-        key = self._make_one(
-            *self._URLSAFE_FLAT_PATH1,
-            project=self._URLSAFE_APP1,
-            namespace=self._URLSAFE_NAMESPACE1
-        )
-        # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
-        urlsafe = key.to_legacy_urlsafe()
-        self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE1)
-
-    def test_to_legacy_urlsafe_strip_padding(self):
-        key = self._make_one(*self._URLSAFE_FLAT_PATH2, project=self._URLSAFE_APP2)
-        # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
-        urlsafe = key.to_legacy_urlsafe()
-        self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE2)
-        # Make sure it started with base64 padding.
-        self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0)
-
-    def test_to_legacy_urlsafe_with_location_prefix(self):
-        key = self._make_one(*self._URLSAFE_FLAT_PATH3, project=self._URLSAFE_APP3)
-        urlsafe = key.to_legacy_urlsafe(location_prefix="s~")
-        self.assertEqual(urlsafe, self._URLSAFE_EXAMPLE3)
-
-    def test_from_legacy_urlsafe(self):
-        klass = self._get_target_class()
-        key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE1)
-
-        self.assertEqual("s~" + key.project, self._URLSAFE_APP1)
-        self.assertEqual(key.namespace, self._URLSAFE_NAMESPACE1)
-        self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH1)
-        # Also make sure we didn't accidentally set the parent.
-        self.assertIsNone(key._parent)
-        self.assertIsNotNone(key.parent)
-        self.assertIs(key._parent, key.parent)
-
-    def test_from_legacy_urlsafe_needs_padding(self):
-        klass = self._get_target_class()
-        # Make sure it will have base64 padding added.
-        self.assertNotEqual(len(self._URLSAFE_EXAMPLE2) % 4, 0)
-        key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE2)
-
-        self.assertEqual("s~" + key.project, self._URLSAFE_APP2)
-        self.assertIsNone(key.namespace)
-        self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH2)
-
-    def test_from_legacy_urlsafe_with_location_prefix(self):
-        klass = self._get_target_class()
-        # Make sure it will have base64 padding added.
-        key = klass.from_legacy_urlsafe(self._URLSAFE_EXAMPLE3)
-
-        self.assertEqual(key.project, self._URLSAFE_APP3)
-        self.assertIsNone(key.namespace)
-        self.assertEqual(key.flat_path, self._URLSAFE_FLAT_PATH3)
-
-    def test_is_partial_no_name_or_id(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertTrue(key.is_partial)
-
-    def test_is_partial_w_id(self):
-        _ID = 1234
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertFalse(key.is_partial)
-
-    def test_is_partial_w_name(self):
-        _NAME = "NAME"
-        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
-        self.assertFalse(key.is_partial)
-
-    def test_id_or_name_no_name_or_id(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.id_or_name)
-
-    def test_id_or_name_no_name_or_id_child(self):
-        key = self._make_one("KIND1", 1234, "KIND2", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.id_or_name)
-
-    def test_id_or_name_w_id_only(self):
-        _ID = 1234
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _ID)
-
-    def test_id_or_name_w_name_only(self):
-        _NAME = "NAME"
-        key = self._make_one("KIND", _NAME, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _NAME)
-
-    def test_id_or_name_w_id_zero(self):
-        _ID = 0
-        key = self._make_one("KIND", _ID, project=self._DEFAULT_PROJECT)
-        self.assertEqual(key.id_or_name, _ID)
-
-    def test_parent_default(self):
-        key = self._make_one("KIND", project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.parent)
-
-    def test_parent_explicit_top_level(self):
-        key = self._make_one("KIND", 1234, project=self._DEFAULT_PROJECT)
-        self.assertIsNone(key.parent)
-
-    def test_parent_explicit_nested(self):
-        _PARENT_KIND = "KIND1"
-        _PARENT_ID = 1234
-        _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
-        key = self._make_one(
-            _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT
-        )
-        self.assertEqual(key.parent.path, _PARENT_PATH)
-
-    def test_parent_multiple_calls(self):
-        _PARENT_KIND = "KIND1"
-        _PARENT_ID = 1234
-        _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
-        key = self._make_one(
-            _PARENT_KIND, _PARENT_ID, "KIND2", project=self._DEFAULT_PROJECT
+def test_key_ctor_parent_bad_type():
+    with pytest.raises(AttributeError):
+        _make_key("KIND2", 1234, parent=("KIND1", 1234), project=_DEFAULT_PROJECT)
+
+
+def test_key_ctor_parent_bad_namespace():
+    parent_key = _make_key("KIND", 1234, namespace="FOO", project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        _make_key(
+            "KIND2", 1234, namespace="BAR", parent=parent_key, PROJECT=_DEFAULT_PROJECT,
         )
-        parent = key.parent
-        self.assertEqual(parent.path, _PARENT_PATH)
-        new_parent = key.parent
-        self.assertIs(parent, new_parent)
 
 
-class Test__clean_app(unittest.TestCase):
+def test_key_ctor_parent_bad_project():
+    parent_key = _make_key("KIND", 1234, project="FOO")
+    with pytest.raises(ValueError):
+        _make_key("KIND2", 1234, parent=parent_key, project="BAR")
+
+
+def test_key_ctor_parent_empty_path():
+    parent_key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        _make_key(parent=parent_key)
+
+
+def test_key_ctor_explicit():
+    _PROJECT = "PROJECT-ALT"
+    _NAMESPACE = "NAMESPACE"
+    _KIND = "KIND"
+    _ID = 1234
+    _PATH = [{"kind": _KIND, "id": _ID}]
+    key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
+    assert key.project == _PROJECT
+    assert key.namespace == _NAMESPACE
+    assert key.kind == _KIND
+    assert key.path == _PATH
+
+
+def test_key_ctor_bad_kind():
+    with pytest.raises(ValueError):
+        _make_key(object(), project=_DEFAULT_PROJECT)
+
+
+def test_key_ctor_bad_id_or_name():
+    with pytest.raises(ValueError):
+        _make_key("KIND", object(), project=_DEFAULT_PROJECT)
+
+    with pytest.raises(ValueError):
+        _make_key("KIND", None, project=_DEFAULT_PROJECT)
+
+    with pytest.raises(ValueError):
+        _make_key("KIND", 10, "KIND2", None, project=_DEFAULT_PROJECT)
+
+
+def test_key__clone():
+    _PROJECT = "PROJECT-ALT"
+    _NAMESPACE = "NAMESPACE"
+    _KIND = "KIND"
+    _ID = 1234
+    _PATH = [{"kind": _KIND, "id": _ID}]
+    key = _make_key(_KIND, _ID, namespace=_NAMESPACE, project=_PROJECT)
+
+    clone = key._clone()
+
+    assert clone.project == _PROJECT
+    assert clone.namespace == _NAMESPACE
+    assert clone.kind == _KIND
+    assert clone.path == _PATH
+
+
+def test_key__clone_with_parent():
+    _PROJECT = "PROJECT-ALT"
+    _NAMESPACE = "NAMESPACE"
+    _KIND1 = "PARENT"
+    _KIND2 = "KIND"
+    _ID1 = 1234
+    _ID2 = 2345
+    _PATH = [{"kind": _KIND1, "id": _ID1}, {"kind": _KIND2, "id": _ID2}]
+
+    parent = _make_key(_KIND1, _ID1, namespace=_NAMESPACE, project=_PROJECT)
+    key = _make_key(_KIND2, _ID2, parent=parent)
+    assert key.parent is parent
+
+    clone = key._clone()
+
+    assert clone.parent is key.parent
+    assert clone.project == _PROJECT
+    assert clone.namespace == _NAMESPACE
+    assert clone.path == _PATH
+
+
+def test_key___eq_____ne___w_non_key():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _NAME = "one"
+    key = _make_key(_KIND, _NAME, project=_PROJECT)
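+    # Compare explicitly with == and != so both __eq__ and __ne__ are exercised.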
+    assert not key == object()
+    assert key != object()
+
+
+def test_key___eq_____ne___two_incomplete_keys_same_kind():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    key1 = _make_key(_KIND, project=_PROJECT)
+    key2 = _make_key(_KIND, project=_PROJECT)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___incomplete_key_w_complete_key_same_kind():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    key1 = _make_key(_KIND, project=_PROJECT)
+    key2 = _make_key(_KIND, _ID, project=_PROJECT)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___complete_key_w_incomplete_key_same_kind():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    key1 = _make_key(_KIND, _ID, project=_PROJECT)
+    key2 = _make_key(_KIND, project=_PROJECT)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_different_ids():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID1 = 1234
+    _ID2 = 2345
+    key1 = _make_key(_KIND, _ID1, project=_PROJECT)
+    key2 = _make_key(_KIND, _ID2, project=_PROJECT)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_id():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    key1 = _make_key(_KIND, _ID, project=_PROJECT)
+    key2 = _make_key(_KIND, _ID, project=_PROJECT)
+    assert key1 == key2
+    assert not key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_id_different_project():
+    _PROJECT1 = "PROJECT1"
+    _PROJECT2 = "PROJECT2"
+    _KIND = "KIND"
+    _ID = 1234
+    key1 = _make_key(_KIND, _ID, project=_PROJECT1)
+    key2 = _make_key(_KIND, _ID, project=_PROJECT2)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_id_different_namespace():
+    _PROJECT = "PROJECT"
+    _NAMESPACE1 = "NAMESPACE1"
+    _NAMESPACE2 = "NAMESPACE2"
+    _KIND = "KIND"
+    _ID = 1234
+    key1 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE1)
+    key2 = _make_key(_KIND, _ID, project=_PROJECT, namespace=_NAMESPACE2)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_different_names():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _NAME1 = "one"
+    _NAME2 = "two"
+    key1 = _make_key(_KIND, _NAME1, project=_PROJECT)
+    key2 = _make_key(_KIND, _NAME2, project=_PROJECT)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_name():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _NAME = "one"
+    key1 = _make_key(_KIND, _NAME, project=_PROJECT)
+    key2 = _make_key(_KIND, _NAME, project=_PROJECT)
+    assert key1 == key2
+    assert not key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_name_different_project():
+    _PROJECT1 = "PROJECT1"
+    _PROJECT2 = "PROJECT2"
+    _KIND = "KIND"
+    _NAME = "one"
+    key1 = _make_key(_KIND, _NAME, project=_PROJECT1)
+    key2 = _make_key(_KIND, _NAME, project=_PROJECT2)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___eq_____ne___same_kind_and_name_different_namespace():
+    _PROJECT = "PROJECT"
+    _NAMESPACE1 = "NAMESPACE1"
+    _NAMESPACE2 = "NAMESPACE2"
+    _KIND = "KIND"
+    _NAME = "one"
+    key1 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE1)
+    key2 = _make_key(_KIND, _NAME, project=_PROJECT, namespace=_NAMESPACE2)
+    assert not key1 == key2
+    assert key1 != key2
+
+
+def test_key___hash___incomplete():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    key = _make_key(_KIND, project=_PROJECT)
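+    # The hash must differ from a naive sum of the component hashes.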
+    assert hash(key) != hash(_KIND) + hash(_PROJECT) + hash(None)
+
+
+def test_key___hash___completed_w_id():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _ID = 1234
+    key = _make_key(_KIND, _ID, project=_PROJECT)
+    assert hash(key) != hash(_KIND) + hash(_ID) + hash(_PROJECT) + hash(None)
+
+
+def test_key___hash___completed_w_name():
+    _PROJECT = "PROJECT"
+    _KIND = "KIND"
+    _NAME = "NAME"
+    key = _make_key(_KIND, _NAME, project=_PROJECT)
+    assert hash(key) != hash(_KIND) + hash(_NAME) + hash(_PROJECT) + hash(None)
+
+
+def test_key_completed_key_on_partial_w_id():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    _ID = 1234
+    new_key = key.completed_key(_ID)
+    assert key is not new_key
+    assert new_key.id == _ID
+    assert new_key.name is None
+
+
+def test_key_completed_key_on_partial_w_name():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    _NAME = "NAME"
+    new_key = key.completed_key(_NAME)
+    assert key is not new_key
+    assert new_key.id is None
+    assert new_key.name == _NAME
+
+
+def test_key_completed_key_on_partial_w_invalid():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        key.completed_key(object())
+
+
+def test_key_completed_key_on_complete():
+    key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT)
+    with pytest.raises(ValueError):
+        key.completed_key(5678)
+
+
+def test_key_to_protobuf_defaults():
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+
+    _KIND = "KIND"
+    key = _make_key(_KIND, project=_DEFAULT_PROJECT)
+    pb = key.to_protobuf()
+    assert isinstance(pb, entity_pb2.Key)
+
+    # Check partition ID.
+    assert pb.partition_id.project_id == _DEFAULT_PROJECT
+    # Unset values are False-y.
+    assert pb.partition_id.namespace_id == ""
+
+    # Check the element PB matches the partial key and kind.
+    (elem,) = list(pb.path)
+    assert elem.kind == _KIND
+    # Unset values are False-y.
+    assert elem.name == ""
+    # Unset values are False-y.
+    assert elem.id == 0
+
+
+def test_key_to_protobuf_w_explicit_project():
+    _PROJECT = "PROJECT-ALT"
+    key = _make_key("KIND", project=_PROJECT)
+    pb = key.to_protobuf()
+    assert pb.partition_id.project_id == _PROJECT
+
+
+def test_key_to_protobuf_w_explicit_namespace():
+    _NAMESPACE = "NAMESPACE"
+    key = _make_key("KIND", namespace=_NAMESPACE, project=_DEFAULT_PROJECT)
+    pb = key.to_protobuf()
+    assert pb.partition_id.namespace_id == _NAMESPACE
+
+
+def test_key_to_protobuf_w_explicit_path():
+    _PARENT = "PARENT"
+    _CHILD = "CHILD"
+    _ID = 1234
+    _NAME = "NAME"
+    key = _make_key(_PARENT, _NAME, _CHILD, _ID, project=_DEFAULT_PROJECT)
+    pb = key.to_protobuf()
+    elems = list(pb.path)
+    assert len(elems) == 2
+    assert elems[0].kind == _PARENT
+    assert elems[0].name == _NAME
+    assert elems[1].kind == _CHILD
+    assert elems[1].id == _ID
+
+
+def test_key_to_protobuf_w_no_kind():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    # Force the 'kind' to be unset. Maybe `to_protobuf` should fail
+    # on this? The backend certainly will.
+    key._path[-1].pop("kind")
+    pb = key.to_protobuf()
+    # Unset values are False-y.
+    assert pb.path[0].kind == ""
+
+
+def test_key_to_legacy_urlsafe():
+    key = _make_key(
+        *_URLSAFE_FLAT_PATH1, project=_URLSAFE_APP1, namespace=_URLSAFE_NAMESPACE1
+    )
+    # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
+    urlsafe = key.to_legacy_urlsafe()
+    assert urlsafe == _URLSAFE_EXAMPLE1
+
+
+def test_key_to_legacy_urlsafe_strip_padding():
+    key = _make_key(*_URLSAFE_FLAT_PATH2, project=_URLSAFE_APP2)
+    # NOTE: ``key.project`` is somewhat "invalid" but that is OK.
+    urlsafe = key.to_legacy_urlsafe()
+    assert urlsafe == _URLSAFE_EXAMPLE2
+    # Make sure it started with base64 padding.
+    assert len(_URLSAFE_EXAMPLE2) % 4 != 0
+
+
+def test_key_to_legacy_urlsafe_with_location_prefix():
+    key = _make_key(*_URLSAFE_FLAT_PATH3, project=_URLSAFE_APP3)
+    urlsafe = key.to_legacy_urlsafe(location_prefix="s~")
+    assert urlsafe == _URLSAFE_EXAMPLE3
+
+
+def test_key_from_legacy_urlsafe():
+    from google.cloud.datastore.key import Key
+
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE1)
+
+    assert "s~" + key.project == _URLSAFE_APP1
+    assert key.namespace == _URLSAFE_NAMESPACE1
+    assert key.flat_path == _URLSAFE_FLAT_PATH1
+    # Also make sure we didn't accidentally set the parent.
+    assert key._parent is None
+    assert key.parent is not None
+    assert key._parent is key.parent
+
+
+def test_key_from_legacy_urlsafe_needs_padding():
+    from google.cloud.datastore.key import Key
+
+    # Make sure it will have base64 padding added.
+    assert len(_URLSAFE_EXAMPLE2) % 4 != 0
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE2)
+
+    assert "s~" + key.project == _URLSAFE_APP2
+    assert key.namespace is None
+    assert key.flat_path == _URLSAFE_FLAT_PATH2
+
+
+def test_key_from_legacy_urlsafe_with_location_prefix():
+    from google.cloud.datastore.key import Key
+
+    # Make sure it will have base64 padding added.
+    key = Key.from_legacy_urlsafe(_URLSAFE_EXAMPLE3)
+
+    assert key.project == _URLSAFE_APP3
+    assert key.namespace is None
+    assert key.flat_path == _URLSAFE_FLAT_PATH3
+
+
+def test_key_is_partial_no_name_or_id():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.is_partial
+
+
+def test_key_is_partial_w_id():
+    _ID = 1234
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert not key.is_partial
+
+
+def test_key_is_partial_w_name():
+    _NAME = "NAME"
+    key = _make_key("KIND", _NAME, project=_DEFAULT_PROJECT)
+    assert not key.is_partial
+
+
+def test_key_id_or_name_no_name_or_id():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.id_or_name is None
+
+
+def test_key_id_or_name_no_name_or_id_child():
+    key = _make_key("KIND1", 1234, "KIND2", project=_DEFAULT_PROJECT)
+    assert key.id_or_name is None
+
+
+def test_key_id_or_name_w_id_only():
+    _ID = 1234
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _ID
+
+
+def test_key_id_or_name_w_name_only():
+    _NAME = "NAME"
+    key = _make_key("KIND", _NAME, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _NAME
+
+
+def test_key_id_or_name_w_id_zero():
+    _ID = 0
+    key = _make_key("KIND", _ID, project=_DEFAULT_PROJECT)
+    assert key.id_or_name == _ID
+
+
+def test_key_parent_default():
+    key = _make_key("KIND", project=_DEFAULT_PROJECT)
+    assert key.parent is None
+
+
+def test_key_parent_explicit_top_level():
+    key = _make_key("KIND", 1234, project=_DEFAULT_PROJECT)
+    assert key.parent is None
+
+
+def test_key_parent_explicit_nested():
+    _PARENT_KIND = "KIND1"
+    _PARENT_ID = 1234
+    _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
+    key = _make_key(_PARENT_KIND, _PARENT_ID, "KIND2", project=_DEFAULT_PROJECT)
+    assert key.parent.path == _PARENT_PATH
+
+
+def test_key_parent_multiple_calls():
+    _PARENT_KIND = "KIND1"
+    _PARENT_ID = 1234
+    _PARENT_PATH = [{"kind": _PARENT_KIND, "id": _PARENT_ID}]
+    key = _make_key(_PARENT_KIND, _PARENT_ID, "KIND2", project=_DEFAULT_PROJECT)
+    parent = key.parent
+    assert parent.path == _PARENT_PATH
+    new_parent = key.parent
+    assert parent is new_parent
+
+
+def test__clean_app_w_already_clean():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_standard():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "s~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_european():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "e~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__clean_app_w_dev_server():
+    from google.cloud.datastore.key import _clean_app
+
+    app_str = "dev~" + PROJECT
+    assert _clean_app(app_str) == PROJECT
+
+
+def test__get_empty_w_unset():
+    from google.cloud.datastore.key import _get_empty
+
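+    # A value equal to its "empty" sentinel is treated as unset and maps to None.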
+    for empty_value in (u"", 0, 0.0, []):
+        ret_val = _get_empty(empty_value, empty_value)
+        assert ret_val is None
+
+
+def test__get_empty_w_actually_set():
+    from google.cloud.datastore.key import _get_empty
+
+    value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], []))
+    for value, empty_value in value_pairs:
+        ret_val = _get_empty(value, empty_value)
+        assert ret_val is value
+
+
+def test__check_database_id_w_empty_value():
+    from google.cloud.datastore.key import _check_database_id
+
+    ret_val = _check_database_id(u"")
+    # Really we are just happy there was no exception.
+    assert ret_val is None
+
+
+def test__check_database_id_w_failure():
+    from google.cloud.datastore.key import _check_database_id
+
+    with pytest.raises(ValueError):
+        _check_database_id(u"some-database-id")
+
+
+def test__add_id_or_name_add_id():
+    from google.cloud.datastore.key import _add_id_or_name
+
+    flat_path = []
+    id_ = 123
+    element_pb = _make_element_pb(id=id_)
+
+    ret_val = _add_id_or_name(flat_path, element_pb, False)
+    assert ret_val is None
+    assert flat_path == [id_]
+    ret_val = _add_id_or_name(flat_path, element_pb, True)
+    assert ret_val is None
+    assert flat_path == [id_, id_]
+
+
+def test__add_id_or_name_add_name():
+    from google.cloud.datastore.key import _add_id_or_name
+
+    flat_path = []
+    name = "moon-shadow"
+    element_pb = _make_element_pb(name=name)
+
+    ret_val = _add_id_or_name(flat_path, element_pb, False)
+    assert ret_val is None
+    assert flat_path == [name]
+    ret_val = _add_id_or_name(flat_path, element_pb, True)
+    assert ret_val is None
+    assert flat_path == [name, name]
+
+
+def test__add_id_or_name_both_present():
+    from google.cloud.datastore.key import _add_id_or_name
+
+    element_pb = _make_element_pb(id=17, name="seventeen")
+    flat_path = []
+    with pytest.raises(ValueError):
+        _add_id_or_name(flat_path, element_pb, False)
+    with pytest.raises(ValueError):
+        _add_id_or_name(flat_path, element_pb, True)
+
+    assert flat_path == []
+
+
+def test__add_id_or_name_both_empty_failure():
+    from google.cloud.datastore.key import _add_id_or_name
+
+    element_pb = _make_element_pb()
+    flat_path = []
+    with pytest.raises(ValueError):
+        _add_id_or_name(flat_path, element_pb, False)
+
+    assert flat_path == []
+
+
+def test__add_id_or_name_both_empty_allowed():
+    from google.cloud.datastore.key import _add_id_or_name
+
+    element_pb = _make_element_pb()
+    flat_path = []
+    ret_val = _add_id_or_name(flat_path, element_pb, True)
+    assert ret_val is None
+    assert flat_path == []
+
 
-    PROJECT = "my-prahjekt"
+def test__get_flat_path_one_pair():
+    from google.cloud.datastore.key import _get_flat_path
 
-    @staticmethod
-    def _call_fut(app_str):
-        from google.cloud.datastore.key import _clean_app
+    kind = "Widget"
+    name = "Scooter"
+    element_pb = _make_element_pb(type=kind, name=name)
+    path_pb = _make_path_pb(element_pb)
+    flat_path = _get_flat_path(path_pb)
+    assert flat_path == (kind, name)
 
-        return _clean_app(app_str)
 
-    def test_already_clean(self):
-        app_str = self.PROJECT
-        self.assertEqual(self._call_fut(app_str), self.PROJECT)
+def test__get_flat_path_two_pairs():
+    from google.cloud.datastore.key import _get_flat_path
 
-    def test_standard(self):
-        app_str = "s~" + self.PROJECT
-        self.assertEqual(self._call_fut(app_str), self.PROJECT)
+    kind1 = "parent"
+    id1 = 59
+    element_pb1 = _make_element_pb(type=kind1, id=id1)
 
-    def test_european(self):
-        app_str = "e~" + self.PROJECT
-        self.assertEqual(self._call_fut(app_str), self.PROJECT)
+    kind2 = "child"
+    name2 = "naem"
+    element_pb2 = _make_element_pb(type=kind2, name=name2)
 
-    def test_dev_server(self):
-        app_str = "dev~" + self.PROJECT
-        self.assertEqual(self._call_fut(app_str), self.PROJECT)
+    path_pb = _make_path_pb(element_pb1, element_pb2)
+    flat_path = _get_flat_path(path_pb)
+    assert flat_path == (kind1, id1, kind2, name2)
 
 
-class Test__get_empty(unittest.TestCase):
-    @staticmethod
-    def _call_fut(value, empty_value):
-        from google.cloud.datastore.key import _get_empty
+def test__get_flat_path_partial_key():
+    from google.cloud.datastore.key import _get_flat_path
 
-        return _get_empty(value, empty_value)
+    kind1 = "grandparent"
+    name1 = "cats"
+    element_pb1 = _make_element_pb(type=kind1, name=name1)
 
-    def test_unset(self):
-        for empty_value in (u"", 0, 0.0, []):
-            ret_val = self._call_fut(empty_value, empty_value)
-            self.assertIsNone(ret_val)
+    kind2 = "parent"
+    id2 = 1337
+    element_pb2 = _make_element_pb(type=kind2, id=id2)
 
-    def test_actually_set(self):
-        value_pairs = ((u"hello", u""), (10, 0), (3.14, 0.0), (["stuff", "here"], []))
-        for value, empty_value in value_pairs:
-            ret_val = self._call_fut(value, empty_value)
-            self.assertIs(ret_val, value)
+    kind3 = "child"
+    element_pb3 = _make_element_pb(type=kind3)
 
+    path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3)
+    flat_path = _get_flat_path(path_pb)
+    assert flat_path == (kind1, name1, kind2, id2, kind3)
 
-class Test__check_database_id(unittest.TestCase):
-    @staticmethod
-    def _call_fut(database_id):
-        from google.cloud.datastore.key import _check_database_id
 
-        return _check_database_id(database_id)
+def test__to_legacy_path_w_one_pair():
+    from google.cloud.datastore.key import _to_legacy_path
 
-    def test_empty_value(self):
-        ret_val = self._call_fut(u"")
-        # Really we are just happy there was no exception.
-        self.assertIsNone(ret_val)
+    kind = "Widget"
+    name = "Scooter"
+    dict_path = [{"kind": kind, "name": name}]
+    path_pb = _to_legacy_path(dict_path)
 
-    def test_failure(self):
-        with self.assertRaises(ValueError):
-            self._call_fut(u"some-database-id")
+    element_pb = _make_element_pb(type=kind, name=name)
+    expected_pb = _make_path_pb(element_pb)
+    assert path_pb == expected_pb
 
 
-class Test__add_id_or_name(unittest.TestCase):
-    @staticmethod
-    def _call_fut(flat_path, element_pb, empty_allowed):
-        from google.cloud.datastore.key import _add_id_or_name
+def test__to_legacy_path_w_two_pairs():
+    from google.cloud.datastore.key import _to_legacy_path
 
-        return _add_id_or_name(flat_path, element_pb, empty_allowed)
+    kind1 = "parent"
+    id1 = 59
 
-    def test_add_id(self):
-        flat_path = []
-        id_ = 123
-        element_pb = _make_element_pb(id=id_)
+    kind2 = "child"
+    name2 = "naem"
 
-        ret_val = self._call_fut(flat_path, element_pb, False)
-        self.assertIsNone(ret_val)
-        self.assertEqual(flat_path, [id_])
-        ret_val = self._call_fut(flat_path, element_pb, True)
-        self.assertIsNone(ret_val)
-        self.assertEqual(flat_path, [id_, id_])
+    dict_path = [{"kind": kind1, "id": id1}, {"kind": kind2, "name": name2}]
+    path_pb = _to_legacy_path(dict_path)
 
-    def test_add_name(self):
-        flat_path = []
-        name = "moon-shadow"
-        element_pb = _make_element_pb(name=name)
+    element_pb1 = _make_element_pb(type=kind1, id=id1)
+    element_pb2 = _make_element_pb(type=kind2, name=name2)
+    expected_pb = _make_path_pb(element_pb1, element_pb2)
+    assert path_pb == expected_pb
 
-        ret_val = self._call_fut(flat_path, element_pb, False)
-        self.assertIsNone(ret_val)
-        self.assertEqual(flat_path, [name])
-        ret_val = self._call_fut(flat_path, element_pb, True)
-        self.assertIsNone(ret_val)
-        self.assertEqual(flat_path, [name, name])
 
-    def test_both_present(self):
-        element_pb = _make_element_pb(id=17, name="seventeen")
-        flat_path = []
-        with self.assertRaises(ValueError):
-            self._call_fut(flat_path, element_pb, False)
-        with self.assertRaises(ValueError):
-            self._call_fut(flat_path, element_pb, True)
+def test__to_legacy_path_w_partial_key():
+    from google.cloud.datastore.key import _to_legacy_path
 
-        self.assertEqual(flat_path, [])
+    kind1 = "grandparent"
+    name1 = "cats"
 
-    def test_both_empty_failure(self):
-        element_pb = _make_element_pb()
-        flat_path = []
-        with self.assertRaises(ValueError):
-            self._call_fut(flat_path, element_pb, False)
+    kind2 = "parent"
+    id2 = 1337
 
-        self.assertEqual(flat_path, [])
-
-    def test_both_empty_allowed(self):
-        element_pb = _make_element_pb()
-        flat_path = []
-        ret_val = self._call_fut(flat_path, element_pb, True)
-        self.assertIsNone(ret_val)
-        self.assertEqual(flat_path, [])
-
-
-class Test__get_flat_path(unittest.TestCase):
-    @staticmethod
-    def _call_fut(path_pb):
-        from google.cloud.datastore.key import _get_flat_path
-
-        return _get_flat_path(path_pb)
-
-    def test_one_pair(self):
-        kind = "Widget"
-        name = "Scooter"
-        element_pb = _make_element_pb(type=kind, name=name)
-        path_pb = _make_path_pb(element_pb)
-        flat_path = self._call_fut(path_pb)
-        self.assertEqual(flat_path, (kind, name))
-
-    def test_two_pairs(self):
-        kind1 = "parent"
-        id1 = 59
-        element_pb1 = _make_element_pb(type=kind1, id=id1)
-
-        kind2 = "child"
-        name2 = "naem"
-        element_pb2 = _make_element_pb(type=kind2, name=name2)
-
-        path_pb = _make_path_pb(element_pb1, element_pb2)
-        flat_path = self._call_fut(path_pb)
-        self.assertEqual(flat_path, (kind1, id1, kind2, name2))
-
-    def test_partial_key(self):
-        kind1 = "grandparent"
-        name1 = "cats"
-        element_pb1 = _make_element_pb(type=kind1, name=name1)
-
-        kind2 = "parent"
-        id2 = 1337
-        element_pb2 = _make_element_pb(type=kind2, id=id2)
-
-        kind3 = "child"
-        element_pb3 = _make_element_pb(type=kind3)
-
-        path_pb = _make_path_pb(element_pb1, element_pb2, element_pb3)
-        flat_path = self._call_fut(path_pb)
-        self.assertEqual(flat_path, (kind1, name1, kind2, id2, kind3))
-
-
-class Test__to_legacy_path(unittest.TestCase):
-    @staticmethod
-    def _call_fut(dict_path):
-        from google.cloud.datastore.key import _to_legacy_path
-
-        return _to_legacy_path(dict_path)
-
-    def test_one_pair(self):
-        kind = "Widget"
-        name = "Scooter"
-        dict_path = [{"kind": kind, "name": name}]
-        path_pb = self._call_fut(dict_path)
-
-        element_pb = _make_element_pb(type=kind, name=name)
-        expected_pb = _make_path_pb(element_pb)
-        self.assertEqual(path_pb, expected_pb)
-
-    def test_two_pairs(self):
-        kind1 = "parent"
-        id1 = 59
-
-        kind2 = "child"
-        name2 = "naem"
-
-        dict_path = [{"kind": kind1, "id": id1}, {"kind": kind2, "name": name2}]
-        path_pb = self._call_fut(dict_path)
-
-        element_pb1 = _make_element_pb(type=kind1, id=id1)
-        element_pb2 = _make_element_pb(type=kind2, name=name2)
-        expected_pb = _make_path_pb(element_pb1, element_pb2)
-        self.assertEqual(path_pb, expected_pb)
-
-    def test_partial_key(self):
-        kind1 = "grandparent"
-        name1 = "cats"
+    kind3 = "child"
 
-        kind2 = "parent"
-        id2 = 1337
+    dict_path = [
+        {"kind": kind1, "name": name1},
+        {"kind": kind2, "id": id2},
+        {"kind": kind3},
+    ]
+    path_pb = _to_legacy_path(dict_path)
 
-        kind3 = "child"
-
-        dict_path = [
-            {"kind": kind1, "name": name1},
-            {"kind": kind2, "id": id2},
-            {"kind": kind3},
-        ]
-        path_pb = self._call_fut(dict_path)
-
-        element_pb1 = _make_element_pb(type=kind1, name=name1)
-        element_pb2 = _make_element_pb(type=kind2, id=id2)
-        element_pb3 = _make_element_pb(type=kind3)
-        expected_pb = _make_path_pb(element_pb1, element_pb2, element_pb3)
-        self.assertEqual(path_pb, expected_pb)
+    element_pb1 = _make_element_pb(type=kind1, name=name1)
+    element_pb2 = _make_element_pb(type=kind2, id=id2)
+    element_pb3 = _make_element_pb(type=kind3)
+    expected_pb = _make_path_pb(element_pb1, element_pb2, element_pb3)
+    assert path_pb == expected_pb
 
 
 def _make_element_pb(**kwargs):
diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py
index dcb4e9f5..3cbd95b8 100644
--- a/tests/unit/test_query.py
+++ b/tests/unit/test_query.py
@@ -12,770 +12,791 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 import mock
 import pytest
 
+_PROJECT = "PROJECT"
+
+
+def test_query_ctor_defaults():
+    client = _make_client()
+    query = _make_query(client)
+    assert query._client is client
+    assert query.project == client.project
+    assert query.kind is None
+    assert query.namespace == client.namespace
+    assert query.ancestor is None
+    assert query.filters == []
+    assert query.projection == []
+    assert query.order == []
+    assert query.distinct_on == []
+
+
+def test_query_ctor_explicit():
+    from google.cloud.datastore.key import Key
+
+    _PROJECT = "OTHER_PROJECT"
+    _KIND = "KIND"
+    _NAMESPACE = "OTHER_NAMESPACE"
+    client = _make_client()
+    ancestor = Key("ANCESTOR", 123, project=_PROJECT)
+    FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)]
+    PROJECTION = ["foo", "bar", "baz"]
+    ORDER = ["foo", "bar"]
+    DISTINCT_ON = ["foo"]
+    query = _make_query(
+        client,
+        kind=_KIND,
+        project=_PROJECT,
+        namespace=_NAMESPACE,
+        ancestor=ancestor,
+        filters=FILTERS,
+        projection=PROJECTION,
+        order=ORDER,
+        distinct_on=DISTINCT_ON,
+    )
+    assert query._client is client
+    assert query.project == _PROJECT
+    assert query.kind == _KIND
+    assert query.namespace == _NAMESPACE
+    assert query.ancestor.path == ancestor.path
+    assert query.filters == FILTERS
+    assert query.projection == PROJECTION
+    assert query.order == ORDER
+    assert query.distinct_on == DISTINCT_ON
 
-class TestQuery(unittest.TestCase):
-
-    _PROJECT = "PROJECT"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.query import Query
-
-        return Query
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def _make_client(self):
-        return _Client(self._PROJECT)
-
-    def test_ctor_defaults(self):
-        client = self._make_client()
-        query = self._make_one(client)
-        self.assertIs(query._client, client)
-        self.assertEqual(query.project, client.project)
-        self.assertIsNone(query.kind)
-        self.assertEqual(query.namespace, client.namespace)
-        self.assertIsNone(query.ancestor)
-        self.assertEqual(query.filters, [])
-        self.assertEqual(query.projection, [])
-        self.assertEqual(query.order, [])
-        self.assertEqual(query.distinct_on, [])
-
-    def test_ctor_explicit(self):
-        from google.cloud.datastore.key import Key
-
-        _PROJECT = "OTHER_PROJECT"
-        _KIND = "KIND"
-        _NAMESPACE = "OTHER_NAMESPACE"
-        client = self._make_client()
-        ancestor = Key("ANCESTOR", 123, project=_PROJECT)
-        FILTERS = [("foo", "=", "Qux"), ("bar", "<", 17)]
-        PROJECTION = ["foo", "bar", "baz"]
-        ORDER = ["foo", "bar"]
-        DISTINCT_ON = ["foo"]
-        query = self._make_one(
-            client,
-            kind=_KIND,
-            project=_PROJECT,
-            namespace=_NAMESPACE,
-            ancestor=ancestor,
-            filters=FILTERS,
-            projection=PROJECTION,
-            order=ORDER,
-            distinct_on=DISTINCT_ON,
-        )
-        self.assertIs(query._client, client)
-        self.assertEqual(query.project, _PROJECT)
-        self.assertEqual(query.kind, _KIND)
-        self.assertEqual(query.namespace, _NAMESPACE)
-        self.assertEqual(query.ancestor.path, ancestor.path)
-        self.assertEqual(query.filters, FILTERS)
-        self.assertEqual(query.projection, PROJECTION)
-        self.assertEqual(query.order, ORDER)
-        self.assertEqual(query.distinct_on, DISTINCT_ON)
-
-    def test_ctor_bad_projection(self):
-        BAD_PROJECTION = object()
-        self.assertRaises(
-            TypeError, self._make_one, self._make_client(), projection=BAD_PROJECTION
-        )
 
-    def test_ctor_bad_order(self):
-        BAD_ORDER = object()
-        self.assertRaises(
-            TypeError, self._make_one, self._make_client(), order=BAD_ORDER
-        )
+def test_query_ctor_bad_projection():
+    BAD_PROJECTION = object()
+    with pytest.raises(TypeError):
+        _make_query(_make_client(), projection=BAD_PROJECTION)
 
-    def test_ctor_bad_distinct_on(self):
-        BAD_DISTINCT_ON = object()
-        self.assertRaises(
-            TypeError, self._make_one, self._make_client(), distinct_on=BAD_DISTINCT_ON
-        )
 
-    def test_ctor_bad_filters(self):
-        FILTERS_CANT_UNPACK = [("one", "two")]
-        self.assertRaises(
-            ValueError, self._make_one, self._make_client(), filters=FILTERS_CANT_UNPACK
-        )
+def test_query_ctor_bad_order():
+    BAD_ORDER = object()
+    with pytest.raises(TypeError):
+        _make_query(_make_client(), order=BAD_ORDER)
 
-    def test_namespace_setter_w_non_string(self):
-        query = self._make_one(self._make_client())
-
-        def _assign(val):
-            query.namespace = val
-
-        self.assertRaises(ValueError, _assign, object())
-
-    def test_namespace_setter(self):
-        _NAMESPACE = "OTHER_NAMESPACE"
-        query = self._make_one(self._make_client())
-        query.namespace = _NAMESPACE
-        self.assertEqual(query.namespace, _NAMESPACE)
-
-    def test_kind_setter_w_non_string(self):
-        query = self._make_one(self._make_client())
-
-        def _assign(val):
-            query.kind = val
-
-        self.assertRaises(TypeError, _assign, object())
-
-    def test_kind_setter_wo_existing(self):
-        _KIND = "KIND"
-        query = self._make_one(self._make_client())
-        query.kind = _KIND
-        self.assertEqual(query.kind, _KIND)
-
-    def test_kind_setter_w_existing(self):
-        _KIND_BEFORE = "KIND_BEFORE"
-        _KIND_AFTER = "KIND_AFTER"
-        query = self._make_one(self._make_client(), kind=_KIND_BEFORE)
-        self.assertEqual(query.kind, _KIND_BEFORE)
-        query.kind = _KIND_AFTER
-        self.assertEqual(query.project, self._PROJECT)
-        self.assertEqual(query.kind, _KIND_AFTER)
-
-    def test_ancestor_setter_w_non_key(self):
-        query = self._make_one(self._make_client())
-
-        def _assign(val):
-            query.ancestor = val
-
-        self.assertRaises(TypeError, _assign, object())
-        self.assertRaises(TypeError, _assign, ["KIND", "NAME"])
-
-    def test_ancestor_setter_w_key(self):
-        from google.cloud.datastore.key import Key
-
-        _NAME = "NAME"
-        key = Key("KIND", 123, project=self._PROJECT)
-        query = self._make_one(self._make_client())
-        query.add_filter("name", "=", _NAME)
-        query.ancestor = key
-        self.assertEqual(query.ancestor.path, key.path)
-
-    def test_ancestor_deleter_w_key(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key("KIND", 123, project=self._PROJECT)
-        query = self._make_one(client=self._make_client(), ancestor=key)
-        del query.ancestor
-        self.assertIsNone(query.ancestor)
-
-    def test_add_filter_setter_w_unknown_operator(self):
-        query = self._make_one(self._make_client())
-        self.assertRaises(ValueError, query.add_filter, "firstname", "~~", "John")
-
-    def test_add_filter_w_known_operator(self):
-        query = self._make_one(self._make_client())
-        query.add_filter("firstname", "=", "John")
-        self.assertEqual(query.filters, [("firstname", "=", "John")])
-
-    def test_add_filter_w_all_operators(self):
-        query = self._make_one(self._make_client())
-        query.add_filter("leq_prop", "<=", "val1")
-        query.add_filter("geq_prop", ">=", "val2")
-        query.add_filter("lt_prop", "<", "val3")
-        query.add_filter("gt_prop", ">", "val4")
-        query.add_filter("eq_prop", "=", "val5")
-        self.assertEqual(len(query.filters), 5)
-        self.assertEqual(query.filters[0], ("leq_prop", "<=", "val1"))
-        self.assertEqual(query.filters[1], ("geq_prop", ">=", "val2"))
-        self.assertEqual(query.filters[2], ("lt_prop", "<", "val3"))
-        self.assertEqual(query.filters[3], ("gt_prop", ">", "val4"))
-        self.assertEqual(query.filters[4], ("eq_prop", "=", "val5"))
-
-    def test_add_filter_w_known_operator_and_entity(self):
-        from google.cloud.datastore.entity import Entity
-
-        query = self._make_one(self._make_client())
-        other = Entity()
-        other["firstname"] = "John"
-        other["lastname"] = "Smith"
-        query.add_filter("other", "=", other)
-        self.assertEqual(query.filters, [("other", "=", other)])
-
-    def test_add_filter_w_whitespace_property_name(self):
-        query = self._make_one(self._make_client())
-        PROPERTY_NAME = "  property with lots of space "
-        query.add_filter(PROPERTY_NAME, "=", "John")
-        self.assertEqual(query.filters, [(PROPERTY_NAME, "=", "John")])
-
-    def test_add_filter___key__valid_key(self):
-        from google.cloud.datastore.key import Key
-
-        query = self._make_one(self._make_client())
-        key = Key("Foo", project=self._PROJECT)
-        query.add_filter("__key__", "=", key)
-        self.assertEqual(query.filters, [("__key__", "=", key)])
-
-    def test_add_filter_return_query_obj(self):
-        from google.cloud.datastore.query import Query
-
-        query = self._make_one(self._make_client())
-        query_obj = query.add_filter("firstname", "=", "John")
-        self.assertIsInstance(query_obj, Query)
-        self.assertEqual(query_obj.filters, [("firstname", "=", "John")])
-
-    def test_filter___key__not_equal_operator(self):
-        from google.cloud.datastore.key import Key
-
-        key = Key("Foo", project=self._PROJECT)
-        query = self._make_one(self._make_client())
-        query.add_filter("__key__", "<", key)
-        self.assertEqual(query.filters, [("__key__", "<", key)])
-
-    def test_filter___key__invalid_value(self):
-        query = self._make_one(self._make_client())
-        self.assertRaises(ValueError, query.add_filter, "__key__", "=", None)
-
-    def test_projection_setter_empty(self):
-        query = self._make_one(self._make_client())
-        query.projection = []
-        self.assertEqual(query.projection, [])
-
-    def test_projection_setter_string(self):
-        query = self._make_one(self._make_client())
-        query.projection = "field1"
-        self.assertEqual(query.projection, ["field1"])
-
-    def test_projection_setter_non_empty(self):
-        query = self._make_one(self._make_client())
-        query.projection = ["field1", "field2"]
-        self.assertEqual(query.projection, ["field1", "field2"])
-
-    def test_projection_setter_multiple_calls(self):
-        _PROJECTION1 = ["field1", "field2"]
-        _PROJECTION2 = ["field3"]
-        query = self._make_one(self._make_client())
-        query.projection = _PROJECTION1
-        self.assertEqual(query.projection, _PROJECTION1)
-        query.projection = _PROJECTION2
-        self.assertEqual(query.projection, _PROJECTION2)
-
-    def test_keys_only(self):
-        query = self._make_one(self._make_client())
-        query.keys_only()
-        self.assertEqual(query.projection, ["__key__"])
-
-    def test_key_filter_defaults(self):
-        from google.cloud.datastore.key import Key
-
-        client = self._make_client()
-        query = self._make_one(client)
-        self.assertEqual(query.filters, [])
-        key = Key("Kind", 1234, project="project")
-        query.key_filter(key)
-        self.assertEqual(query.filters, [("__key__", "=", key)])
-
-    def test_key_filter_explicit(self):
-        from google.cloud.datastore.key import Key
-
-        client = self._make_client()
-        query = self._make_one(client)
-        self.assertEqual(query.filters, [])
-        key = Key("Kind", 1234, project="project")
-        query.key_filter(key, operator=">")
-        self.assertEqual(query.filters, [("__key__", ">", key)])
-
-    def test_order_setter_empty(self):
-        query = self._make_one(self._make_client(), order=["foo", "-bar"])
-        query.order = []
-        self.assertEqual(query.order, [])
-
-    def test_order_setter_string(self):
-        query = self._make_one(self._make_client())
-        query.order = "field"
-        self.assertEqual(query.order, ["field"])
-
-    def test_order_setter_single_item_list_desc(self):
-        query = self._make_one(self._make_client())
-        query.order = ["-field"]
-        self.assertEqual(query.order, ["-field"])
-
-    def test_order_setter_multiple(self):
-        query = self._make_one(self._make_client())
-        query.order = ["foo", "-bar"]
-        self.assertEqual(query.order, ["foo", "-bar"])
-
-    def test_distinct_on_setter_empty(self):
-        query = self._make_one(self._make_client(), distinct_on=["foo", "bar"])
-        query.distinct_on = []
-        self.assertEqual(query.distinct_on, [])
-
-    def test_distinct_on_setter_string(self):
-        query = self._make_one(self._make_client())
-        query.distinct_on = "field1"
-        self.assertEqual(query.distinct_on, ["field1"])
-
-    def test_distinct_on_setter_non_empty(self):
-        query = self._make_one(self._make_client())
-        query.distinct_on = ["field1", "field2"]
-        self.assertEqual(query.distinct_on, ["field1", "field2"])
-
-    def test_distinct_on_multiple_calls(self):
-        _DISTINCT_ON1 = ["field1", "field2"]
-        _DISTINCT_ON2 = ["field3"]
-        query = self._make_one(self._make_client())
-        query.distinct_on = _DISTINCT_ON1
-        self.assertEqual(query.distinct_on, _DISTINCT_ON1)
-        query.distinct_on = _DISTINCT_ON2
-        self.assertEqual(query.distinct_on, _DISTINCT_ON2)
-
-    def test_fetch_defaults_w_client_attr(self):
-        from google.cloud.datastore.query import Iterator
-
-        client = self._make_client()
-        query = self._make_one(client)
-
-        iterator = query.fetch()
-
-        self.assertIsInstance(iterator, Iterator)
-        self.assertIs(iterator._query, query)
-        self.assertIs(iterator.client, client)
-        self.assertIsNone(iterator.max_results)
-        self.assertEqual(iterator._offset, 0)
-        self.assertIsNone(iterator._retry)
-        self.assertIsNone(iterator._timeout)
-
-    def test_fetch_w_explicit_client_w_retry_w_timeout(self):
-        from google.cloud.datastore.query import Iterator
-
-        client = self._make_client()
-        other_client = self._make_client()
-        query = self._make_one(client)
-        retry = mock.Mock()
-        timeout = 100000
-
-        iterator = query.fetch(
-            limit=7, offset=8, client=other_client, retry=retry, timeout=timeout
-        )
 
-        self.assertIsInstance(iterator, Iterator)
-        self.assertIs(iterator._query, query)
-        self.assertIs(iterator.client, other_client)
-        self.assertEqual(iterator.max_results, 7)
-        self.assertEqual(iterator._offset, 8)
-        self.assertEqual(iterator._retry, retry)
-        self.assertEqual(iterator._timeout, timeout)
-
-
-class TestIterator(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.query import Iterator
-
-        return Iterator
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_constructor_defaults(self):
-        query = object()
-        client = object()
-
-        iterator = self._make_one(query, client)
-
-        self.assertFalse(iterator._started)
-        self.assertIs(iterator.client, client)
-        self.assertIsNone(iterator.max_results)
-        self.assertEqual(iterator.page_number, 0)
-        self.assertIsNone(iterator.next_page_token)
-        self.assertEqual(iterator.num_results, 0)
-        self.assertIs(iterator._query, query)
-        self.assertIsNone(iterator._offset)
-        self.assertIsNone(iterator._end_cursor)
-        self.assertTrue(iterator._more_results)
-        self.assertIsNone(iterator._retry)
-        self.assertIsNone(iterator._timeout)
-
-    def test_constructor_explicit(self):
-        query = object()
-        client = object()
-        limit = 43
-        offset = 9
-        start_cursor = b"8290\xff"
-        end_cursor = b"so20rc\ta"
-        retry = mock.Mock()
-        timeout = 100000
-
-        iterator = self._make_one(
-            query,
-            client,
-            limit=limit,
-            offset=offset,
-            start_cursor=start_cursor,
-            end_cursor=end_cursor,
-            retry=retry,
-            timeout=timeout,
-        )
+def test_query_ctor_bad_distinct_on():
+    BAD_DISTINCT_ON = object()
+    with pytest.raises(TypeError):
+        _make_query(_make_client(), distinct_on=BAD_DISTINCT_ON)
 
-        self.assertFalse(iterator._started)
-        self.assertIs(iterator.client, client)
-        self.assertEqual(iterator.max_results, limit)
-        self.assertEqual(iterator.page_number, 0)
-        self.assertEqual(iterator.next_page_token, start_cursor)
-        self.assertEqual(iterator.num_results, 0)
-        self.assertIs(iterator._query, query)
-        self.assertEqual(iterator._offset, offset)
-        self.assertEqual(iterator._end_cursor, end_cursor)
-        self.assertTrue(iterator._more_results)
-        self.assertEqual(iterator._retry, retry)
-        self.assertEqual(iterator._timeout, timeout)
-
-    def test__build_protobuf_empty(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
-        from google.cloud.datastore.query import Query
-
-        client = _Client(None)
-        query = Query(client)
-        iterator = self._make_one(query, client)
-
-        pb = iterator._build_protobuf()
-        expected_pb = query_pb2.Query()
-        self.assertEqual(pb, expected_pb)
-
-    def test__build_protobuf_all_values_except_offset(self):
-        # this test and the following (all_values_except_start_and_end_cursor)
-        # test mutually exclusive states; the offset is ignored
-        # if a start_cursor is supplied
-        from google.cloud.datastore_v1.types import query as query_pb2
-        from google.cloud.datastore.query import Query
-
-        client = _Client(None)
-        query = Query(client)
-        limit = 15
-        start_bytes = b"i\xb7\x1d"
-        start_cursor = "abcd"
-        end_bytes = b"\xc3\x1c\xb3"
-        end_cursor = "wxyz"
-        iterator = self._make_one(
-            query, client, limit=limit, start_cursor=start_cursor, end_cursor=end_cursor
-        )
-        self.assertEqual(iterator.max_results, limit)
-        iterator.num_results = 4
-        iterator._skipped_results = 1
-
-        pb = iterator._build_protobuf()
-        expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes)
-        expected_pb._pb.limit.value = limit - iterator.num_results
-        self.assertEqual(pb, expected_pb)
-
-    def test__build_protobuf_all_values_except_start_and_end_cursor(self):
-        # this test and the previous (all_values_except_start_offset)
-        # test mutually exclusive states; the offset is ignored
-        # if a start_cursor is supplied
-        from google.cloud.datastore_v1.types import query as query_pb2
-        from google.cloud.datastore.query import Query
-
-        client = _Client(None)
-        query = Query(client)
-        limit = 15
-        offset = 9
-        iterator = self._make_one(query, client, limit=limit, offset=offset)
-        self.assertEqual(iterator.max_results, limit)
-        iterator.num_results = 4
-
-        pb = iterator._build_protobuf()
-        expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results)
-        expected_pb._pb.limit.value = limit - iterator.num_results
-        self.assertEqual(pb, expected_pb)
-
-    def test__process_query_results(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        iterator = self._make_one(None, None, end_cursor="abcd")
-        self.assertIsNotNone(iterator._end_cursor)
-
-        entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")]
-        cursor_as_bytes = b"\x9ai\xe7"
-        cursor = b"mmnn"
-        skipped_results = 4
-        more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
-        response_pb = _make_query_response(
-            entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
-        )
-        result = iterator._process_query_results(response_pb)
-        self.assertEqual(result, entity_pbs)
 
-        self.assertEqual(iterator._skipped_results, skipped_results)
-        self.assertEqual(iterator.next_page_token, cursor)
-        self.assertTrue(iterator._more_results)
+def test_query_ctor_bad_filters():
+    FILTERS_CANT_UNPACK = [("one", "two")]
+    with pytest.raises(ValueError):
+        _make_query(_make_client(), filters=FILTERS_CANT_UNPACK)
 
-    def test__process_query_results_done(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
 
-        iterator = self._make_one(None, None, end_cursor="abcd")
-        self.assertIsNotNone(iterator._end_cursor)
+def test_query_namespace_setter_w_non_string():
+    query = _make_query(_make_client())
+    with pytest.raises(ValueError):
+        query.namespace = object()
 
-        entity_pbs = [_make_entity("World", 1234, "PROJECT")]
-        cursor_as_bytes = b"\x9ai\xe7"
-        skipped_results = 44
-        more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS
-        response_pb = _make_query_response(
-            entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
-        )
-        result = iterator._process_query_results(response_pb)
-        self.assertEqual(result, entity_pbs)
-
-        self.assertEqual(iterator._skipped_results, skipped_results)
-        self.assertIsNone(iterator.next_page_token)
-        self.assertFalse(iterator._more_results)
-
-    @pytest.mark.filterwarnings("ignore")
-    def test__process_query_results_bad_enum(self):
-        iterator = self._make_one(None, None)
-        more_results_enum = 999
-        response_pb = _make_query_response([], b"", more_results_enum, 0)
-        with self.assertRaises(ValueError):
-            iterator._process_query_results(response_pb)
-
-    def _next_page_helper(self, txn_id=None, retry=None, timeout=None):
-        from google.api_core import page_iterator
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore_v1.types import query as query_pb2
-        from google.cloud.datastore.query import Query
-
-        more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
-        result = _make_query_response([], b"", more_enum, 0)
-        project = "prujekt"
-        ds_api = _make_datastore_api(result)
-        if txn_id is None:
-            client = _Client(project, datastore_api=ds_api)
-        else:
-            transaction = mock.Mock(id=txn_id, spec=["id"])
-            client = _Client(project, datastore_api=ds_api, transaction=transaction)
-
-        query = Query(client)
-        kwargs = {}
-
-        if retry is not None:
-            kwargs["retry"] = retry
-
-        if timeout is not None:
-            kwargs["timeout"] = timeout
-
-        iterator = self._make_one(query, client, **kwargs)
-
-        page = iterator._next_page()
-
-        self.assertIsInstance(page, page_iterator.Page)
-        self.assertIs(page._parent, iterator)
-
-        partition_id = entity_pb2.PartitionId(project_id=project)
-        if txn_id is None:
-            read_options = datastore_pb2.ReadOptions()
-        else:
-            read_options = datastore_pb2.ReadOptions(transaction=txn_id)
-        empty_query = query_pb2.Query()
-        ds_api.run_query.assert_called_once_with(
+
+def test_query_namespace_setter():
+    _NAMESPACE = "OTHER_NAMESPACE"
+    query = _make_query(_make_client())
+    query.namespace = _NAMESPACE
+    assert query.namespace == _NAMESPACE
+
+
+def test_query_kind_setter_w_non_string():
+    query = _make_query(_make_client())
+    with pytest.raises(TypeError):
+        query.kind = object()
+
+
+def test_query_kind_setter_wo_existing():
+    _KIND = "KIND"
+    query = _make_query(_make_client())
+    query.kind = _KIND
+    assert query.kind == _KIND
+
+
+def test_query_kind_setter_w_existing():
+    _KIND_BEFORE = "KIND_BEFORE"
+    _KIND_AFTER = "KIND_AFTER"
+    query = _make_query(_make_client(), kind=_KIND_BEFORE)
+    assert query.kind == _KIND_BEFORE
+    query.kind = _KIND_AFTER
+    assert query.project == _PROJECT
+    assert query.kind == _KIND_AFTER
+
+
+def test_query_ancestor_setter_w_non_key():
+    query = _make_query(_make_client())
+
+    with pytest.raises(TypeError):
+        query.ancestor = object()
+
+    with pytest.raises(TypeError):
+        query.ancestor = ["KIND", "NAME"]
+
+
+def test_query_ancestor_setter_w_key():
+    from google.cloud.datastore.key import Key
+
+    _NAME = "NAME"
+    key = Key("KIND", 123, project=_PROJECT)
+    query = _make_query(_make_client())
+    query.add_filter("name", "=", _NAME)
+    query.ancestor = key
+    assert query.ancestor.path == key.path
+
+
+def test_query_ancestor_deleter_w_key():
+    from google.cloud.datastore.key import Key
+
+    key = Key("KIND", 123, project=_PROJECT)
+    query = _make_query(client=_make_client(), ancestor=key)
+    del query.ancestor
+    assert query.ancestor is None
+
+
+def test_query_add_filter_setter_w_unknown_operator():
+    query = _make_query(_make_client())
+    with pytest.raises(ValueError):
+        query.add_filter("firstname", "~~", "John")
+
+
+def test_query_add_filter_w_known_operator():
+    query = _make_query(_make_client())
+    query.add_filter("firstname", "=", "John")
+    assert query.filters == [("firstname", "=", "John")]
+
+
+def test_query_add_filter_w_all_operators():
+    query = _make_query(_make_client())
+    query.add_filter("leq_prop", "<=", "val1")
+    query.add_filter("geq_prop", ">=", "val2")
+    query.add_filter("lt_prop", "<", "val3")
+    query.add_filter("gt_prop", ">", "val4")
+    query.add_filter("eq_prop", "=", "val5")
+    assert len(query.filters) == 5
+    assert query.filters[0] == ("leq_prop", "<=", "val1")
+    assert query.filters[1] == ("geq_prop", ">=", "val2")
+    assert query.filters[2] == ("lt_prop", "<", "val3")
+    assert query.filters[3] == ("gt_prop", ">", "val4")
+    assert query.filters[4] == ("eq_prop", "=", "val5")
+
+
+def test_query_add_filter_w_known_operator_and_entity():
+    from google.cloud.datastore.entity import Entity
+
+    query = _make_query(_make_client())
+    other = Entity()
+    other["firstname"] = "John"
+    other["lastname"] = "Smith"
+    query.add_filter("other", "=", other)
+    assert query.filters == [("other", "=", other)]
+
+
+def test_query_add_filter_w_whitespace_property_name():
+    query = _make_query(_make_client())
+    PROPERTY_NAME = "  property with lots of space "
+    query.add_filter(PROPERTY_NAME, "=", "John")
+    assert query.filters == [(PROPERTY_NAME, "=", "John")]
+
+
+def test_query_add_filter___key__valid_key():
+    from google.cloud.datastore.key import Key
+
+    query = _make_query(_make_client())
+    key = Key("Foo", project=_PROJECT)
+    query.add_filter("__key__", "=", key)
+    assert query.filters == [("__key__", "=", key)]
+
+
+def test_query_add_filter_return_query_obj():
+    from google.cloud.datastore.query import Query
+
+    query = _make_query(_make_client())
+    query_obj = query.add_filter("firstname", "=", "John")
+    assert isinstance(query_obj, Query)
+    assert query_obj.filters == [("firstname", "=", "John")]
+
+
+def test_query_filter___key__not_equal_operator():
+    from google.cloud.datastore.key import Key
+
+    key = Key("Foo", project=_PROJECT)
+    query = _make_query(_make_client())
+    query.add_filter("__key__", "<", key)
+    assert query.filters == [("__key__", "<", key)]
+
+
+def test_query_filter___key__invalid_value():
+    query = _make_query(_make_client())
+    with pytest.raises(ValueError):
+        query.add_filter("__key__", "=", None)
+
+
+def test_query_projection_setter_empty():
+    query = _make_query(_make_client())
+    query.projection = []
+    assert query.projection == []
+
+
+def test_query_projection_setter_string():
+    query = _make_query(_make_client())
+    query.projection = "field1"
+    assert query.projection == ["field1"]
+
+
+def test_query_projection_setter_non_empty():
+    query = _make_query(_make_client())
+    query.projection = ["field1", "field2"]
+    assert query.projection == ["field1", "field2"]
+
+
+def test_query_projection_setter_multiple_calls():
+    _PROJECTION1 = ["field1", "field2"]
+    _PROJECTION2 = ["field3"]
+    query = _make_query(_make_client())
+    query.projection = _PROJECTION1
+    assert query.projection == _PROJECTION1
+    query.projection = _PROJECTION2
+    assert query.projection == _PROJECTION2
+
+
+def test_query_keys_only():
+    query = _make_query(_make_client())
+    query.keys_only()
+    assert query.projection == ["__key__"]
+
+
+def test_query_key_filter_defaults():
+    from google.cloud.datastore.key import Key
+
+    client = _make_client()
+    query = _make_query(client)
+    assert query.filters == []
+    key = Key("Kind", 1234, project="project")
+    query.key_filter(key)
+    assert query.filters == [("__key__", "=", key)]
+
+
+def test_query_key_filter_explicit():
+    from google.cloud.datastore.key import Key
+
+    client = _make_client()
+    query = _make_query(client)
+    assert query.filters == []
+    key = Key("Kind", 1234, project="project")
+    query.key_filter(key, operator=">")
+    assert query.filters == [("__key__", ">", key)]
+
+
+def test_query_order_setter_empty():
+    query = _make_query(_make_client(), order=["foo", "-bar"])
+    query.order = []
+    assert query.order == []
+
+
+def test_query_order_setter_string():
+    query = _make_query(_make_client())
+    query.order = "field"
+    assert query.order == ["field"]
+
+
+def test_query_order_setter_single_item_list_desc():
+    query = _make_query(_make_client())
+    query.order = ["-field"]
+    assert query.order == ["-field"]
+
+
+def test_query_order_setter_multiple():
+    query = _make_query(_make_client())
+    query.order = ["foo", "-bar"]
+    assert query.order == ["foo", "-bar"]
+
+
+def test_query_distinct_on_setter_empty():
+    query = _make_query(_make_client(), distinct_on=["foo", "bar"])
+    query.distinct_on = []
+    assert query.distinct_on == []
+
+
+def test_query_distinct_on_setter_string():
+    query = _make_query(_make_client())
+    query.distinct_on = "field1"
+    assert query.distinct_on == ["field1"]
+
+
+def test_query_distinct_on_setter_non_empty():
+    query = _make_query(_make_client())
+    query.distinct_on = ["field1", "field2"]
+    assert query.distinct_on == ["field1", "field2"]
+
+
+def test_query_distinct_on_multiple_calls():
+    _DISTINCT_ON1 = ["field1", "field2"]
+    _DISTINCT_ON2 = ["field3"]
+    query = _make_query(_make_client())
+    query.distinct_on = _DISTINCT_ON1
+    assert query.distinct_on == _DISTINCT_ON1
+    query.distinct_on = _DISTINCT_ON2
+    assert query.distinct_on == _DISTINCT_ON2
+
+
+def test_query_fetch_defaults_w_client_attr():
+    from google.cloud.datastore.query import Iterator
+
+    client = _make_client()
+    query = _make_query(client)
+
+    iterator = query.fetch()
+
+    assert isinstance(iterator, Iterator)
+    assert iterator._query is query
+    assert iterator.client is client
+    assert iterator.max_results is None
+    assert iterator._offset == 0
+    assert iterator._retry is None
+    assert iterator._timeout is None
+
+
+def test_query_fetch_w_explicit_client_w_retry_w_timeout():
+    from google.cloud.datastore.query import Iterator
+
+    client = _make_client()
+    other_client = _make_client()
+    query = _make_query(client)
+    retry = mock.Mock()
+    timeout = 100000
+
+    iterator = query.fetch(
+        limit=7, offset=8, client=other_client, retry=retry, timeout=timeout
+    )
+
+    assert isinstance(iterator, Iterator)
+    assert iterator._query is query
+    assert iterator.client is other_client
+    assert iterator.max_results == 7
+    assert iterator._offset == 8
+    assert iterator._retry == retry
+    assert iterator._timeout == timeout
+
+
+def test_iterator_constructor_defaults():
+    query = object()
+    client = object()
+
+    iterator = _make_iterator(query, client)
+
+    assert not iterator._started
+    assert iterator.client is client
+    assert iterator.max_results is None
+    assert iterator.page_number == 0
+    assert iterator.next_page_token is None
+    assert iterator.num_results == 0
+    assert iterator._query is query
+    assert iterator._offset is None
+    assert iterator._end_cursor is None
+    assert iterator._more_results
+    assert iterator._retry is None
+    assert iterator._timeout is None
+
+
+def test_iterator_constructor_explicit():
+    query = object()
+    client = object()
+    limit = 43
+    offset = 9
+    start_cursor = b"8290\xff"
+    end_cursor = b"so20rc\ta"
+    retry = mock.Mock()
+    timeout = 100000
+
+    iterator = _make_iterator(
+        query,
+        client,
+        limit=limit,
+        offset=offset,
+        start_cursor=start_cursor,
+        end_cursor=end_cursor,
+        retry=retry,
+        timeout=timeout,
+    )
+
+    assert not iterator._started
+    assert iterator.client is client
+    assert iterator.max_results == limit
+    assert iterator.page_number == 0
+    assert iterator.next_page_token == start_cursor
+    assert iterator.num_results == 0
+    assert iterator._query is query
+    assert iterator._offset == offset
+    assert iterator._end_cursor == end_cursor
+    assert iterator._more_results
+    assert iterator._retry == retry
+    assert iterator._timeout == timeout
+
+
+def test_iterator__build_protobuf_empty():
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    client = _Client(None)
+    query = Query(client)
+    iterator = _make_iterator(query, client)
+
+    pb = iterator._build_protobuf()
+    expected_pb = query_pb2.Query()
+    assert pb == expected_pb
+
+
+def test_iterator__build_protobuf_all_values_except_offset():
+    # this test and the following (all_values_except_start_and_end_cursor)
+    # test mutually exclusive states; the offset is ignored
+    # if a start_cursor is supplied
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    client = _Client(None)
+    query = Query(client)
+    limit = 15
+    start_bytes = b"i\xb7\x1d"
+    start_cursor = "abcd"
+    end_bytes = b"\xc3\x1c\xb3"
+    end_cursor = "wxyz"
+    iterator = _make_iterator(
+        query, client, limit=limit, start_cursor=start_cursor, end_cursor=end_cursor
+    )
+    assert iterator.max_results == limit
+    iterator.num_results = 4
+    iterator._skipped_results = 1
+
+    pb = iterator._build_protobuf()
+    expected_pb = query_pb2.Query(start_cursor=start_bytes, end_cursor=end_bytes)
+    expected_pb._pb.limit.value = limit - iterator.num_results
+    assert pb == expected_pb
+
+
+def test_iterator__build_protobuf_all_values_except_start_and_end_cursor():
+    # this test and the previous (all_values_except_offset)
+    # test mutually exclusive states; the offset is ignored
+    # if a start_cursor is supplied
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    client = _Client(None)
+    query = Query(client)
+    limit = 15
+    offset = 9
+    iterator = _make_iterator(query, client, limit=limit, offset=offset)
+    assert iterator.max_results == limit
+    iterator.num_results = 4
+
+    pb = iterator._build_protobuf()
+    expected_pb = query_pb2.Query(offset=offset - iterator._skipped_results)
+    expected_pb._pb.limit.value = limit - iterator.num_results
+    assert pb == expected_pb
+
+
+def test_iterator__process_query_results():
+    from google.cloud.datastore_v1.types import query as query_pb2
+
+    iterator = _make_iterator(None, None, end_cursor="abcd")
+    assert iterator._end_cursor is not None
+
+    entity_pbs = [_make_entity("Hello", 9998, "PRAHJEKT")]
+    cursor_as_bytes = b"\x9ai\xe7"
+    cursor = b"mmnn"
+    skipped_results = 4
+    more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
+    response_pb = _make_query_response(
+        entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
+    )
+    result = iterator._process_query_results(response_pb)
+    assert result == entity_pbs
+
+    assert iterator._skipped_results == skipped_results
+    assert iterator.next_page_token == cursor
+    assert iterator._more_results
+
+
+def test_iterator__process_query_results_done():
+    from google.cloud.datastore_v1.types import query as query_pb2
+
+    iterator = _make_iterator(None, None, end_cursor="abcd")
+    assert iterator._end_cursor is not None
+
+    entity_pbs = [_make_entity("World", 1234, "PROJECT")]
+    cursor_as_bytes = b"\x9ai\xe7"
+    skipped_results = 44
+    more_results_enum = query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS
+    response_pb = _make_query_response(
+        entity_pbs, cursor_as_bytes, more_results_enum, skipped_results
+    )
+    result = iterator._process_query_results(response_pb)
+    assert result == entity_pbs
+
+    assert iterator._skipped_results == skipped_results
+    assert iterator.next_page_token is None
+    assert not iterator._more_results
+
+
+@pytest.mark.filterwarnings("ignore")
+def test_iterator__process_query_results_bad_enum():
+    iterator = _make_iterator(None, None)
+    more_results_enum = 999
+    response_pb = _make_query_response([], b"", more_results_enum, 0)
+    with pytest.raises(ValueError):
+        iterator._process_query_results(response_pb)
+
+
+def _next_page_helper(txn_id=None, retry=None, timeout=None):
+    from google.api_core import page_iterator
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
+    result = _make_query_response([], b"", more_enum, 0)
+    project = "prujekt"
+    ds_api = _make_datastore_api(result)
+    if txn_id is None:
+        client = _Client(project, datastore_api=ds_api)
+    else:
+        transaction = mock.Mock(id=txn_id, spec=["id"])
+        client = _Client(project, datastore_api=ds_api, transaction=transaction)
+
+    query = Query(client)
+    kwargs = {}
+
+    if retry is not None:
+        kwargs["retry"] = retry
+
+    if timeout is not None:
+        kwargs["timeout"] = timeout
+
+    iterator = _make_iterator(query, client, **kwargs)
+
+    page = iterator._next_page()
+
+    assert isinstance(page, page_iterator.Page)
+    assert page._parent is iterator
+
+    partition_id = entity_pb2.PartitionId(project_id=project)
+    if txn_id is None:
+        read_options = datastore_pb2.ReadOptions()
+    else:
+        read_options = datastore_pb2.ReadOptions(transaction=txn_id)
+    empty_query = query_pb2.Query()
+    ds_api.run_query.assert_called_once_with(
+        request={
+            "project_id": project,
+            "partition_id": partition_id,
+            "read_options": read_options,
+            "query": empty_query,
+        },
+        **kwargs,
+    )
+
+
+def test_iterator__next_page():
+    _next_page_helper()
+
+
+def test_iterator__next_page_w_retry():
+    _next_page_helper(retry=mock.Mock())
+
+
+def test_iterator__next_page_w_timeout():
+    _next_page_helper(timeout=100000)
+
+
+def test_iterator__next_page_in_transaction():
+    txn_id = b"1xo1md\xe2\x98\x83"
+    _next_page_helper(txn_id)
+
+
+def test_iterator__next_page_no_more():
+    from google.cloud.datastore.query import Query
+
+    ds_api = _make_datastore_api()
+    client = _Client(None, datastore_api=ds_api)
+    query = Query(client)
+    iterator = _make_iterator(query, client)
+    iterator._more_results = False
+
+    page = iterator._next_page()
+    assert page is None
+    ds_api.run_query.assert_not_called()
+
+
+def test_iterator__next_page_w_skipped_lt_offset():
+    from google.api_core import page_iterator
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    from google.cloud.datastore_v1.types import entity as entity_pb2
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import Query
+
+    project = "prujekt"
+    skipped_1 = 100
+    skipped_cursor_1 = b"DEADBEEF"
+    skipped_2 = 50
+    skipped_cursor_2 = b"FACEDACE"
+
+    more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
+
+    result_1 = _make_query_response([], b"", more_enum, skipped_1)
+    result_1.batch.skipped_cursor = skipped_cursor_1
+    result_2 = _make_query_response([], b"", more_enum, skipped_2)
+    result_2.batch.skipped_cursor = skipped_cursor_2
+
+    ds_api = _make_datastore_api(result_1, result_2)
+    client = _Client(project, datastore_api=ds_api)
+
+    query = Query(client)
+    offset = 150
+    iterator = _make_iterator(query, client, offset=offset)
+
+    page = iterator._next_page()
+
+    assert isinstance(page, page_iterator.Page)
+    assert page._parent is iterator
+
+    partition_id = entity_pb2.PartitionId(project_id=project)
+    read_options = datastore_pb2.ReadOptions()
+
+    query_1 = query_pb2.Query(offset=offset)
+    query_2 = query_pb2.Query(
+        start_cursor=skipped_cursor_1, offset=(offset - skipped_1)
+    )
+    expected_calls = [
+        mock.call(
             request={
                 "project_id": project,
                 "partition_id": partition_id,
                 "read_options": read_options,
-                "query": empty_query,
-            },
-            **kwargs,
+                "query": query,
+            }
         )
+        for query in [query_1, query_2]
+    ]
+    assert ds_api.run_query.call_args_list == expected_calls
 
-    def test__next_page(self):
-        self._next_page_helper()
 
-    def test__next_page_w_retry(self):
-        self._next_page_helper(retry=mock.Mock())
+def test__item_to_entity():
+    from google.cloud.datastore.query import _item_to_entity
 
-    def test__next_page_w_timeout(self):
-        self._next_page_helper(timeout=100000)
+    entity_pb = mock.Mock()
+    entity_pb._pb = mock.sentinel.entity_pb
+    patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf")
+    with patch as entity_from_protobuf:
+        result = _item_to_entity(None, entity_pb)
+        assert result is entity_from_protobuf.return_value
 
-    def test__next_page_in_transaction(self):
-        txn_id = b"1xo1md\xe2\x98\x83"
-        self._next_page_helper(txn_id)
+    entity_from_protobuf.assert_called_once_with(entity_pb)
 
-    def test__next_page_no_more(self):
-        from google.cloud.datastore.query import Query
 
-        ds_api = _make_datastore_api()
-        client = _Client(None, datastore_api=ds_api)
-        query = Query(client)
-        iterator = self._make_one(query, client)
-        iterator._more_results = False
+def test_pb_from_query_empty():
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import _pb_from_query
 
-        page = iterator._next_page()
-        self.assertIsNone(page)
-        ds_api.run_query.assert_not_called()
+    pb = _pb_from_query(_Query())
+    assert list(pb.projection) == []
+    assert list(pb.kind) == []
+    assert list(pb.order) == []
+    assert list(pb.distinct_on) == []
+    assert pb.filter.property_filter.property.name == ""
+    cfilter = pb.filter.composite_filter
+    assert cfilter.op == query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED
+    assert list(cfilter.filters) == []
+    assert pb.start_cursor == b""
+    assert pb.end_cursor == b""
+    assert pb._pb.limit.value == 0
+    assert pb.offset == 0
 
-    def test__next_page_w_skipped_lt_offset(self):
-        from google.api_core import page_iterator
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-        from google.cloud.datastore_v1.types import entity as entity_pb2
-        from google.cloud.datastore_v1.types import query as query_pb2
-        from google.cloud.datastore.query import Query
 
-        project = "prujekt"
-        skipped_1 = 100
-        skipped_cursor_1 = b"DEADBEEF"
-        skipped_2 = 50
-        skipped_cursor_2 = b"FACEDACE"
+def test_pb_from_query_projection():
+    from google.cloud.datastore.query import _pb_from_query
 
-        more_enum = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED
+    pb = _pb_from_query(_Query(projection=["a", "b", "c"]))
+    assert [item.property.name for item in pb.projection] == ["a", "b", "c"]
 
-        result_1 = _make_query_response([], b"", more_enum, skipped_1)
-        result_1.batch.skipped_cursor = skipped_cursor_1
-        result_2 = _make_query_response([], b"", more_enum, skipped_2)
-        result_2.batch.skipped_cursor = skipped_cursor_2
 
-        ds_api = _make_datastore_api(result_1, result_2)
-        client = _Client(project, datastore_api=ds_api)
+def test_pb_from_query_kind():
+    from google.cloud.datastore.query import _pb_from_query
 
-        query = Query(client)
-        offset = 150
-        iterator = self._make_one(query, client, offset=offset)
+    pb = _pb_from_query(_Query(kind="KIND"))
+    assert [item.name for item in pb.kind] == ["KIND"]
 
-        page = iterator._next_page()
 
-        self.assertIsInstance(page, page_iterator.Page)
-        self.assertIs(page._parent, iterator)
+def test_pb_from_query_ancestor():
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import _pb_from_query
 
-        partition_id = entity_pb2.PartitionId(project_id=project)
-        read_options = datastore_pb2.ReadOptions()
+    ancestor = Key("Ancestor", 123, project="PROJECT")
+    pb = _pb_from_query(_Query(ancestor=ancestor))
+    cfilter = pb.filter.composite_filter
+    assert cfilter.op == query_pb2.CompositeFilter.Operator.AND
+    assert len(cfilter.filters) == 1
+    pfilter = cfilter.filters[0].property_filter
+    assert pfilter.property.name == "__key__"
+    ancestor_pb = ancestor.to_protobuf()
+    assert pfilter.value.key_value == ancestor_pb
 
-        query_1 = query_pb2.Query(offset=offset)
-        query_2 = query_pb2.Query(
-            start_cursor=skipped_cursor_1, offset=(offset - skipped_1)
-        )
-        expected_calls = [
-            mock.call(
-                request={
-                    "project_id": project,
-                    "partition_id": partition_id,
-                    "read_options": read_options,
-                    "query": query,
-                }
-            )
-            for query in [query_1, query_2]
-        ]
-        self.assertEqual(ds_api.run_query.call_args_list, expected_calls)
-
-
-class Test__item_to_entity(unittest.TestCase):
-    def _call_fut(self, iterator, entity_pb):
-        from google.cloud.datastore.query import _item_to_entity
-
-        return _item_to_entity(iterator, entity_pb)
-
-    def test_it(self):
-        entity_pb = mock.Mock()
-        entity_pb._pb = mock.sentinel.entity_pb
-        patch = mock.patch("google.cloud.datastore.helpers.entity_from_protobuf")
-        with patch as entity_from_protobuf:
-            result = self._call_fut(None, entity_pb)
-            self.assertIs(result, entity_from_protobuf.return_value)
-
-        entity_from_protobuf.assert_called_once_with(entity_pb)
-
-
-class Test__pb_from_query(unittest.TestCase):
-    def _call_fut(self, query):
-        from google.cloud.datastore.query import _pb_from_query
-
-        return _pb_from_query(query)
-
-    def test_empty(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        pb = self._call_fut(_Query())
-        self.assertEqual(list(pb.projection), [])
-        self.assertEqual(list(pb.kind), [])
-        self.assertEqual(list(pb.order), [])
-        self.assertEqual(list(pb.distinct_on), [])
-        self.assertEqual(pb.filter.property_filter.property.name, "")
-        cfilter = pb.filter.composite_filter
-        self.assertEqual(
-            cfilter.op, query_pb2.CompositeFilter.Operator.OPERATOR_UNSPECIFIED
-        )
-        self.assertEqual(list(cfilter.filters), [])
-        self.assertEqual(pb.start_cursor, b"")
-        self.assertEqual(pb.end_cursor, b"")
-        self.assertEqual(pb._pb.limit.value, 0)
-        self.assertEqual(pb.offset, 0)
-
-    def test_projection(self):
-        pb = self._call_fut(_Query(projection=["a", "b", "c"]))
-        self.assertEqual(
-            [item.property.name for item in pb.projection], ["a", "b", "c"]
-        )
 
-    def test_kind(self):
-        pb = self._call_fut(_Query(kind="KIND"))
-        self.assertEqual([item.name for item in pb.kind], ["KIND"])
-
-    def test_ancestor(self):
-        from google.cloud.datastore.key import Key
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        ancestor = Key("Ancestor", 123, project="PROJECT")
-        pb = self._call_fut(_Query(ancestor=ancestor))
-        cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
-        self.assertEqual(len(cfilter.filters), 1)
-        pfilter = cfilter.filters[0].property_filter
-        self.assertEqual(pfilter.property.name, "__key__")
-        ancestor_pb = ancestor.to_protobuf()
-        self.assertEqual(pfilter.value.key_value, ancestor_pb)
-
-    def test_filter(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        query = _Query(filters=[("name", "=", "John")])
-        query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
-        pb = self._call_fut(query)
-        cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
-        self.assertEqual(len(cfilter.filters), 1)
-        pfilter = cfilter.filters[0].property_filter
-        self.assertEqual(pfilter.property.name, "name")
-        self.assertEqual(pfilter.value.string_value, "John")
-
-    def test_filter_key(self):
-        from google.cloud.datastore.key import Key
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        key = Key("Kind", 123, project="PROJECT")
-        query = _Query(filters=[("__key__", "=", key)])
-        query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
-        pb = self._call_fut(query)
-        cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.Operator.AND)
-        self.assertEqual(len(cfilter.filters), 1)
-        pfilter = cfilter.filters[0].property_filter
-        self.assertEqual(pfilter.property.name, "__key__")
-        key_pb = key.to_protobuf()
-        self.assertEqual(pfilter.value.key_value, key_pb)
-
-    def test_order(self):
-        from google.cloud.datastore_v1.types import query as query_pb2
-
-        pb = self._call_fut(_Query(order=["a", "-b", "c"]))
-        self.assertEqual([item.property.name for item in pb.order], ["a", "b", "c"])
-        self.assertEqual(
-            [item.direction for item in pb.order],
-            [
-                query_pb2.PropertyOrder.Direction.ASCENDING,
-                query_pb2.PropertyOrder.Direction.DESCENDING,
-                query_pb2.PropertyOrder.Direction.ASCENDING,
-            ],
-        )
+def test_pb_from_query_filter():
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import _pb_from_query
+
+    query = _Query(filters=[("name", "=", "John")])
+    query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
+    pb = _pb_from_query(query)
+    cfilter = pb.filter.composite_filter
+    assert cfilter.op == query_pb2.CompositeFilter.Operator.AND
+    assert len(cfilter.filters) == 1
+    pfilter = cfilter.filters[0].property_filter
+    assert pfilter.property.name == "name"
+    assert pfilter.value.string_value == "John"
+
+
+def test_pb_from_query_filter_key():
+    from google.cloud.datastore.key import Key
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import _pb_from_query
+
+    key = Key("Kind", 123, project="PROJECT")
+    query = _Query(filters=[("__key__", "=", key)])
+    query.OPERATORS = {"=": query_pb2.PropertyFilter.Operator.EQUAL}
+    pb = _pb_from_query(query)
+    cfilter = pb.filter.composite_filter
+    assert cfilter.op == query_pb2.CompositeFilter.Operator.AND
+    assert len(cfilter.filters) == 1
+    pfilter = cfilter.filters[0].property_filter
+    assert pfilter.property.name == "__key__"
+    key_pb = key.to_protobuf()
+    assert pfilter.value.key_value == key_pb
+
+
+def test_pb_from_query_order():
+    from google.cloud.datastore_v1.types import query as query_pb2
+    from google.cloud.datastore.query import _pb_from_query
+
+    pb = _pb_from_query(_Query(order=["a", "-b", "c"]))
+    assert [item.property.name for item in pb.order] == ["a", "b", "c"]
+    expected_directions = [
+        query_pb2.PropertyOrder.Direction.ASCENDING,
+        query_pb2.PropertyOrder.Direction.DESCENDING,
+        query_pb2.PropertyOrder.Direction.ASCENDING,
+    ]
+    assert [item.direction for item in pb.order] == expected_directions
+
 
-    def test_distinct_on(self):
-        pb = self._call_fut(_Query(distinct_on=["a", "b", "c"]))
-        self.assertEqual([item.name for item in pb.distinct_on], ["a", "b", "c"])
+def test_pb_from_query_distinct_on():
+    from google.cloud.datastore.query import _pb_from_query
+
+    pb = _pb_from_query(_Query(distinct_on=["a", "b", "c"]))
+    assert [item.name for item in pb.distinct_on] == ["a", "b", "c"]
 
 
 class _Query(object):
@@ -814,6 +835,22 @@ def current_transaction(self):
         return self._transaction
 
 
+def _make_query(*args, **kw):
+    from google.cloud.datastore.query import Query
+
+    return Query(*args, **kw)
+
+
+def _make_iterator(*args, **kw):
+    from google.cloud.datastore.query import Iterator
+
+    return Iterator(*args, **kw)
+
+
+def _make_client():
+    return _Client(_PROJECT)
+
+
 def _make_entity(kind, id_, project):
     from google.cloud.datastore_v1.types import entity as entity_pb2
 
diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py
index bae419df..648ae7e4 100644
--- a/tests/unit/test_transaction.py
+++ b/tests/unit/test_transaction.py
@@ -12,360 +12,349 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
 import mock
+import pytest
+
 
+def test_transaction_ctor_defaults():
+    from google.cloud.datastore.transaction import Transaction
 
-class TestTransaction(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.datastore.transaction import Transaction
+    project = "PROJECT"
+    client = _Client(project)
 
-        return Transaction
+    xact = _make_transaction(client)
 
-    def _make_one(self, client, **kw):
-        return self._get_target_class()(client, **kw)
+    assert xact.project == project
+    assert xact._client is client
+    assert xact.id is None
+    assert xact._status == Transaction._INITIAL
+    assert xact._mutations == []
+    assert len(xact._partial_key_entities) == 0
 
-    def _make_options(self, read_only=False, previous_transaction=None):
-        from google.cloud.datastore_v1.types import TransactionOptions
 
-        kw = {}
+def test_transaction_constructor_read_only():
+    project = "PROJECT"
+    id_ = 850302
+    ds_api = _make_datastore_api(xact=id_)
+    client = _Client(project, datastore_api=ds_api)
+    options = _make_options(read_only=True)
 
-        if read_only:
-            kw["read_only"] = TransactionOptions.ReadOnly()
+    xact = _make_transaction(client, read_only=True)
 
-        return TransactionOptions(**kw)
+    assert xact._options == options
 
-    def test_ctor_defaults(self):
-        project = "PROJECT"
-        client = _Client(project)
 
-        xact = self._make_one(client)
+def test_transaction_current():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        self.assertEqual(xact.project, project)
-        self.assertIs(xact._client, client)
-        self.assertIsNone(xact.id)
-        self.assertEqual(xact._status, self._get_target_class()._INITIAL)
-        self.assertEqual(xact._mutations, [])
-        self.assertEqual(len(xact._partial_key_entities), 0)
+    project = "PROJECT"
+    id_ = 678
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact1 = _make_transaction(client)
+    xact2 = _make_transaction(client)
+    assert xact1.current() is None
+    assert xact2.current() is None
 
-    def test_constructor_read_only(self):
-        project = "PROJECT"
-        id_ = 850302
-        ds_api = _make_datastore_api(xact=id_)
-        client = _Client(project, datastore_api=ds_api)
-        options = self._make_options(read_only=True)
+    with xact1:
+        assert xact1.current() is xact1
+        assert xact2.current() is xact1
 
-        xact = self._make_one(client, read_only=True)
+        with _NoCommitBatch(client):
+            assert xact1.current() is None
+            assert xact2.current() is None
 
-        self.assertEqual(xact._options, options)
+        with xact2:
+            assert xact1.current() is xact2
+            assert xact2.current() is xact2
+
+            with _NoCommitBatch(client):
+                assert xact1.current() is None
+                assert xact2.current() is None
 
-    def _make_begin_request(self, project, read_only=False):
-        expected_options = self._make_options(read_only=read_only)
-        return {
+        assert xact1.current() is xact1
+        assert xact2.current() is xact1
+
+    assert xact1.current() is None
+    assert xact2.current() is None
+
+    begin_txn = ds_api.begin_transaction
+    assert begin_txn.call_count == 2
+    expected_request = _make_begin_request(project)
+    begin_txn.assert_called_with(request=expected_request)
+
+    commit_method = ds_api.commit
+    assert commit_method.call_count == 2
+    mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+    commit_method.assert_called_with(
+        request={
             "project_id": project,
-            "transaction_options": expected_options,
+            "mode": mode,
+            "mutations": [],
+            "transaction": id_,
         }
+    )
 
-    def test_current(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
+    ds_api.rollback.assert_not_called()
 
-        project = "PROJECT"
-        id_ = 678
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact1 = self._make_one(client)
-        xact2 = self._make_one(client)
-        self.assertIsNone(xact1.current())
-        self.assertIsNone(xact2.current())
 
-        with xact1:
-            self.assertIs(xact1.current(), xact1)
-            self.assertIs(xact2.current(), xact1)
+def test_transaction_begin():
+    project = "PROJECT"
+    id_ = 889
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
 
-            with _NoCommitBatch(client):
-                self.assertIsNone(xact1.current())
-                self.assertIsNone(xact2.current())
-
-            with xact2:
-                self.assertIs(xact1.current(), xact2)
-                self.assertIs(xact2.current(), xact2)
-
-                with _NoCommitBatch(client):
-                    self.assertIsNone(xact1.current())
-                    self.assertIsNone(xact2.current())
-
-            self.assertIs(xact1.current(), xact1)
-            self.assertIs(xact2.current(), xact1)
-
-        self.assertIsNone(xact1.current())
-        self.assertIsNone(xact2.current())
-
-        begin_txn = ds_api.begin_transaction
-        self.assertEqual(begin_txn.call_count, 2)
-        expected_request = self._make_begin_request(project)
-        begin_txn.assert_called_with(request=expected_request)
-
-        commit_method = ds_api.commit
-        self.assertEqual(commit_method.call_count, 2)
-        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
-        commit_method.assert_called_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": id_,
-            }
-        )
-
-        ds_api.rollback.assert_not_called()
-
-    def test_begin(self):
-        project = "PROJECT"
-        id_ = 889
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+    xact.begin()
 
-        xact.begin()
+    assert xact.id == id_
 
-        self.assertEqual(xact.id, id_)
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
 
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
 
-    def test_begin_w_readonly(self):
-        project = "PROJECT"
-        id_ = 889
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client, read_only=True)
+def test_transaction_begin_w_readonly():
+    project = "PROJECT"
+    id_ = 889
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client, read_only=True)
 
-        xact.begin()
+    xact.begin()
 
-        self.assertEqual(xact.id, id_)
+    assert xact.id == id_
 
-        expected_request = self._make_begin_request(project, read_only=True)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
+    expected_request = _make_begin_request(project, read_only=True)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
 
-    def test_begin_w_retry_w_timeout(self):
-        project = "PROJECT"
-        id_ = 889
-        retry = mock.Mock()
-        timeout = 100000
 
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+def test_transaction_begin_w_retry_w_timeout():
+    project = "PROJECT"
+    id_ = 889
+    retry = mock.Mock()
+    timeout = 100000
 
-        xact.begin(retry=retry, timeout=timeout)
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
 
-        self.assertEqual(xact.id, id_)
+    xact.begin(retry=retry, timeout=timeout)
 
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(
-            request=expected_request, retry=retry, timeout=timeout,
-        )
+    assert xact.id == id_
 
-    def test_begin_tombstoned(self):
-        project = "PROJECT"
-        id_ = 1094
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(
+        request=expected_request, retry=retry, timeout=timeout,
+    )
 
-        xact.begin()
 
-        self.assertEqual(xact.id, id_)
+def test_transaction_begin_tombstoned():
+    project = "PROJECT"
+    id_ = 1094
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
 
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
+    xact.begin()
 
-        xact.rollback()
+    assert xact.id == id_
 
-        client._datastore_api.rollback.assert_called_once_with(
-            request={"project_id": project, "transaction": id_}
-        )
-        self.assertIsNone(xact.id)
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
 
-        with self.assertRaises(ValueError):
-            xact.begin()
+    xact.rollback()
 
-    def test_begin_w_begin_transaction_failure(self):
-        project = "PROJECT"
-        id_ = 712
-        ds_api = _make_datastore_api(xact_id=id_)
-        ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[])
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+    client._datastore_api.rollback.assert_called_once_with(
+        request={"project_id": project, "transaction": id_}
+    )
+    assert xact.id is None
 
-        with self.assertRaises(RuntimeError):
-            xact.begin()
+    with pytest.raises(ValueError):
+        xact.begin()
 
-        self.assertIsNone(xact.id)
 
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
+def test_transaction_begin_w_begin_transaction_failure():
+    project = "PROJECT"
+    id_ = 712
+    ds_api = _make_datastore_api(xact_id=id_)
+    ds_api.begin_transaction = mock.Mock(side_effect=RuntimeError, spec=[])
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
 
-    def test_rollback(self):
-        project = "PROJECT"
-        id_ = 239
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+    with pytest.raises(RuntimeError):
         xact.begin()
 
-        xact.rollback()
+    assert xact.id is None
 
-        self.assertIsNone(xact.id)
-        ds_api.rollback.assert_called_once_with(
-            request={"project_id": project, "transaction": id_}
-        )
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
 
-    def test_rollback_w_retry_w_timeout(self):
-        project = "PROJECT"
-        id_ = 239
-        retry = mock.Mock()
-        timeout = 100000
 
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
-        xact.begin()
+def test_transaction_rollback():
+    project = "PROJECT"
+    id_ = 239
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+    xact.begin()
 
-        xact.rollback(retry=retry, timeout=timeout)
+    xact.rollback()
 
-        self.assertIsNone(xact.id)
-        ds_api.rollback.assert_called_once_with(
-            request={"project_id": project, "transaction": id_},
-            retry=retry,
-            timeout=timeout,
-        )
+    assert xact.id is None
+    ds_api.rollback.assert_called_once_with(
+        request={"project_id": project, "transaction": id_}
+    )
 
-    def test_commit_no_partial_keys(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
-        project = "PROJECT"
-        id_ = 1002930
-        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+def test_transaction_rollback_w_retry_w_timeout():
+    project = "PROJECT"
+    id_ = 239
+    retry = mock.Mock()
+    timeout = 100000
 
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
-        xact.begin()
-        xact.commit()
-
-        ds_api.commit.assert_called_once_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": id_,
-            }
-        )
-        self.assertIsNone(xact.id)
-
-    def test_commit_w_partial_keys_w_retry_w_timeout(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        kind = "KIND"
-        id1 = 123
-        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
-        key = _make_key(kind, id1, project)
-        id2 = 234
-        retry = mock.Mock()
-        timeout = 100000
-
-        ds_api = _make_datastore_api(key, xact_id=id2)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
-        xact.begin()
-        entity = _Entity()
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+    xact.begin()
 
-        xact.put(entity)
-        xact.commit(retry=retry, timeout=timeout)
-
-        ds_api.commit.assert_called_once_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": xact.mutations,
-                "transaction": id2,
-            },
-            retry=retry,
-            timeout=timeout,
-        )
-        self.assertIsNone(xact.id)
-        self.assertEqual(entity.key.path, [{"kind": kind, "id": id1}])
-
-    def test_context_manager_no_raise(self):
-        from google.cloud.datastore_v1.types import datastore as datastore_pb2
-
-        project = "PROJECT"
-        id_ = 912830
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
+    xact.rollback(retry=retry, timeout=timeout)
+
+    assert xact.id is None
+    ds_api.rollback.assert_called_once_with(
+        request={"project_id": project, "transaction": id_},
+        retry=retry,
+        timeout=timeout,
+    )
+
+
+def test_transaction_commit_no_partial_keys():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    id_ = 1002930
+    mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+    xact.begin()
+    xact.commit()
+
+    ds_api.commit.assert_called_once_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": [],
+            "transaction": id_,
+        }
+    )
+    assert xact.id is None
+
+
+def test_transaction_commit_w_partial_keys_w_retry_w_timeout():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
 
+    project = "PROJECT"
+    kind = "KIND"
+    id1 = 123
+    mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+    key = _make_key(kind, id1, project)
+    id2 = 234
+    retry = mock.Mock()
+    timeout = 100000
+
+    ds_api = _make_datastore_api(key, xact_id=id2)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+    xact.begin()
+    entity = _Entity()
+
+    xact.put(entity)
+    xact.commit(retry=retry, timeout=timeout)
+
+    ds_api.commit.assert_called_once_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": xact.mutations,
+            "transaction": id2,
+        },
+        retry=retry,
+        timeout=timeout,
+    )
+    assert xact.id is None
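+    # The entity's partial key is completed with the key returned in the commit response.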
+    assert entity.key.path == [{"kind": kind, "id": id1}]
+
+
+def test_transaction_context_manager_no_raise():
+    from google.cloud.datastore_v1.types import datastore as datastore_pb2
+
+    project = "PROJECT"
+    id_ = 912830
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+
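+    # Entering the context begins the transaction; a clean exit commits it.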
+    with xact:
+        # the transaction id is only set between begin and commit
+        assert xact.id == id_
+
+    assert xact.id is None
+
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
+
+    mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
+    client._datastore_api.commit.assert_called_once_with(
+        request={
+            "project_id": project,
+            "mode": mode,
+            "mutations": [],
+            "transaction": id_,
+        },
+    )
+
+
+def test_transaction_context_manager_w_raise():
+    class Foo(Exception):
+        pass
+
+    project = "PROJECT"
+    id_ = 614416
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    xact = _make_transaction(client)
+    xact._mutation = object()
+    try:
         with xact:
-            self.assertEqual(xact.id, id_)  # only set between begin / commit
-
-        self.assertIsNone(xact.id)
-
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
-
-        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
-        client._datastore_api.commit.assert_called_once_with(
-            request={
-                "project_id": project,
-                "mode": mode,
-                "mutations": [],
-                "transaction": id_,
-            },
-        )
-
-    def test_context_manager_w_raise(self):
-        class Foo(Exception):
-            pass
-
-        project = "PROJECT"
-        id_ = 614416
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        xact = self._make_one(client)
-        xact._mutation = object()
-        try:
-            with xact:
-                self.assertEqual(xact.id, id_)
-                raise Foo()
-        except Foo:
-            pass
-
-        self.assertIsNone(xact.id)
-
-        expected_request = self._make_begin_request(project)
-        ds_api.begin_transaction.assert_called_once_with(request=expected_request)
-
-        client._datastore_api.commit.assert_not_called()
-
-        client._datastore_api.rollback.assert_called_once_with(
-            request={"project_id": project, "transaction": id_}
-        )
-
-    def test_put_read_only(self):
-        project = "PROJECT"
-        id_ = 943243
-        ds_api = _make_datastore_api(xact_id=id_)
-        client = _Client(project, datastore_api=ds_api)
-        entity = _Entity()
-        xact = self._make_one(client, read_only=True)
-        xact.begin()
+            assert xact.id == id_
+            raise Foo()
+    except Foo:
+        pass
+
+    assert xact.id is None
 
-        with self.assertRaises(RuntimeError):
-            xact.put(entity)
+    expected_request = _make_begin_request(project)
+    ds_api.begin_transaction.assert_called_once_with(request=expected_request)
+
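+    # When the body raises, the context manager rolls back instead of committing.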
+    client._datastore_api.commit.assert_not_called()
+
+    client._datastore_api.rollback.assert_called_once_with(
+        request={"project_id": project, "transaction": id_}
+    )
+
+
+def test_transaction_put_read_only():
+    project = "PROJECT"
+    id_ = 943243
+    ds_api = _make_datastore_api(xact_id=id_)
+    client = _Client(project, datastore_api=ds_api)
+    entity = _Entity()
+    xact = _make_transaction(client, read_only=True)
+    xact.begin()
+
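+    # put() is rejected inside a read-only transaction.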
+    with pytest.raises(RuntimeError):
+        xact.put(entity)
 
 
 def _make_key(kind, id_, project):
@@ -422,6 +411,31 @@ def __exit__(self, *args):
         self._client._pop_batch()
 
 
+def _make_options(read_only=False, previous_transaction=None):
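+    # previous_transaction is accepted but is not folded into the options built here.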
+    from google.cloud.datastore_v1.types import TransactionOptions
+
+    kw = {}
+
+    if read_only:
+        kw["read_only"] = TransactionOptions.ReadOnly()
+
+    return TransactionOptions(**kw)
+
+
+def _make_transaction(client, **kw):
+    from google.cloud.datastore.transaction import Transaction
+
+    return Transaction(client, **kw)
+
+
+def _make_begin_request(project, read_only=False):
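+    # Build the request payload the tests expect Transaction.begin() to send.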
+    expected_options = _make_options(read_only=read_only)
+    return {
+        "project_id": project,
+        "transaction_options": expected_options,
+    }
+
+
 def _make_commit_response(*keys):
     from google.cloud.datastore_v1.types import datastore as datastore_pb2