refactor: incorporate will_accept() checks into publish() (#108)
Gurov Ilya committed Jun 9, 2020
1 parent 0132a46 commit 6c7677e
Showing 5 changed files with 47 additions and 69 deletions.
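In short, the standalone will_accept() pre-check is removed and Batch.publish() now makes the accept/reject decision itself. The sketch below only illustrates the behavior the updated tests pin down (accept into an empty batch, reject overflow by returning None, auto-commit once a limit is reached); it is not the library's literal code, and the attribute names (_messages, _size, _futures) and the auto-commit threshold are assumptions modeled on the thread.Batch internals.

# Illustrative sketch, not the actual thread.Batch.publish() implementation.
# Assumes thread.py's usual imports (base, futures, types) and Batch internals.
def publish(self, message):
    with self._state_lock:
        if self.status != base.BatchStatus.ACCEPTING_MESSAGES:
            return None

        size_increase = types.PublishRequest(messages=[message]).ByteSize()
        overflow = (
            len(self._messages) + 1 > self.settings.max_messages
            or self._size + size_increase > self.settings.max_bytes
        )

        # A non-empty batch rejects a message that would overflow it; the
        # caller is expected to retry on a fresh batch. An empty batch always
        # takes its first message (see test_publish_max_messages_zero below).
        if self._messages and overflow:
            return None

        self._messages.append(message)
        self._size += size_increase
        future = futures.Future()
        self._futures.append(future)

    # Once a limit is reached, commit (outside the state lock).
    if (
        len(self._messages) >= self.settings.max_messages
        or self._size >= self.settings.max_bytes
    ):
        self.commit()

    return future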
26 changes: 0 additions & 26 deletions google/cloud/pubsub_v1/publisher/_batch/base.py
@@ -109,32 +109,6 @@ def status(self):
         """
         raise NotImplementedError
 
-    def will_accept(self, message):
-        """Return True if the batch is able to accept the message.
-
-        In concurrent implementations, the attributes on the current batch
-        may be modified by other workers. With this in mind, the caller will
-        likely want to hold a lock that will make sure the state remains
-        the same after the "will accept?" question is answered.
-
-        Args:
-            message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message.
-
-        Returns:
-            bool: Whether this batch can accept the message.
-        """
-        # If this batch is not accepting messages generally, return False.
-        if self.status != BatchStatus.ACCEPTING_MESSAGES:
-            return False
-
-        # If this message will make the batch exceed the ``max_messages``
-        # setting, return False.
-        if len(self.messages) >= self.settings.max_messages:
-            return False
-
-        # Okay, everything is good.
-        return True
-
     def cancel(self, cancellation_reason):
         """Complete pending futures with an exception.
4 changes: 2 additions & 2 deletions google/cloud/pubsub_v1/publisher/_batch/thread.py
@@ -333,8 +333,8 @@ def publish(self, message):
                 self._status != base.BatchStatus.ERROR
             ), "Publish after stop() or publish error."
 
-            if not self.will_accept(message):
-                return future
+            if self.status != base.BatchStatus.ACCEPTING_MESSAGES:
+                return
 
             size_increase = types.PublishRequest(messages=[message]).ByteSize()

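The size accounting above leans on protobuf's ByteSize(). As a rough illustration of the numbers involved (exact values depend on the payload and on protobuf framing, so treat this as an approximation):

from google.cloud.pubsub_v1 import types

message = types.PubsubMessage(data=b"foobarbaz")
# Serialized size of a one-message PublishRequest, i.e. how much the batch
# would grow if this message were accepted.
size_increase = types.PublishRequest(messages=[message]).ByteSize()
print(size_increase)  # about 13 here: 9 payload bytes plus a few bytes of field framing

That is also why the max_bytes=15 test added below can hold one such message but rejects a second one.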
30 changes: 0 additions & 30 deletions tests/unit/pubsub_v1/publisher/batch/test_base.py
@@ -46,33 +46,3 @@ def test_len():
     assert len(batch) == 0
     batch.publish(types.PubsubMessage(data=b"foo"))
     assert len(batch) == 1
-
-
-def test_will_accept():
-    batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES)
-    message = types.PubsubMessage()
-    assert batch.will_accept(message) is True
-
-
-def test_will_accept_oversize():
-    batch = create_batch(
-        settings=types.BatchSettings(max_bytes=10),
-        status=BatchStatus.ACCEPTING_MESSAGES,
-    )
-    message = types.PubsubMessage(data=b"abcdefghijklmnopqrstuvwxyz")
-    assert batch.will_accept(message) is True
-
-
-def test_will_not_accept_status():
-    batch = create_batch(status="talk to the hand")
-    message = types.PubsubMessage()
-    assert batch.will_accept(message) is False
-
-
-def test_will_not_accept_number():
-    batch = create_batch(
-        settings=types.BatchSettings(max_messages=-1),
-        status=BatchStatus.ACCEPTING_MESSAGES,
-    )
-    message = types.PubsubMessage(data=b"abc")
-    assert batch.will_accept(message) is False
52 changes: 45 additions & 7 deletions tests/unit/pubsub_v1/publisher/batch/test_thread.py
@@ -287,18 +287,56 @@ def test_publish_updating_batch_size():
     assert batch.size > 0  # I do not always trust protobuf.
 
 
-def test_publish_not_will_accept():
+def test_publish():
+    batch = create_batch()
+    message = types.PubsubMessage()
+    future = batch.publish(message)
+
+    assert len(batch.messages) == 1
+    assert batch._futures == [future]
+
+
+def test_publish_max_messages_zero():
     batch = create_batch(topic="topic_foo", max_messages=0)
-    base_request_size = types.PublishRequest(topic="topic_foo").ByteSize()
 
     # Publish the message.
     message = types.PubsubMessage(data=b"foobarbaz")
+    with mock.patch.object(batch, "commit") as commit:
+        future = batch.publish(message)
 
+    assert future is not None
+    assert len(batch.messages) == 1
+    assert batch._futures == [future]
+    commit.assert_called_once()
+
+
+def test_publish_max_messages_enforced():
+    batch = create_batch(topic="topic_foo", max_messages=1)
+
+    message = types.PubsubMessage(data=b"foobarbaz")
+    message2 = types.PubsubMessage(data=b"foobarbaz2")
+
+    future = batch.publish(message)
+    future2 = batch.publish(message2)
+
+    assert future is not None
+    assert future2 is None
+    assert len(batch.messages) == 1
+    assert len(batch._futures) == 1
+
+
+def test_publish_max_bytes_enforced():
+    batch = create_batch(topic="topic_foo", max_bytes=15)
+
+    message = types.PubsubMessage(data=b"foobarbaz")
+    message2 = types.PubsubMessage(data=b"foobarbaz2")
+
+    future = batch.publish(message)
+    future2 = batch.publish(message2)
 
-    assert future is None
-    assert batch.size == base_request_size
-    assert batch.messages == []
-    assert batch._futures == []
+    assert future is not None
+    assert future2 is None
+    assert len(batch.messages) == 1
+    assert len(batch._futures) == 1
 
 
 def test_publish_exceed_max_messages():
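With will_accept() gone, a None return from Batch.publish() is the signal that the batch could not take the message, and it is the caller's job to roll over to a fresh batch; this is also why the client tests below no longer stub will_accept on their mock batches. A minimal, hypothetical sketch of that caller-side contract (the helper names are assumptions, not the publisher client's actual API):

# Hypothetical caller-side handling; get_current_batch/start_new_batch are
# stand-ins for the publisher client's batch management, not real API.
def publish_with_rollover(get_current_batch, start_new_batch, message):
    future = get_current_batch().publish(message)
    if future is None:
        # The batch was full; a fresh (empty) batch always accepts one message.
        future = start_new_batch().publish(message)
    return future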
4 changes: 0 additions & 4 deletions tests/unit/pubsub_v1/publisher/test_publisher_client.py
@@ -135,7 +135,6 @@ def test_publish():
     batch = mock.Mock(spec=client._batch_class)
 
     # Set the mock up to claim indiscriminately that it accepts all messages.
-    batch.will_accept.return_value = True
     batch.publish.side_effect = (future1, future2)
 
     topic = "topic/path"
@@ -169,7 +168,6 @@ def test_publish_error_exceeding_flow_control_limits():
     client = publisher.Client(credentials=creds, publisher_options=publisher_options)
 
     mock_batch = mock.Mock(spec=client._batch_class)
-    mock_batch.will_accept.return_value = True
     topic = "topic/path"
     client._set_batch(topic, mock_batch)

@@ -216,7 +214,6 @@ def test_publish_attrs_bytestring():
     # Use a mock in lieu of the actual batch class.
     batch = mock.Mock(spec=client._batch_class)
     # Set the mock up to claim indiscriminately that it accepts all messages.
-    batch.will_accept.return_value = True
 
     topic = "topic/path"
     client._set_batch(topic, batch)
@@ -431,7 +428,6 @@ def test_publish_with_ordering_key():
     future1.add_done_callback = mock.Mock(spec=["__call__"])
     future2.add_done_callback = mock.Mock(spec=["__call__"])
 
-    batch.will_accept.return_value = True
     batch.publish.side_effect = (future1, future2)
 
     topic = "topic/path"
