Commit 3bbedcc

Merge pull request #3562 from kenhys/show-chunk-limit-size
Show chunk_limit_size respectively
ashie committed Nov 24, 2021
2 parents baf6a11 + 236639c commit 3bbedcc
Showing 2 changed files with 4 additions and 4 deletions.
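In short: both overflow messages built in Buffer#write_step_by_step now interpolate the configured @chunk_limit_size, so the size of the rejected record can be compared against the limit straight from the log. A minimal Ruby sketch of the message change (the values are illustrative, borrowed from the tests below; this is not code from the commit):

  chunk_limit_size = 1_280_000  # illustrative; the real value comes from the buffer configuration
  big_record_size  = 1_280_025
  nth              = 0

  # before this commit:
  puts "a #{big_record_size} bytes record (nth: #{nth}) is larger than buffer chunk limit size"
  # after this commit:
  puts "a #{big_record_size} bytes record (nth: #{nth}) is larger than buffer chunk limit size (#{chunk_limit_size})"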
4 changes: 2 additions & 2 deletions lib/fluent/plugin/buffer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -781,7 +781,7 @@ def write_step_by_step(metadata, data, format, splits_count, &block)
   else
     big_record_size = formatted_split.bytesize
     if chunk.bytesize + big_record_size > @chunk_limit_size
-      errors << "a #{big_record_size} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size"
+      errors << "a #{big_record_size} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
       writing_splits_index += 1
       next
     end
@@ -803,7 +803,7 @@ def write_step_by_step(metadata, data, format, splits_count, &block)
       # but if it raises here, already processed 'split' or
       # the proceeding 'split' will be lost completely.
       # so it is a last resort to delay raising such a exception
-      errors << "a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size"
+      errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
       writing_splits_index += 1
       next
     end
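The second message gains a "concatenated/appended" prefix, presumably to distinguish this late branch, where the split has already been added to the chunk, from the size pre-check above. As the updated tests show, each oversized split contributes one entry to errors, and all entries are joined into a single exception. A sketch of how that surfaces (assumed usage inferred from the tests below, not code from this commit):

  require "fluent/plugin/buffer"

  errors = [
    "a 1280025 bytes record (nth: 0) is larger than buffer chunk limit size (1280000)",
    "a 1280025 bytes record (nth: 1) is larger than buffer chunk limit size (1280000)",
  ]
  # one BufferChunkOverflowError carries every per-split message:
  raise Fluent::Plugin::Buffer::BufferChunkOverflowError, errors.join(", ")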
4 changes: 2 additions & 2 deletions test/plugin/test_buffer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -974,7 +974,7 @@ def create_chunk_es(metadata, es)
     end
     messages = []
     nth.each do |n|
-      messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size"
+      messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
     end

     assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
@@ -1273,7 +1273,7 @@ def create_chunk_es(metadata, es)
     c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
     overflow_bytes = c.bytesize

-    messages = "a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size"
+    messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
     assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
       # test format == nil && compress == :gzip
       @p.write({@dm0 => es})
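The updated expectations pin the exact interpolated limits: 1280000 in the multi-record test and 70 in the gzip test, so these assertions will fail if the limit ever disappears from the message again. To verify locally, the file can be run through the usual test task (assuming fluentd's standard Rake setup), e.g. bundle exec rake test TEST=test/plugin/test_buffer.rb.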
