From 236639c3e2eb9dfc2614b0e965fcd5402e707893 Mon Sep 17 00:00:00 2001
From: Kentaro Hayashi
Date: Wed, 24 Nov 2021 09:44:32 +0900
Subject: [PATCH] Show chunk_limit_size in each error message

This makes the overflow error messages more informative and easier to
distinguish from each other.

Signed-off-by: Kentaro Hayashi
---
 lib/fluent/plugin/buffer.rb | 4 ++--
 test/plugin/test_buffer.rb  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/lib/fluent/plugin/buffer.rb b/lib/fluent/plugin/buffer.rb
index 39616a6d11..cc18f76a1e 100644
--- a/lib/fluent/plugin/buffer.rb
+++ b/lib/fluent/plugin/buffer.rb
@@ -781,7 +781,7 @@ def write_step_by_step(metadata, data, format, splits_count, &block)
         else
           big_record_size = formatted_split.bytesize
           if chunk.bytesize + big_record_size > @chunk_limit_size
-            errors << "a #{big_record_size} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size"
+            errors << "a #{big_record_size} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
             writing_splits_index += 1
             next
           end
@@ -803,7 +803,7 @@ def write_step_by_step(metadata, data, format, splits_count, &block)
             # but if it raises here, already processed 'split' or
             # the proceeding 'split' will be lost completely.
             # so it is a last resort to delay raising such a exception
-            errors << "a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size"
+            errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
             writing_splits_index += 1
             next
           end
diff --git a/test/plugin/test_buffer.rb b/test/plugin/test_buffer.rb
index acf48b7d7f..cfbcd89380 100644
--- a/test/plugin/test_buffer.rb
+++ b/test/plugin/test_buffer.rb
@@ -974,7 +974,7 @@ def create_chunk_es(metadata, es)
       end
       messages = []
       nth.each do |n|
-        messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size"
+        messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
       end
 
       assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
@@ -1273,7 +1273,7 @@ def create_chunk_es(metadata, es)
       c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
       overflow_bytes = c.bytesize
 
-      messages = "a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size"
+      messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
        # test format == nil && compress == :gzip
        @p.write({@dm0 => es})
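
For readers unfamiliar with the surrounding code, the following stand-alone
Ruby sketch shows the shape of the overflow check these messages come from.
It is an illustration only, not part of the patch: the local variables
(chunk_bytesize, record_size, chunk_limit_size) are hypothetical stand-ins
for the real chunk object and instance variables, and the numbers are
borrowed from the first test expectation above.

    # Illustrative sketch of the check in
    # Fluent::Plugin::Buffer#write_step_by_step (not the actual method).
    chunk_limit_size = 1_280_000   # stands in for @chunk_limit_size
    chunk_bytesize   = 0           # bytes already in the current chunk
    record_size      = 1_280_025   # bytesize of the formatted record

    errors = []
    if chunk_bytesize + record_size > chunk_limit_size
      # After this patch the message also reports the configured limit.
      errors << "a #{record_size} bytes record (nth: 0) is larger than " \
                "buffer chunk limit size (#{chunk_limit_size})"
    end

    puts errors.join(", ")
    # => a 1280025 bytes record (nth: 0) is larger than buffer chunk limit size (1280000)

With the limit included, a BufferChunkOverflowError is actionable from the
log alone; previously the reader had to cross-reference the buffer section
of the configuration to learn the effective chunk_limit_size.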