Merge pull request #4229 from daipom/fix-log-cache-growing-unlimitedly
logger: Fix cache for `ignore_same_log_interval` growing unlimitedly
ashie authored Jul 7, 2023
2 parents e20697e + 82fc146 commit 09617b4
Showing 2 changed files with 46 additions and 0 deletions.
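Background (as summarized from the diff below): `Thread.current[:last_same_log]` keeps one `message => time` entry per distinct log line so that repeats within `ignore_same_log_interval` can be suppressed, but previously nothing ever removed those entries, so a workload emitting many unique messages made the per-thread hash grow without bound. This change caps the cache at `IGNORE_SAME_LOG_MAX_CACHE_SIZE` entries and evicts stale entries when the cap is reached.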
9 changes: 9 additions & 0 deletions lib/fluent/log.rb
@@ -51,6 +51,8 @@ module TTYColor
    LOG_TYPES = [LOG_TYPE_SUPERVISOR, LOG_TYPE_WORKER0, LOG_TYPE_DEFAULT].freeze
    LOG_ROTATE_AGE = %w(daily weekly monthly)

    IGNORE_SAME_LOG_MAX_CACHE_SIZE = 1000 # If needed, make this an option of the system config.

    def self.str_to_level(log_level_str)
      case log_level_str.downcase
      when "trace" then LEVEL_TRACE
@@ -477,6 +479,13 @@ def ignore_same_log?(time, message)
          false
        end
      else
        if cached_log.size >= IGNORE_SAME_LOG_MAX_CACHE_SIZE
          cached_log.reject! do |_, cached_time|
            (time - cached_time) > @ignore_same_log_interval
          end
          # If the size is still over, we have no choice but to clear it.
          cached_log.clear if cached_log.size >= IGNORE_SAME_LOG_MAX_CACHE_SIZE
        end
        cached_log[message] = time
        false
      end
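To make the eviction strategy above easier to follow, here is a minimal, standalone sketch. The `SameLogCache` class and `ignore?` method are made up for illustration and are not fluentd's actual implementation (which keeps the cache in `Thread.current[:last_same_log]`): when the cache hits the cap, entries whose suppression window has already expired are dropped first, and the cache is cleared outright only if that was not enough.

    # A hypothetical, self-contained model of the capped same-log cache.
    class SameLogCache
      MAX_CACHE_SIZE = 1000 # mirrors IGNORE_SAME_LOG_MAX_CACHE_SIZE

      def initialize(interval)
        @interval = interval # suppression window in seconds
        @cache = {}          # message => time it was last recorded
      end

      # Returns true if `message` should be suppressed (seen within `interval`).
      def ignore?(message, time = Time.now)
        prev_time = @cache[message]
        if prev_time
          return true if (time - prev_time) <= @interval
          @cache[message] = time
          return false
        end

        if @cache.size >= MAX_CACHE_SIZE
          # Stage 1: drop entries whose suppression window has already expired.
          @cache.reject! { |_, cached_time| (time - cached_time) > @interval }
          # Stage 2: if every entry is still fresh, clearing is the only option left.
          @cache.clear if @cache.size >= MAX_CACHE_SIZE
        end

        @cache[message] = time
        false
      end
    end

    cache = SameLogCache.new(10)
    p cache.ignore?("connection refused") # => false (first occurrence)
    p cache.ignore?("connection refused") # => true  (repeated within 10 seconds)

With this cap in place, a flood of distinct messages can no longer grow the hash without bound; the two tests below exercise the "reject stale entries" path and the "clear everything" path respectively.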
37 changes: 37 additions & 0 deletions test/test_log.rb
@@ -472,6 +472,43 @@ def test_different_message
      ]
      assert_equal(expected, log.out.logs)
    end

    def test_reject_on_max_size
      ignore_same_log_interval = 10

      logger = Fluent::Log.new(
        ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
        ignore_same_log_interval: ignore_same_log_interval,
      )

      # Output a unique log message every second.
      Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
        logger.info "Test #{i}"
        Timecop.freeze(@timestamp + i)
      end
      logger.info "Over max size!"

      # The newest entry and the entries still within `ignore_same_log_interval` should remain.
      assert { Thread.current[:last_same_log].size == ignore_same_log_interval + 1 }
    end

    def test_clear_on_max_size
      ignore_same_log_interval = 10

      logger = Fluent::Log.new(
        ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
        ignore_same_log_interval: ignore_same_log_interval,
      )

      # Output unique log messages all at the same time.
      Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
        logger.info "Test #{i}"
      end
      logger.info "Over max size!"

      # No old entry can be rejected, so the whole cache should be cleared and only the newest entry should remain.
      assert { Thread.current[:last_same_log].size == 1 }
    end
  end

  def test_dup
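For reference, the cache capped here backs the `ignore_same_log_interval` setting, which is configured in the `<system>` section of a fluentd configuration. The snippet below is an assumed example of that setting; note that the new `IGNORE_SAME_LOG_MAX_CACHE_SIZE` cap itself is a hard-coded constant in this commit, not a user-facing option (see the comment in log.rb above).

    <system>
      # Suppress identical log messages repeated within 60 seconds.
      ignore_same_log_interval 60s
    </system>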
