diff --git a/.travis.yml b/.travis.yml
index 1cefc919fa..1f1110b07b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,8 @@
language: ruby
rvm:
- - 2.0.0
- 2.1
+ - 2.1.5
- 2.2.3
- 2.3.0
- ruby-head
diff --git a/example/in_status.conf b/example/in_status.conf
new file mode 100644
index 0000000000..80b6df5b62
--- /dev/null
+++ b/example/in_status.conf
@@ -0,0 +1,10 @@
+<source>
+  @type status
+  emit_interval 10
+  tag status
+</source>
+
+<match status>
+  @type stdout
+</match>
+
diff --git a/lib/fluent/agent.rb b/lib/fluent/agent.rb
index 5080f2377c..f0262842b6 100644
--- a/lib/fluent/agent.rb
+++ b/lib/fluent/agent.rb
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
+# TODO consolidate w/ Label ?
+
module Fluent
require 'fluent/configurable'
require 'fluent/engine'
@@ -78,7 +81,7 @@ def shutdown
@started_filters.map { |f|
Thread.new do
begin
- log.info "shutting down filter#{@context.nil? ? '' : " in #{@context}"}", type: Plugin.lookup_name_from_class(f.class), plugin_id: f.plugin_id
+ log.info "shutting down filter#{@context.nil? ? '' : " in #{@context}"}", type: Plugin.lookup_type_from_class(f.class), plugin_id: f.plugin_id
f.shutdown
rescue => e
log.warn "unexpected error while shutting down filter plugins", :plugin => f.class, :plugin_id => f.plugin_id, :error_class => e.class, :error => e
@@ -92,7 +95,7 @@ def shutdown
@started_outputs.map { |o|
Thread.new do
begin
- log.info "shutting down output#{@context.nil? ? '' : " in #{@context}"}", type: Plugin.lookup_name_from_class(o.class), plugin_id: o.plugin_id
+ log.info "shutting down output#{@context.nil? ? '' : " in #{@context}"}", type: Plugin.lookup_type_from_class(o.class), plugin_id: o.plugin_id
o.shutdown
rescue => e
log.warn "unexpected error while shutting down output plugins", :plugin => o.class, :plugin_id => o.plugin_id, :error_class => e.class, :error => e
diff --git a/lib/fluent/command/debug.rb b/lib/fluent/command/debug.rb
index 22a21d18a5..a748d518af 100644
--- a/lib/fluent/command/debug.rb
+++ b/lib/fluent/command/debug.rb
@@ -85,7 +85,6 @@
puts "Usage:"
puts " Engine.match('some.tag').output : get an output plugin instance"
puts " Engine.sources[i] : get input plugin instances"
-puts " Plugin.load_plugin(type,name) : load plugin class (use this if you get DRb::DRbUnknown)"
puts ""
Encoding.default_internal = nil if Encoding.respond_to?(:default_internal)
diff --git a/lib/fluent/command/fluentd.rb b/lib/fluent/command/fluentd.rb
index 6ba415d5ab..1f09488636 100644
--- a/lib/fluent/command/fluentd.rb
+++ b/lib/fluent/command/fluentd.rb
@@ -92,6 +92,10 @@
opts[:without_source] = b
}
+op.on('--plugin-storage-path DIR_PATH', "directory path which is used for storages of plugin internal data") {|s|
+ opts[:plugin_storage_path] = s
+}
+
op.on('--use-v1-config', "Use v1 configuration format (default)", TrueClass) {|b|
opts[:use_v1_config] = b
}
diff --git a/lib/fluent/config.rb b/lib/fluent/config.rb
index 5ff253a714..cdeed1f52c 100644
--- a/lib/fluent/config.rb
+++ b/lib/fluent/config.rb
@@ -38,17 +38,4 @@ def self.new(name = '')
Element.new(name, '', {}, [])
end
end
-
- require 'fluent/configurable'
-
- module PluginId
- def configure(conf)
- @id = conf['@id'] || conf['id']
- super
- end
-
- def plugin_id
- @id ? @id : "object:#{object_id.to_s(16)}"
- end
- end
end
diff --git a/lib/fluent/config/configure_proxy.rb b/lib/fluent/config/configure_proxy.rb
index 5083da0b4a..5bccfa9658 100644
--- a/lib/fluent/config/configure_proxy.rb
+++ b/lib/fluent/config/configure_proxy.rb
@@ -77,19 +77,7 @@ def merge(other) # self is base class, other is subclass
merged.argument = other.argument || self.argument
merged.params = self.params.merge(other.params)
merged.defaults = self.defaults.merge(other.defaults)
- merged.sections = {}
- (self.sections.keys + other.sections.keys).uniq.each do |section_key|
- self_section = self.sections[section_key]
- other_section = other.sections[section_key]
- merged_section = if self_section && other_section
- self_section.merge(other_section)
- elsif self_section || other_section
- self_section || other_section
- else
- raise "BUG: both of self and other section are nil"
- end
- merged.sections[section_key] = merged_section
- end
+ merged.sections = self.sections.merge(other.sections)
merged
end
diff --git a/lib/fluent/config/section.rb b/lib/fluent/config/section.rb
index 8549e22fd3..d208493bad 100644
--- a/lib/fluent/config/section.rb
+++ b/lib/fluent/config/section.rb
@@ -33,6 +33,10 @@ def initialize(params = {})
alias :object_id :__id__
+ def to_s
+ inspect
+ end
+
def inspect
""
end
@@ -62,6 +66,19 @@ def [](key)
@params[key.to_sym]
end
+ def respond_to?(symbol, include_all=false)
+ case symbol
+ when :inspect, :nil?, :to_h, :+, :instance_of?, :kind_of?, :[], :respond_to?, :respond_to_missing?
+ true
+ when :!, :!=, :==, :equal?, :instance_eval, :instance_exec
+ true
+ when :method_missing, :singleton_method_added, :singleton_method_removed, :singleton_method_undefined
+ include_all
+ else
+ false
+ end
+ end
+
def respond_to_missing?(symbol, include_private)
@params.has_key?(symbol)
end
@@ -70,7 +87,7 @@ def method_missing(name, *args)
if @params.has_key?(name)
@params[name]
else
- super
+ ::Kernel.raise ::NoMethodError, "undefined method `#{name}' for #{self.inspect}"
end
end
end
diff --git a/lib/fluent/configurable.rb b/lib/fluent/configurable.rb
index a17008ed2a..280810890e 100644
--- a/lib/fluent/configurable.rb
+++ b/lib/fluent/configurable.rb
@@ -14,12 +14,13 @@
# limitations under the License.
#
-module Fluent
- require 'fluent/config/configure_proxy'
- require 'fluent/config/section'
- require 'fluent/config/error'
- require 'fluent/registry'
+require 'fluent/config/configure_proxy'
+require 'fluent/config/section'
+require 'fluent/config/error'
+require 'fluent/registry'
+require 'fluent/plugin'
+module Fluent
module Configurable
def self.included(mod)
mod.extend(ClassMethods)
@@ -50,9 +51,13 @@ def configure(conf)
proxy = self.class.merged_configure_proxy
conf.corresponding_proxies << proxy
- # In the nested section, can't get plugin class through proxies so get plugin class here
- plugin_class = Plugin.lookup_name_from_class(proxy.name.to_s)
- root = Fluent::Config::SectionGenerator.generate(proxy, conf, logger, plugin_class)
+ # Configuration sections must have plugin types to show unused parameters (for what type of plugin)
+ # in configuration files,
+ # but nested sections cannot take the type of root configuration section.
+ # So we need to get plugin type here, and use it in configuration proxies.
+ # ("type" is a stuff specified by '@type' in configuration file)
+ plugin_type = Fluent::Plugin.lookup_type_from_class(proxy.name.to_s)
+ root = Fluent::Config::SectionGenerator.generate(proxy, conf, logger, plugin_type)
@config_root_section = root
root.instance_eval{ @params.keys }.each do |param_name|
diff --git a/lib/fluent/engine.rb b/lib/fluent/engine.rb
index 05883ffbe4..2029b76e6c 100644
--- a/lib/fluent/engine.rb
+++ b/lib/fluent/engine.rb
@@ -18,6 +18,7 @@ module Fluent
require 'fluent/event_router'
require 'fluent/root_agent'
require 'fluent/time'
+ require 'fluent/system_config'
class EngineClass
def initialize
@@ -34,26 +35,30 @@ def initialize
@msgpack_factory = MessagePack::Factory.new
@msgpack_factory.register_type(Fluent::EventTime::TYPE, Fluent::EventTime)
+ @system_config = SystemConfig.new({})
end
MATCH_CACHE_SIZE = 1024
LOG_EMIT_INTERVAL = 0.1
+ attr_reader :system_config
attr_reader :root_agent
attr_reader :matches, :sources
attr_reader :msgpack_factory
def init(opts = {})
BasicSocket.do_not_reverse_lookup = true
- Plugin.load_plugins
if defined?(Encoding)
Encoding.default_internal = 'ASCII-8BIT' if Encoding.respond_to?(:default_internal)
Encoding.default_external = 'ASCII-8BIT' if Encoding.respond_to?(:default_external)
end
+ @system_config = opts[:system_config] if opts[:system_config]
+
suppress_interval(opts[:suppress_interval]) if opts[:suppress_interval]
@suppress_config_dump = opts[:suppress_config_dump] if opts[:suppress_config_dump]
@without_source = opts[:without_source] if opts[:without_source]
+ @plugin_storage_path = opts[:plugin_storage_path] if opts[:plugin_storage_path]
@root_agent = RootAgent.new(opts)
@@ -113,22 +118,20 @@ def configure(conf)
end
end
- def load_plugin_dir(dir)
- Plugin.load_plugin_dir(dir)
+ def add_plugin_dir(dir)
+ Plugin.add_plugin_dir(dir)
end
def emit(tag, time, record)
- unless record.nil?
- emit_stream tag, OneEventStream.new(time, record)
- end
+ raise "BUG: use router.emit instead of Engine.emit"
end
def emit_array(tag, array)
- emit_stream tag, ArrayEventStream.new(array)
+ raise "BUG: use router.emit_array instead of Engine.emit_array"
end
def emit_stream(tag, es)
- @event_router.emit_stream(tag, es)
+ raise "BUG: use router.emit_stream instead of Engine.emit_stream"
end
def flush!
diff --git a/lib/fluent/env.rb b/lib/fluent/env.rb
index 8f20db8906..0102a393d4 100644
--- a/lib/fluent/env.rb
+++ b/lib/fluent/env.rb
@@ -18,9 +18,9 @@ module Fluent
DEFAULT_CONFIG_PATH = ENV['FLUENT_CONF'] || '/etc/fluent/fluent.conf'
DEFAULT_PLUGIN_DIR = ENV['FLUENT_PLUGIN'] || '/etc/fluent/plugin'
DEFAULT_SOCKET_PATH = ENV['FLUENT_SOCKET'] || '/var/run/fluent/fluent.sock'
- DEFAULT_LISTEN_PORT = 24224
- DEFAULT_FILE_PERMISSION = 0644
- DEFAULT_DIR_PERMISSION = 0755
+ DEFAULT_LISTEN_PORT = 24224 # TODO: obsolete
+ DEFAULT_FILE_PERMISSION = 0644 # TODO: configurable w/ <system>
+ DEFAULT_DIR_PERMISSION = 0755 # TODO: configurable w/ <system>
IS_WINDOWS = /mswin|mingw/ === RUBY_PLATFORM
private_constant :IS_WINDOWS
diff --git a/lib/fluent/filter.rb b/lib/fluent/filter.rb
index b5da9c4982..6b7b046faf 100644
--- a/lib/fluent/filter.rb
+++ b/lib/fluent/filter.rb
@@ -16,6 +16,7 @@
module Fluent
class Filter
+ # TODO: move to plugin/filter.rb, and make interoperability layer here
include Configurable
include PluginId
include PluginLoggerMixin
diff --git a/lib/fluent/formatter.rb b/lib/fluent/formatter.rb
index 5b14892f3a..85555207ef 100644
--- a/lib/fluent/formatter.rb
+++ b/lib/fluent/formatter.rb
@@ -14,9 +14,13 @@
# limitations under the License.
#
-module Fluent
- require 'fluent/registry'
+require 'fluent/registry'
+require 'fluent/configurable'
+require 'fluent/plugin'
+
+# Fluent::Plugin::Formatter ?
+module Fluent
class Formatter
include Configurable
@@ -240,7 +244,6 @@ def format(tag, time, record)
end
end
- TEMPLATE_REGISTRY = Registry.new(:formatter_type, 'fluent/plugin/formatter_')
{
'out_file' => Proc.new { OutFileFormatter.new },
'stdout' => Proc.new { StdoutFormatter.new },
@@ -251,23 +254,22 @@ def format(tag, time, record)
'csv' => Proc.new { CsvFormatter.new },
'single_value' => Proc.new { SingleValueFormatter.new },
}.each { |name, factory|
- TEMPLATE_REGISTRY.register(name, factory)
+ Fluent::Plugin.register_formatter(name, factory)
}
def self.register_template(name, factory_or_proc)
factory = if factory_or_proc.is_a?(Class) # XXXFormatter
- Proc.new { factory_or_proc.new }
+ factory_or_proc.new
elsif factory_or_proc.arity == 3 # Proc.new { |tag, time, record| }
Proc.new { ProcWrappedFormatter.new(factory_or_proc) }
else # Proc.new { XXXFormatter.new }
factory_or_proc
end
-
- TEMPLATE_REGISTRY.register(name, factory)
+ Fluent::Plugin.register_formatter(name, factory)
end
def self.lookup(format)
- TEMPLATE_REGISTRY.lookup(format).call
+ Fluent::Plugin.new_formatter(format)
end
# Keep backward-compatibility
diff --git a/lib/fluent/input.rb b/lib/fluent/input.rb
index 10a6022bae..e9b37cad7f 100644
--- a/lib/fluent/input.rb
+++ b/lib/fluent/input.rb
@@ -14,33 +14,10 @@
# limitations under the License.
#
-module Fluent
- class Input
- include Configurable
- include PluginId
- include PluginLoggerMixin
-
- attr_accessor :router
-
- def initialize
- super
- end
-
- def configure(conf)
- super
+require 'fluent/plugin/input'
- if label_name = conf['@label']
- label = Engine.root_agent.find_label(label_name)
- @router = label.event_router
- elsif @router.nil?
- @router = Engine.root_agent.event_router
- end
- end
-
- def start
- end
-
- def shutdown
- end
+module Fluent
+ class Input < Plugin::Input
+ # TODO: add interoperability layer
end
end
diff --git a/lib/fluent/load.rb b/lib/fluent/load.rb
index 200f09c8ec..e693450878 100644
--- a/lib/fluent/load.rb
+++ b/lib/fluent/load.rb
@@ -1,3 +1,6 @@
+# TODO: This file MUST be deleted
+# all test (and implementation) files should require its dependency by itself
+
require 'thread'
require 'socket'
require 'fcntl'
@@ -30,8 +33,8 @@
require 'fluent/parser'
require 'fluent/formatter'
require 'fluent/event'
-require 'fluent/buffer'
-require 'fluent/input'
-require 'fluent/output'
+require 'fluent/plugin/buffer'
+require 'fluent/plugin/input'
+require 'fluent/plugin/output'
require 'fluent/filter'
require 'fluent/match'
diff --git a/lib/fluent/log.rb b/lib/fluent/log.rb
index f14f27c32f..85919fd09e 100644
--- a/lib/fluent/log.rb
+++ b/lib/fluent/log.rb
@@ -65,6 +65,9 @@ def initialize(out=STDERR, level=LEVEL_TRACE, opts={})
# TODO: This variable name is unclear so we should change to better name.
@threads_exclude_events = []
+ @optional_header = nil
+ @optional_attrs = nil
+
if opts.has_key?(:suppress_repeated_stacktrace)
@suppress_repeated_stacktrace = opts[:suppress_repeated_stacktrace]
end
@@ -74,6 +77,7 @@ def initialize(out=STDERR, level=LEVEL_TRACE, opts={})
attr_accessor :level
attr_accessor :tag
attr_accessor :time_format
+ attr_accessor :optional_header, :optional_attrs
def enable_debug(b=true)
@debug_mode = b
@@ -263,8 +267,8 @@ def dump_stacktrace(backtrace, level)
def event(level, args)
time = Time.now
- message = ''
- map = {}
+ message = @optional_header ? @optional_header.dup : ''
+ map = @optional_attrs ? @optional_attrs.dup : {}
args.each {|a|
if a.is_a?(Hash)
a.each_pair {|k,v|
@@ -364,6 +368,13 @@ def configure(conf)
@log = PluginLogger.new($log)
end
@log.level = @log_level
+ @log.optional_header = "[#{self.class.name}#{@_id_configured ? "(" + @id + ")" : ""}] "
+ @log.optional_attrs = {
+ 'plugin_type' => self.class.name,
+ }
+ if @_id_configured
+ @log.optional_attrs.update({'plugin_id' => @id})
+ end
end
end
end
diff --git a/lib/fluent/output.rb b/lib/fluent/output.rb
index c5873f1ca9..3ea9ba7428 100644
--- a/lib/fluent/output.rb
+++ b/lib/fluent/output.rb
@@ -14,598 +14,118 @@
# limitations under the License.
#
-module Fluent
- class OutputChain
- def initialize(array, tag, es, chain=NullOutputChain.instance)
- @array = array
- @tag = tag
- @es = es
- @offset = 0
- @chain = chain
- end
-
- def next
- if @array.length <= @offset
- return @chain.next
- end
- @offset += 1
- result = @array[@offset-1].emit(@tag, @es, self)
- result
- end
- end
-
- class CopyOutputChain < OutputChain
- def next
- if @array.length <= @offset
- return @chain.next
- end
- @offset += 1
- es = @array.length > @offset ? @es.dup : @es
- result = @array[@offset-1].emit(@tag, es, self)
- result
- end
- end
+require 'fluent/plugin/output'
+require 'fluent/plugin/buffered_output'
+require 'fluent/plugin/object_buffered_output'
+require 'fluent/plugin/time_sliced_output'
- class NullOutputChain
- require 'singleton'
- include Singleton
-
- def next
- end
- end
+# This classes are for compatibility.
+# Fluent::Input (or other plugin base classes) are obsolete in v0.14.
+require 'fluent/plugin_support/emitter'
- class Output
- include Configurable
- include PluginId
- include PluginLoggerMixin
-
- attr_accessor :router
-
+module Fluent
+ class EngineCompat
def initialize
- super
+ @router = nil
end
- def configure(conf)
- super
-
- if label_name = conf['@label']
- label = Engine.root_agent.find_label(label_name)
- @router = label.event_router
- elsif @router.nil?
- @router = Engine.root_agent.event_router
- end
+ def reconfigure
+ @router = Fluent::Engine.root_agent.event_router
end
- def start
+ def emit(tag, time, record)
+ @router.emit(tag, time, record)
end
- def shutdown
+ def emit_array(tag, array)
+ @router.emit_array(tag, array)
end
- #def emit(tag, es, chain)
- #end
-
- def secondary_init(primary)
- if primary.class != self.class
- $log.warn "type of secondary output should be same as primary output", :primary=>primary.class.to_s, :secondary=>self.class.to_s
- end
+ def emit_stream(tag, es)
+ @router.emit_stream(tag, es)
end
-
- def inspect; "#<%s:%014x>" % [self.class.name, '0x%014x' % (__id__<<1)] end
end
+ module OutputPluginCompat
+ # TODO: add interoperability layer (especially for chain)
- class OutputThread
- def initialize(output)
- @output = output
- @finish = false
- @next_time = Time.now.to_f + 1.0
- end
-
- def configure(conf)
- end
+ # All traditional output plugins can emit events
+ include Fluent::PluginSupport::Emitter
- def start
- @mutex = Mutex.new
- @cond = ConditionVariable.new
- @thread = Thread.new(&method(:run))
- end
-
- def shutdown
- @finish = true
- @mutex.synchronize {
- @cond.signal
- }
- Thread.pass
- @thread.join
- end
+ # to overwrite Fluent::Engine in traditional plugin code
+ module Fluent; end
- def submit_flush
- @mutex.synchronize {
- @next_time = 0
- @cond.signal
- }
- Thread.pass
- end
-
- private
- def run
- @mutex.lock
- begin
- until @finish
- time = Time.now.to_f
-
- if @next_time <= time
- @mutex.unlock
- begin
- @next_time = @output.try_flush
- ensure
- @mutex.lock
- end
- next_wait = @next_time - Time.now.to_f
- else
- next_wait = @next_time - time
- end
-
- cond_wait(next_wait) if next_wait > 0
- end
- ensure
- @mutex.unlock
- end
- rescue
- $log.error "error on output thread", :error=>$!.to_s
- $log.error_backtrace
- raise
- ensure
- @mutex.synchronize {
- @output.before_shutdown
- }
- end
-
- def cond_wait(sec)
- @cond.wait(@mutex, sec)
- end
- end
-
-
- class BufferedOutput < Output
- def initialize
- super
- @next_flush_time = 0
- @last_retry_time = 0
- @next_retry_time = 0
- @num_errors = 0
- @num_errors_lock = Mutex.new
- @secondary_limit = 8
- @emit_count = 0
- end
-
- desc 'The buffer type (memory, file)'
- config_param :buffer_type, :string, :default => 'memory'
- desc 'The interval between data flushes.'
- config_param :flush_interval, :time, :default => 60
- config_param :try_flush_interval, :float, :default => 1
- desc 'If true, the value of `retry_value` is ignored and there is no limit'
- config_param :disable_retry_limit, :bool, :default => false
- desc 'The limit on the number of retries before buffered data is discarded'
- config_param :retry_limit, :integer, :default => 17
- desc 'The initial intervals between write retries.'
- config_param :retry_wait, :time, :default => 1.0
- desc 'The maximum intervals between write retries.'
- config_param :max_retry_wait, :time, :default => nil
- desc 'The number of threads to flush the buffer.'
- config_param :num_threads, :integer, :default => 1
- desc 'The interval between data flushes for queued chunk.'
- config_param :queued_chunk_flush_interval, :time, :default => 1
+ Fluent::Engine = Engine = EngineCompat.new # Engine.root_agent is not initialized yet
def configure(conf)
super
-
- @retry_wait = @retry_wait.to_f # converted to Float for calc_retry_wait
- @buffer = Plugin.new_buffer(@buffer_type)
- @buffer.configure(conf)
-
- if @buffer.respond_to?(:enable_parallel)
- if @num_threads == 1
- @buffer.enable_parallel(false)
- else
- @buffer.enable_parallel(true)
- end
- end
-
- @writers = (1..@num_threads).map {
- writer = OutputThread.new(self)
- writer.configure(conf)
- writer
- }
-
- if sconf = conf.elements.select {|e| e.name == 'secondary' }.first
- type = sconf['@type'] || conf['@type'] || sconf['type'] || conf['type']
- @secondary = Plugin.new_output(type)
- @secondary.router = router
- @secondary.configure(sconf)
-
- if secondary_limit = conf['secondary_limit']
- @secondary_limit = secondary_limit.to_i
- if @secondary_limit < 0
- raise ConfigError, "invalid parameter 'secondary_limit #{secondary_limit}'"
- end
- end
-
- @secondary.secondary_init(self)
- end
- end
-
- def start
- @next_flush_time = Time.now.to_f + @flush_interval
- @buffer.start
- @secondary.start if @secondary
- @writers.each {|writer| writer.start }
- @writer_current_position = 0
- @writers_size = @writers.size
- end
-
- def shutdown
- @writers.each {|writer| writer.shutdown }
- @secondary.shutdown if @secondary
- @buffer.shutdown
+ # set root_agent.event_router here
+ Engine.reconfigure
end
+ end
- def emit(tag, es, chain, key="")
- @emit_count += 1
- data = format_stream(tag, es)
- if @buffer.emit(key, data, chain)
- submit_flush
- end
- end
+ class Output < Plugin::Output
+ include OutputPluginCompat
+ end
- def submit_flush
- # Without locks: it is rough but enough to select "next" writer selection
- @writer_current_position = (@writer_current_position + 1) % @writers_size
- @writers[@writer_current_position].submit_flush
- end
+ class BufferedOutput < Plugin::BufferedOutput
+ include OutputPluginCompat
+ end
- def format_stream(tag, es)
- out = ''
- es.each {|time,record|
- out << format(tag, time, record)
- }
- out
- end
+ class ObjectBufferedOutput < Plugin::ObjectBufferedOutput
+ include OutputPluginCompat
+ end
- #def format(tag, time, record)
- #end
+ class TimeSlicedOutput < Plugin::TimeSlicedOutput
+ include OutputPluginCompat
+ end
- #def write(chunk)
+ class MultiOutput < Output
+ #def outputs
+ # # TODO
#end
-
- def enqueue_buffer(force = false)
- @buffer.keys.each {|key|
- @buffer.push(key)
- }
- end
-
- def try_flush
- time = Time.now.to_f
-
- empty = @buffer.queue_size == 0
- if empty && @next_flush_time < (now = Time.now.to_f)
- @buffer.synchronize do
- if @next_flush_time < now
- enqueue_buffer
- @next_flush_time = now + @flush_interval
- empty = @buffer.queue_size == 0
- end
- end
- end
- if empty
- return time + @try_flush_interval
- end
-
- begin
- retrying = !@num_errors.zero?
-
- if retrying
- @num_errors_lock.synchronize do
- if retrying = !@num_errors.zero? # re-check in synchronize
- if @next_retry_time >= time
- # allow retrying for only one thread
- return time + @try_flush_interval
- end
- # assume next retry failes and
- # clear them if when it succeeds
- @last_retry_time = time
- @num_errors += 1
- @next_retry_time += calc_retry_wait
- end
- end
- end
-
- if @secondary && !@disable_retry_limit && @num_errors > @retry_limit
- has_next = flush_secondary(@secondary)
- else
- has_next = @buffer.pop(self)
- end
-
- # success
- if retrying
- @num_errors = 0
- # Note: don't notify to other threads to prevent
- # burst to recovered server
- $log.warn "retry succeeded.", :plugin_id=>plugin_id
- end
-
- if has_next
- return Time.now.to_f + @queued_chunk_flush_interval
- else
- return time + @try_flush_interval
- end
-
- rescue => e
- if retrying
- error_count = @num_errors
- else
- # first error
- error_count = 0
- @num_errors_lock.synchronize do
- if @num_errors.zero?
- @last_retry_time = time
- @num_errors += 1
- @next_retry_time = time + calc_retry_wait
- end
- end
- end
-
- if @disable_retry_limit || error_count < @retry_limit
- $log.warn "temporarily failed to flush the buffer.", :next_retry=>Time.at(@next_retry_time), :error_class=>e.class.to_s, :error=>e.to_s, :plugin_id=>plugin_id
- $log.warn_backtrace e.backtrace
-
- elsif @secondary
- if error_count == @retry_limit
- $log.warn "failed to flush the buffer.", :error_class=>e.class.to_s, :error=>e.to_s, :plugin_id=>plugin_id
- $log.warn "retry count exceededs limit. falling back to secondary output."
- $log.warn_backtrace e.backtrace
- retry # retry immediately
- elsif error_count <= @retry_limit + @secondary_limit
- $log.warn "failed to flush the buffer, next retry will be with secondary output.", :next_retry=>Time.at(@next_retry_time), :error_class=>e.class.to_s, :error=>e.to_s, :plugin_id=>plugin_id
- $log.warn_backtrace e.backtrace
- else
- $log.warn "failed to flush the buffer.", :error_class=>e.class, :error=>e.to_s, :plugin_id=>plugin_id
- $log.warn "secondary retry count exceededs limit."
- $log.warn_backtrace e.backtrace
- write_abort
- @num_errors = 0
- end
-
- else
- $log.warn "failed to flush the buffer.", :error_class=>e.class.to_s, :error=>e.to_s, :plugin_id=>plugin_id
- $log.warn "retry count exceededs limit."
- $log.warn_backtrace e.backtrace
- write_abort
- @num_errors = 0
- end
-
- return @next_retry_time
- end
- end
-
- def force_flush
- @num_errors_lock.synchronize do
- @next_retry_time = Time.now.to_f - 1
- end
- enqueue_buffer(true)
- submit_flush
- end
-
- def before_shutdown
- begin
- @buffer.before_shutdown(self)
- rescue
- $log.warn "before_shutdown failed", :error=>$!.to_s
- $log.warn_backtrace
- end
- end
-
- def calc_retry_wait
- # TODO retry pattern
- wait = if @disable_retry_limit || @num_errors <= @retry_limit
- @retry_wait * (2 ** (@num_errors - 1))
- else
- # secondary retry
- @retry_wait * (2 ** (@num_errors - 2 - @retry_limit))
- end
- retry_wait = wait.finite? ? wait + (rand * (wait / 4.0) - (wait / 8.0)) : wait
- @max_retry_wait ? [retry_wait, @max_retry_wait].min : retry_wait
- end
-
- def write_abort
- $log.error "throwing away old logs."
- begin
- @buffer.clear!
- rescue
- $log.error "unexpected error while aborting", :error=>$!.to_s
- $log.error_backtrace
- end
- end
-
- def flush_secondary(secondary)
- @buffer.pop(secondary)
- end
end
-
- class ObjectBufferedOutput < BufferedOutput
- config_param :time_as_integer, :bool, :default => true
-
- def initialize
- super
- end
-
- def emit(tag, es, chain)
- @emit_count += 1
- if @time_as_integer
- data = es.to_msgpack_stream_forced_integer
- else
- data = es.to_msgpack_stream
- end
- key = tag
- if @buffer.emit(key, data, chain)
- submit_flush
- end
+ # Output Chain does nothing currently.
+ # These will be removed at v1.
+ class OutputChain
+ def initialize(array, tag, es, chain=NullOutputChain.instance)
+ @array = array
+ @tag = tag
+ @es = es
+ @offset = 0
+ @chain = chain
end
- module BufferedEventStreamMixin
- include Enumerable
-
- def repeatable?
- true
- end
-
- def each(&block)
- msgpack_each(&block)
- end
-
- def to_msgpack_stream
- read
+ def next
+ if @array.length <= @offset
+ return @chain.next
end
- end
-
- def write(chunk)
- chunk.extend(BufferedEventStreamMixin)
- write_objects(chunk.key, chunk)
+ @offset += 1
+ result = @array[@offset-1].emit(@tag, @es, self)
+ result
end
end
-
- class TimeSlicedOutput < BufferedOutput
- require 'fluent/timezone'
-
- def initialize
- super
- @localtime = true
- #@ignore_old = false # TODO
- end
-
- desc 'The time format used as part of the file name.'
- config_param :time_slice_format, :string, :default => '%Y%m%d'
- desc 'The amount of time Fluentd will wait for old logs to arrive.'
- config_param :time_slice_wait, :time, :default => 10*60
- desc 'Parse the time value in the specified timezone'
- config_param :timezone, :string, :default => nil
- config_set_default :buffer_type, 'file' # overwrite default buffer_type
- config_set_default :buffer_chunk_limit, 256*1024*1024 # overwrite default buffer_chunk_limit
- config_set_default :flush_interval, nil
-
- attr_accessor :localtime
- attr_reader :time_slicer # for test
-
- def configure(conf)
- super
-
- if conf['utc']
- @localtime = false
- elsif conf['localtime']
- @localtime = true
- end
-
- if conf['timezone']
- @timezone = conf['timezone']
- Fluent::Timezone.validate!(@timezone)
- end
-
- if @timezone
- @time_slicer = Timezone.formatter(@timezone, @time_slice_format)
- elsif @localtime
- @time_slicer = Proc.new {|time|
- Time.at(time).strftime(@time_slice_format)
- }
- else
- @time_slicer = Proc.new {|time|
- Time.at(time).utc.strftime(@time_slice_format)
- }
- end
-
- @time_slice_cache_interval = time_slice_cache_interval
- @before_tc = nil
- @before_key = nil
-
- if @flush_interval
- if conf['time_slice_wait']
- $log.warn "time_slice_wait is ignored if flush_interval is specified: #{conf}"
- end
- @enqueue_buffer_proc = Proc.new do
- @buffer.keys.each {|key|
- @buffer.push(key)
- }
- end
-
- else
- @flush_interval = [60, @time_slice_cache_interval].min
- @enqueue_buffer_proc = Proc.new do
- nowslice = @time_slicer.call(Time.now - @time_slice_wait)
- @buffer.keys.each {|key|
- if key < nowslice
- @buffer.push(key)
- end
- }
- end
- end
- end
-
- def emit(tag, es, chain)
- @emit_count += 1
- formatted_data = {}
- es.each {|time,record|
- tc = time / @time_slice_cache_interval
- if @before_tc == tc
- key = @before_key
- else
- @before_tc = tc
- key = @time_slicer.call(time)
- @before_key = key
- end
- formatted_data[key] ||= ''
- formatted_data[key] << format(tag, time, record)
- }
- formatted_data.each { |key, data|
- if @buffer.emit(key, data, chain)
- submit_flush
- end
- }
- end
-
- def enqueue_buffer(force = false)
- if force
- @buffer.keys.each {|key|
- @buffer.push(key)
- }
- else
- @enqueue_buffer_proc.call
- end
- end
-
- #def format(tag, event)
- #end
-
- private
- def time_slice_cache_interval
- if @time_slicer.call(0) != @time_slicer.call(60-1)
- return 1
- elsif @time_slicer.call(0) != @time_slicer.call(60*60-1)
- return 30
- elsif @time_slicer.call(0) != @time_slicer.call(24*60*60-1)
- return 60*30
- else
- return 24*60*30
+ class CopyOutputChain < OutputChain
+ def next
+ if @array.length <= @offset
+ return @chain.next
end
+ @offset += 1
+ es = @array.length > @offset ? @es.dup : @es
+ result = @array[@offset-1].emit(@tag, es, self)
+ result
end
end
+ class NullOutputChain
+ require 'singleton'
+ include Singleton
- class MultiOutput < Output
- #def outputs
- #end
+ def next
+ end
end
end
diff --git a/lib/fluent/parser.rb b/lib/fluent/parser.rb
index b30b47a344..d1f23dc79e 100644
--- a/lib/fluent/parser.rb
+++ b/lib/fluent/parser.rb
@@ -14,9 +14,13 @@
# limitations under the License.
#
-module Fluent
- require 'fluent/registry'
+require 'fluent/registry'
+require 'fluent/configurable'
+require 'fluent/plugin'
+
+# TODO Fluent::Plugin::Parser ?
+module Fluent
class ParserError < StandardError
end
@@ -622,7 +626,6 @@ def check_format_regexp(format, key)
end
end
- TEMPLATE_REGISTRY = Registry.new(:config_type, 'fluent/plugin/parser_')
{
'apache' => Proc.new { RegexpParser.new(/^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$/, {'time_format'=>"%d/%b/%Y:%H:%M:%S %z"}) },
EOS
s = FakeSupervisor.new
- sc = Fluent::Supervisor::SystemConfig.new(conf)
+ sc = Fluent::SystemConfig.new(conf)
sc.apply(s)
assert_equal(Fluent::Log::LEVEL_WARN, s.instance_variable_get("@log").level)
end
diff --git a/test/helper.rb b/test/helper.rb
index 86acd6953b..02f9a3c4e1 100644
--- a/test/helper.rb
+++ b/test/helper.rb
@@ -42,16 +42,27 @@ def to_masked_element
require 'fluent/log'
require 'fluent/test'
+$log = Fluent::Log.new(Fluent::Test::DummyLogDevice.new, Fluent::Log::LEVEL_WARN)
+
unless defined?(Test::Unit::AssertionFailedError)
class Test::Unit::AssertionFailedError < StandardError
end
end
-def unused_port
- s = TCPServer.open(0)
- port = s.addr[1]
- s.close
- port
+def unused_port(num=1)
+ ports = []
+ sockets = []
+ num.times do
+ s = TCPServer.open(0)
+ sockets << s
+ ports << s.addr[1]
+ end
+ sockets.each{|s| s.close }
+ if num == 1
+ ports.first
+ else
+ ports
+ end
end
def ipv6_enabled?
@@ -64,5 +75,3 @@ def ipv6_enabled?
false
end
end
-
-$log = Fluent::Log.new(Fluent::Test::DummyLogDevice.new, Fluent::Log::LEVEL_WARN)
diff --git a/test/plugin/test_in_debug_agent.rb b/test/plugin/test_in_debug_agent.rb
index 4ca94eb682..3de87e0461 100644
--- a/test/plugin/test_in_debug_agent.rb
+++ b/test/plugin/test_in_debug_agent.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fileutils'
+require 'fluent/plugin/in_debug_agent'
class DebugAgentInputTest < Test::Unit::TestCase
def setup
diff --git a/test/plugin/test_in_dummy.rb b/test/plugin/test_in_dummy.rb
index d2bf159bc0..31ee031a25 100644
--- a/test/plugin/test_in_dummy.rb
+++ b/test/plugin/test_in_dummy.rb
@@ -1,13 +1,15 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_dummy'
+require 'fileutils'
class DummyTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
end
- def create_driver(conf)
- Fluent::Test::InputTestDriver.new(Fluent::DummyInput).configure(conf)
+ def create_driver(conf, system_opts={})
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::DummyInput, system_opts).configure(conf)
end
sub_test_case 'configure' do
@@ -70,9 +72,9 @@ def create_driver(conf)
test 'simple' do
d = create_driver(config)
- d.run {
- # d.run sleeps 0.5 sec
- }
+ d.expected_emits_length = 5
+ d.run
+
emits = d.emits
emits.each do |tag, time, record|
assert_equal("dummy", tag)
@@ -83,9 +85,9 @@ def create_driver(conf)
test 'with auto_increment_key' do
d = create_driver(config + %[auto_increment_key id])
- d.run {
- # d.run sleeps 0.5 sec
- }
+ d.expected_emits_length = 5
+ d.run
+
emits = d.emits
emits.each_with_index do |(tag, time, record), i|
assert_equal("dummy", tag)
@@ -93,4 +95,81 @@ def create_driver(conf)
end
end
end
+
+ TEST_PLUGIN_STORAGE_PATH = File.join( File.dirname(File.dirname(__FILE__)), 'tmp', 'in_dummy', 'store' )
+ FileUtils.mkdir_p TEST_PLUGIN_STORAGE_PATH
+
+ sub_test_case "doesn't suspend internal counters in default" do
+ config1 = %[
+ @id test-01
+ tag dummy
+ rate 10
+ dummy [{"x": 1, "y": "1"}, {"x": 2, "y": "2"}, {"x": 3, "y": "3"}]
+ auto_increment_key id
+ ]
+ test "value of auto increment key is not suspended after stop-and-start" do
+ assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))
+
+ d1 = create_driver(config1, plugin_storage_path: TEST_PLUGIN_STORAGE_PATH)
+ d1.expected_emits_length = 4
+ d1.run
+
+ first_id1 = d1.emits.first[2]['id']
+ assert_equal 0, first_id1
+
+ last_id1 = d1.emits.last[2]['id']
+ assert { last_id1 > 0 }
+
+ assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))
+
+ d2 = create_driver(config1, plugin_storage_path: TEST_PLUGIN_STORAGE_PATH)
+ d2.expected_emits_length = 4
+ d2.run
+
+ first_id2 = d2.emits.first[2]['id']
+ assert_equal 0, first_id2
+
+ assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-01.json'))
+ end
+ end
+
+ sub_test_case "suspend internal counters if suspend is true" do
+ setup do
+ FileUtils.rm_rf(TEST_PLUGIN_STORAGE_PATH)
+ end
+
+ config2 = %[
+ @id test-02
+ tag dummy
+ rate 2
+ dummy [{"x": 1, "y": "1"}, {"x": 2, "y": "2"}, {"x": 3, "y": "3"}]
+ auto_increment_key id
+ suspend true
+ ]
+ test "value of auto increment key is suspended after stop-and-start" do
+ assert !File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))
+
+ d1 = create_driver(config2, plugin_storage_path: TEST_PLUGIN_STORAGE_PATH)
+
+ d1.expected_emits_length = 4
+ d1.run
+
+ first_id1 = d1.emits.first[2]['id']
+ assert_equal 0, first_id1
+
+ last_id1 = d1.emits.last[2]['id']
+ assert { last_id1 > 0 }
+
+ assert File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))
+
+ d2 = create_driver(config2, plugin_storage_path: TEST_PLUGIN_STORAGE_PATH)
+ d2.expected_emits_length = 4
+ d2.run
+
+ first_id2 = d2.emits.first[2]['id']
+ assert_equal last_id1 + 1, first_id2
+
+ assert File.exist?(File.join(TEST_PLUGIN_STORAGE_PATH, 'json', 'test-02.json'))
+ end
+ end
end
diff --git a/test/plugin/test_in_exec.rb b/test/plugin/test_in_exec.rb
index 34c7abcea1..96e672c03b 100644
--- a/test/plugin/test_in_exec.rb
+++ b/test/plugin/test_in_exec.rb
@@ -1,59 +1,53 @@
require_relative '../helper'
require 'fluent/test'
require 'net/http'
+require 'fluent/plugin/in_exec'
class ExecInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
- @test_time = Fluent::EventTime.parse("2011-01-02 13:14:15")
- @script = File.expand_path(File.join(File.dirname(__FILE__), '..', 'scripts', 'exec_script.rb'))
end
- def create_driver(conf = tsv_config)
- Fluent::Test::InputTestDriver.new(Fluent::ExecInput).configure(conf)
+ def create_driver(conf=TSV_CONFIG)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::ExecInput).configure(conf)
end
- def tsv_config
- %[
- command ruby #{@script} "2011-01-02 13:14:15" 0
+ TEST_TIME = Time.parse("2011-01-02 13:14:15").to_i
+ SCRIPT = File.expand_path(File.join(File.dirname(__FILE__), '..', 'scripts', 'exec_script.rb'))
+
+ TSV_CONFIG = %[
+ command ruby #{SCRIPT} #{TEST_TIME} 0
keys time,tag,k1
time_key time
tag_key tag
time_format %Y-%m-%d %H:%M:%S
run_interval 1s
- ]
- end
+ ]
- def json_config
- %[
- command ruby #{@script} #{@test_time} 1
+ JSON_CONFIG = %[
+ command ruby #{SCRIPT} #{TEST_TIME} 1
format json
tag_key tag
time_key time
run_interval 1s
- ]
- end
+ ]
- def msgpack_config
- %[
- command ruby #{@script} #{@test_time} 2
+ MSGPACK_CONFIG = %[
+ command ruby #{SCRIPT} #{TEST_TIME} 2
format msgpack
tag_key tagger
time_key datetime
run_interval 1s
- ]
- end
+ ]
- def regexp_config
- %[
- command ruby #{@script} "2011-01-02 13:14:15" 3
+ REGEXP_CONFIG = %[
+ command ruby #{SCRIPT} #{TEST_TIME} 3
format /(?[^\\\]]*) (?[^ ]*)/
tag regex_tag
run_interval 1s
- ]
- end
+ ]
- def test_configure
+ test "configure with basic configuration" do
d = create_driver
assert_equal 'tsv', d.instance.format
assert_equal ["time","tag","k1"], d.instance.keys
@@ -62,75 +56,39 @@ def test_configure
assert_equal "%Y-%m-%d %H:%M:%S", d.instance.time_format
end
- def test_configure_with_json
- d = create_driver json_config
- assert_equal 'json', d.instance.format
+ test "configure with json configuration" do
+ d = create_driver JSON_CONFIG
+ assert_equal :json, d.instance.format
assert_equal [], d.instance.keys
end
- def test_configure_with_msgpack
- d = create_driver msgpack_config
- assert_equal 'msgpack', d.instance.format
+ test "configure with msgpack configuration" do
+ d = create_driver MSGPACK_CONFIG
+ assert_equal :msgpack, d.instance.format
assert_equal [], d.instance.keys
end
- def test_configure_with_regexp
- d = create_driver regexp_config
+ test "configure with regexp configuration" do
+ d = create_driver REGEXP_CONFIG
assert_equal '/(?[^\]]*) (?[^ ]*)/', d.instance.format
assert_equal 'regex_tag', d.instance.tag
end
- # TODO: Merge following tests into one case with parameters
-
- def test_emit
- d = create_driver
-
- d.run do
- sleep 2
- end
-
- emits = d.emits
- assert_equal true, emits.length > 0
- assert_equal ["tag1", @test_time, {"k1"=>"ok"}], emits[0]
- assert_equal_event_time(@test_time, emits[0][1])
- end
-
- def test_emit_json
- d = create_driver json_config
-
- d.run do
- sleep 2
- end
-
- emits = d.emits
- assert_equal true, emits.length > 0
- assert_equal ["tag1", @test_time, {"k1"=>"ok"}], emits[0]
- assert_equal_event_time(@test_time, emits[0][1])
- end
-
- def test_emit_msgpack
- d = create_driver msgpack_config
-
- d.run do
- sleep 2
- end
-
- emits = d.emits
- assert_equal true, emits.length > 0
- assert_equal ["tag1", @test_time, {"k1"=>"ok"}], emits[0]
- assert_equal_event_time(@test_time, emits[0][1])
- end
-
- def test_emit_regexp
- d = create_driver regexp_config
-
- d.run do
- sleep 2
- end
+ data(
+ 'tsv' => TSV_CONFIG,
+ 'json' => JSON_CONFIG,
+ 'msgpack' => MSGPACK_CONFIG,
+ 'regexp' => REGEXP_CONFIG,
+ )
+ test "this plugin emits record from executed command output" do |data|
+ d = create_driver data
+ d.run_timeout = 2
+ d.expected_emits_length = 1
+ d.run
emits = d.emits
- assert_equal true, emits.length > 0
- assert_equal ["regex_tag", @test_time, {"message"=>"hello"}], emits[0]
- assert_equal_event_time(@test_time, emits[0][1])
+ assert emits.length > 0
+ assert_equal ["tag1", TEST_TIME, {"k1"=>"ok"}], emits[0]
+ assert_equal_event_time(TEST_TIME, emits[0][1])
end
end
diff --git a/test/plugin/test_in_forward.rb b/test/plugin/test_in_forward.rb
index 36d5b41d18..4323dcf921 100644
--- a/test/plugin/test_in_forward.rb
+++ b/test/plugin/test_in_forward.rb
@@ -1,6 +1,12 @@
require_relative '../helper'
require 'fluent/test'
require 'base64'
+require 'socket'
+require 'openssl'
+require 'json'
+require 'msgpack'
+
+require 'fluent/plugin/in_forward'
class ForwardInputTest < Test::Unit::TestCase
def setup
@@ -8,14 +14,67 @@ def setup
@responses = [] # for testing responses after sending data
end
- PORT = unused_port
+ PORT, REMOTE_PORT = unused_port(2) # REMOTE_PORT is used for dummy source of heartbeat
+
+ SHARED_KEY = 'foobar2'
+ USER_NAME = 'tagomoris'
+ USER_PASSWORD = 'fluentd'
+
CONFIG = %[
port #{PORT}
bind 127.0.0.1
]
+ SSL_CONFIG = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ version TLSv1.2
+ cert_auto_generate yes
+ digest SHA512
+ algorithm RSA
+ key_length 4096
+
+ ]
+ CONFIG_AUTH = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ shared_key foobar1
+ user_auth true
+
+ username #{USER_NAME}
+ password #{USER_PASSWORD}
+
+
+ network 127.0.0.0/8
+ shared_key #{SHARED_KEY}
+ users ["#{USER_NAME}"]
+
+
+ ]
+ SSL_CONFIG_AUTH = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ cert_auto_generate yes
+
+
+ shared_key foobar1
+ user_auth true
+
+ username #{USER_NAME}
+ password #{USER_PASSWORD}
+
+
+ network 127.0.0.0/8
+ shared_key #{SHARED_KEY}
+ users ["#{USER_NAME}"]
+
+
+ ]
def create_driver(conf=CONFIG)
- Fluent::Test::InputTestDriver.new(Fluent::ForwardInput).configure(conf)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::ForwardInput).configure(conf)
end
def test_configure
@@ -23,120 +82,187 @@ def test_configure
assert_equal PORT, d.instance.port
assert_equal '127.0.0.1', d.instance.bind
assert_equal 0, d.instance.linger_timeout
- assert_equal 0.5, d.instance.blocking_timeout
assert !d.instance.backlog
end
- # TODO: Will add Loop::run arity check with stub/mock library
+ def test_configure_auth
+ d = create_driver(CONFIG_AUTH)
+ assert_equal PORT, d.instance.port
+ assert_equal '127.0.0.1', d.instance.bind
+ assert_equal 0, d.instance.linger_timeout
+ assert !d.instance.backlog
+
+ assert d.instance.security
+ assert_equal 1, d.instance.security.users.size
+ assert_equal 1, d.instance.security.clients.size
+ end
+
+ def test_configure_ssl
+ d = create_driver(SSL_CONFIG)
+ assert_equal PORT, d.instance.port
+ assert_equal '127.0.0.1', d.instance.bind
+ assert d.instance.ssl_options
+ assert_equal :TLSv1_2, d.instance.ssl_options.version
+ assert d.instance.ssl_options.cert_auto_generate
+ assert_equal OpenSSL::Digest::SHA512, d.instance.ssl_options.digest
+ assert_equal OpenSSL::PKey::RSA, d.instance.ssl_options.algorithm
+ assert_equal 4096, d.instance.ssl_options.key_length
+ end
def connect
TCPSocket.new('127.0.0.1', PORT)
end
+ def connect_ssl
+ sock = OpenSSL::SSL::SSLSocket.new(TCPSocket.new('127.0.0.1', PORT))
+ sock.sync = true
+ sock.sync_close = true
+ sock.connect
+ sock
+ end
+
def test_time
d = create_driver
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
Fluent::Engine.now = time
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ records = [
+ ["tag", time, {"a" => 1}],
+ ["tag2", time, {"a" => 2}],
+ ]
+ d.expected_emits_length = records.length
+ d.run_timeout = 2
d.run do
- d.expected_emits.each {|tag,time,record|
- send_data Fluent::Engine.msgpack_factory.packer.write([tag, 0, record]).to_s
+ records.each {|tag,time,record|
+ send_data false, false, Fluent::Engine.msgpack_factory.packer.write([tag, 0, record]).to_s
}
end
+ assert_equal records, d.emits.sort{|a,b| a[0] <=> b[0] }
end
+ data('tcp' => [CONFIG, false], 'ssl' => [SSL_CONFIG, true])
def test_message
- d = create_driver
+ conf, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
-
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}],
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
- send_data Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s
+ records.each {|tag,time,record|
+ send_data ssl, Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s
}
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false], 'ssl' => [SSL_CONFIG, true])
def test_message_with_time_as_integer
- d = create_driver
+ conf, ssl = data
+ d = create_driver(conf)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time+1, {"a"=>2}],
+ ]
+ d.expected_emits_length = records.length
+ d.run_timeout = 2
d.run do
- d.expected_emits.each {|tag,time,record|
- send_data Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s
+ records.each {|tag,time,record|
+ send_data ssl, false, Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s
}
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true])
def test_message_with_skip_invalid_event
- d = create_driver(CONFIG + "skip_invalid_event true")
+ conf, auth, ssl = data
+ d = create_driver(conf + "skip_invalid_event true")
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
- d.expect_emit "tag1", time, {"a" => 1}
- d.expect_emit "tag2", time, {"a" => 2}
+ records = [
+ ["tag1", time, {"a" => 1}],
+ ["tag2", time, {"a" => 2}],
+ ]
d.run do
- entries = d.expected_emits.map { |tag, time, record| [tag, time, record] }
# These entries are skipped
- entries << ['tag1', true, {'a' => 3}] << ['tag2', time, 'invalid record']
+ records << ['tag1', true, {'a' => 3}] << ['tag2', time, 'invalid record']
- entries.each { |tag, time, record|
+ records.each { |tag, time, record|
# Without ack, logs are sometimes not saved to logs during test.
- send_data Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s, true
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write([tag, time, record]).to_s, true
}
end
-
assert_equal 2, d.instance.log.logs.count { |line| line =~ /got invalid event and drop it/ }
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
- def test_forward
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_forward(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag1", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag1", time, {"a"=>2}],
+ ]
d.run do
entries = []
- d.expected_emits.each {|tag,time,record|
+ records.each {|tag,time,record|
entries << [time, record]
}
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
def test_forward_with_time_as_integer
- d = create_driver
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag1", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag1", time+1, {"a"=>2}],
+ ]
+ d.expected_emits_length = records.length
+ d.run_timeout = 2
d.run do
entries = []
- d.expected_emits.each {|tag,time,record|
+ records.each {|tag,time,record|
entries << [time, record]
}
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
def test_forward_with_skip_invalid_event
- d = create_driver(CONFIG + "skip_invalid_event true")
+ conf, auth, ssl = data
+ d = create_driver(conf + "skip_invalid_event true")
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
@@ -148,56 +274,75 @@ def test_forward_with_skip_invalid_event
# These entries are skipped
entries << ['invalid time', {'a' => 3}] << [time, 'invalid record']
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
end
-
assert_equal 2, d.instance.log.logs.count { |line| line =~ /skip invalid event/ }
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
- def test_packed_forward
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_packed_forward(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag1", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag1", time, {"a"=>2}],
+ ]
d.run do
entries = ''
- d.expected_emits.each {|tag,time,record|
+ records.each {|tag,time,record|
Fluent::Engine.msgpack_factory.packer(entries).write([time, record]).flush
}
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
def test_packed_forward_with_time_as_integer
+ conf, auth, ssl = data
d = create_driver
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag1", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag1", time+1, {"a"=>2}],
+ ]
+ d.expected_emits_length = records.length
+ d.run_timeout = 2
d.run do
entries = ''
- d.expected_emits.each {|tag,time,record|
+ records.each {|tag,time,record|
Fluent::Engine.msgpack_factory.packer(entries).write([time, record]).flush
}
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", entries]).to_s
end
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
def test_packed_forward_with_skip_invalid_event
- d = create_driver(CONFIG + "skip_invalid_event true")
+ conf, auth, ssl = data
+ d = create_driver(conf + "skip_invalid_event true")
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
- d.expect_emit "tag1", time, {"a" => 1}
- d.expect_emit "tag1", time, {"a" => 2}
+ records = [
+ ["tag1", time, {"a" => 1}],
+ ["tag1", time, {"a" => 2}],
+ ]
d.run do
- entries = d.expected_emits.map { |tag ,time, record| [time, record] }
+ entries = records.map { |tag, time, record| [time, record] }
# These entries are skipped
entries << ['invalid time', {'a' => 3}] << [time, 'invalid record']
@@ -205,25 +350,33 @@ def test_packed_forward_with_skip_invalid_event
entries.each { |time, record|
Fluent::Engine.msgpack_factory.packer(packed_entries).write([time, record]).flush
}
- send_data Fluent::Engine.msgpack_factory.packer.write(["tag1", packed_entries]).to_s
+ send_data auth, ssl, Fluent::Engine.msgpack_factory.packer.write(["tag1", packed_entries]).to_s
end
-
assert_equal 2, d.instance.log.logs.count { |line| line =~ /skip invalid event/ }
+ assert_equal records[0], d.emits[0]
+ assert_equal records[1], d.emits[1]
end
- def test_message_json
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true]) # with json, auth doesn't work
+ def test_message_json(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ records = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time+1, {"a"=>2}],
+ ]
+ d.expected_emits_length = records.length
+ d.run_timeout = 2
d.run do
- d.expected_emits.each {|tag,time,record|
- send_data [tag, time, record].to_json
+ records.each {|tag,time,record|
+ send_data auth, ssl, [tag, time, record].to_json
}
end
+ assert_equal records, d.emits.sort{|a,b| a[1] <=> b[1] }
end
def test_send_large_chunk_warning
@@ -240,9 +393,11 @@ def test_send_large_chunk_warning
assert chunk.size > (16 * 1024 * 1024)
assert chunk.size < (32 * 1024 * 1024)
+ d.expected_emits_length = 16
+ d.run_timeout = 2
d.run do
Fluent::Engine.msgpack_factory.unpacker.feed_each(chunk) do |obj|
- d.instance.send(:on_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
+ d.instance.send(:emit_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
end
end
@@ -269,9 +424,11 @@ def test_send_large_chunk_only_warning
str = "X" * 1024 * 1024
chunk = [ "test.tag", (0...16).map{|i| [time + i, {"data" => str}] } ].to_msgpack
+ d.expected_emits_length = 16
+ d.run_timeout = 2
d.run do
Fluent::Engine.msgpack_factory.unpacker.feed_each(chunk) do |obj|
- d.instance.send(:on_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
+ d.instance.send(:emit_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
end
end
@@ -295,10 +452,14 @@ def test_send_large_chunk_limit
chunk = [ "test.tag", (0...32).map{|i| [time + i, {"data" => str}] } ].to_msgpack
assert chunk.size > (32 * 1024 * 1024)
+ d.end_if {
+ d.instance.log.logs.select{|line| line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_limit', dropped:/ }.size == 1
+ }
+ d.run_timeout = 2
# d.run => send_data
d.run do
Fluent::Engine.msgpack_factory.unpacker.feed_each(chunk) do |obj|
- d.instance.send(:on_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
+ d.instance.send(:emit_message, obj, chunk.size, "host: 127.0.0.1, addr: 127.0.0.1, port: 0000")
end
end
@@ -333,16 +494,19 @@ def test_send_broken_chunk(data)
}.size == 1, "should not accept broken chunk"
end
- def test_respond_to_message_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_respond_to_message_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag2", time, {"a"=>2}]
+ ["tag2", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
expected_acks = []
@@ -350,25 +514,27 @@ def test_respond_to_message_requiring_ack
events.each {|tag,time,record|
op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
expected_acks << op['chunk']
- send_data [tag, time, record, op].to_msgpack, true
+ send_data auth, ssl, [tag, time, record, op].to_msgpack, true
}
end
- assert_equal events, d.emits
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
- # FIX: response is not pushed into @responses because IO.select has been blocked until InputForward shutdowns
- def test_respond_to_forward_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_respond_to_forward_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag1", time, {"a"=>2}]
+ ["tag1", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
expected_acks = []
@@ -379,23 +545,26 @@ def test_respond_to_forward_requiring_ack
}
op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
expected_acks << op['chunk']
- send_data ["tag1", entries, op].to_msgpack, true
+ send_data auth, ssl, ["tag1", entries, op].to_msgpack, true
end
- assert_equal events, d.emits
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
- def test_respond_to_packed_forward_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_respond_to_packed_forward_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag1", time, {"a"=>2}]
+ ["tag1", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
expected_acks = []
@@ -406,23 +575,26 @@ def test_respond_to_packed_forward_requiring_ack
}
op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
expected_acks << op['chunk']
- send_data ["tag1", entries, op].to_msgpack, true
+ send_data auth, ssl, ["tag1", entries, op].to_msgpack, true
end
- assert_equal events, d.emits
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
end
- def test_respond_to_message_json_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true]) # with json, auth doesn't work
+ def test_respond_to_message_json_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time, {"a"=>1}],
- ["tag2", time, {"a"=>2}]
+ ["tag2", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
expected_acks = []
@@ -430,118 +602,398 @@ def test_respond_to_message_json_requiring_ack
events.each {|tag,time,record|
op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
expected_acks << op['chunk']
- send_data [tag, time, record, op].to_json, true
+ send_data auth, ssl, [tag, time, record, op].to_json, true
}
end
- assert_equal events, d.emits
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
assert_equal expected_acks, @responses.map { |res| JSON.parse(res)['ack'] }
-
end
- def test_not_respond_to_message_not_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_not_respond_to_message_not_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag2", time, {"a"=>2}]
+ ["tag2", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
d.run do
events.each {|tag,time,record|
- send_data [tag, time, record].to_msgpack, true
+ send_data auth, ssl, [tag, time, record].to_msgpack, true
}
end
- assert_equal events, d.emits
- assert_equal [nil, nil], @responses
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
+ assert_equal ["", ""], @responses
end
- def test_not_respond_to_forward_not_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_not_respond_to_forward_not_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag1", time, {"a"=>2}]
+ ["tag1", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
d.run do
entries = []
events.each {|tag,time,record|
entries << [time, record]
}
- send_data ["tag1", entries].to_msgpack, true
+ send_data auth, ssl, ["tag1", entries].to_msgpack, true
end
- assert_equal events, d.emits
- assert_equal [nil], @responses
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
+ assert_equal [""], @responses
end
- def test_not_respond_to_packed_forward_not_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true], 'auth' => [CONFIG_AUTH, true, false], 'auth_ssl' => [SSL_CONFIG_AUTH, true, true])
+ def test_not_respond_to_packed_forward_not_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Fluent::EventTime.parse("2011-01-02 13:14:15 UTC")
events = [
["tag1", time, {"a"=>1}],
- ["tag1", time, {"a"=>2}]
+ ["tag1", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
d.run do
entries = ''
events.each {|tag,time,record|
[time, record].to_msgpack(entries)
}
- send_data ["tag1", entries].to_msgpack, true
+ send_data auth, ssl, ["tag1", entries].to_msgpack, true
end
- assert_equal events, d.emits
- assert_equal [nil], @responses
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
+ assert_equal [""], @responses
end
- def test_not_respond_to_message_json_not_requiring_ack
- d = create_driver
+ data('tcp' => [CONFIG, false, false], 'ssl' => [SSL_CONFIG, false, true]) # with json, auth does NOT work
+ def test_not_respond_to_message_json_not_requiring_ack(data)
+ conf, auth, ssl = data
+ d = create_driver(conf)
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
events = [
["tag1", time, {"a"=>1}],
- ["tag2", time, {"a"=>2}]
+ ["tag2", time+1, {"a"=>2}]
]
d.expected_emits_length = events.length
+ d.run_timeout = 2
d.run do
events.each {|tag,time,record|
- send_data [tag, time, record].to_json, true
+ send_data auth, ssl, [tag, time, record].to_json, true
}
end
- assert_equal events, d.emits
- assert_equal [nil, nil], @responses
+ assert_equal events, d.emits.sort{|a,b| a[1] <=> b[1] }
+ assert_equal ["", ""], @responses
+ end
+
+ def test_heartbeat_reply
+ heartbeat_data = nil
+ heartbeat_addr = nil
+
+ bind = '0.0.0.0'
+ listener_thread = Thread.new {
+ usock = if IPAddr.new(IPSocket.getaddress(bind)).ipv4?
+ UDPSocket.new
+ else
+ UDPSocket.new(::Socket::AF_INET6)
+ end
+ usock.bind(bind, REMOTE_PORT)
+ heartbeat_arrived = false
+ until heartbeat_arrived
+ if IO.select([usock])
+ heartbeat_data, heartbeat_addr = usock.recvfrom_nonblock(1024)
+ heartbeat_arrived = true
+ end
+ end
+ }
+ until listener_thread.alive?
+ sleep 0.01
+ end
+
+ d = create_driver
+ d.end_if { ! heartbeat_data.nil? }
+ d.run_timeout = 60
+ d.run do
+ sock = if IPAddr.new(IPSocket.getaddress(bind)).ipv4?
+ UDPSocket.new
+ else
+ UDPSocket.new(::Socket::AF_INET6)
+ end
+ until heartbeat_data
+ sock.send("\0", 0, '127.0.0.1', REMOTE_PORT)
+ unless heartbeat_data
+ sleep 1
+ end
+ end
+ end
+
+ assert_equal "\0", heartbeat_data
+ assert_equal "127.0.0.1", heartbeat_addr[2]
+ end
+
+ CONFIG_AUTH_TEST = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ shared_key foobar1
+ user_auth true
+
+ username #{USER_NAME}
+ password #{USER_PASSWORD}
+
+
+ network 127.0.0.0/8
+ shared_key #{SHARED_KEY}
+
+
+ ]
+
+ data('tcp' => false, 'ssl' => true)
+ def test_auth_invalid_shared_key(data)
+ ssl = data
+ conf = if ssl
+ CONFIG_AUTH_TEST + %[\n cert_auto_generate yes \n\n]
+ else
+ CONFIG_AUTH_TEST
+ end
+ d = create_driver(conf)
+ error = nil
+ d.run do
+ io = ssl ? connect_ssl : connect
+ begin
+ simulate_auth_sequence(io, 'fake shared key', USER_NAME, USER_PASSWORD)
+ rescue => e
+ error = e
+ # "assert_raise" raises LocalJumpError here
+ end
+ end
+ assert_equal RuntimeError, error.class
+ assert_equal "Authentication Failure: shared_key mismatch", error.message
+ end
+
+ data('tcp' => false, 'ssl' => true)
+ def test_auth_invalid_password(data)
+ ssl = data
+ conf = if ssl
+ CONFIG_AUTH_TEST + %[\n cert_auto_generate yes \n\n]
+ else
+ CONFIG_AUTH_TEST
+ end
+ d = create_driver(conf)
+ error = nil
+ d.run do
+ io = ssl ? connect_ssl : connect
+ begin
+ simulate_auth_sequence(io, SHARED_KEY, USER_NAME, 'fake-password')
+ rescue RuntimeError => e
+ error = e
+ # "assert_raise" raises LocalJumpError here
+ end
+ end
+ assert_equal "Authentication Failure: username/password mismatch", error.message
+ end
+
+ CONFIG_AUTH_TEST_2 = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ shared_key foobar1
+ user_auth true
+
+ username tagomoris
+ password fluentd
+
+
+ username frsyuki
+ password embulk
+
+
+ network 127.0.0.0/8
+ shared_key #{SHARED_KEY}
+ users ["tagomoris"]
+
+
+ ]
+
+ data('tcp' => false, 'ssl' => true)
+ def test_auth_disallowed_user(data)
+ ssl = data
+ conf = if ssl
+ CONFIG_AUTH_TEST_2 + %[\n cert_auto_generate yes \n\n]
+ else
+ CONFIG_AUTH_TEST_2
+ end
+ d = create_driver(conf)
+ error = nil
+ d.run do
+ io = ssl ? connect_ssl : connect
+ begin
+ simulate_auth_sequence(io, SHARED_KEY, 'frsyuki', 'embulk')
+ rescue RuntimeError => e
+ error = e
+ # "assert_raise" raises LocalJumpError here
+ end
+ end
+ assert_equal "Authentication Failure: username/password mismatch", error.message
end
- def send_data(data, try_to_receive_response=false, response_timeout=1)
- io = connect
+ CONFIG_AUTH_TEST_3 = %[
+ port #{PORT}
+ bind 127.0.0.1
+
+ shared_key foobar1
+ user_auth true
+ allow_anonymous_source no
+
+ username tagomoris
+ password fluentd
+
+
+ network 192.168.0.0/24
+ shared_key #{SHARED_KEY}
+ users ["tagomoris"]
+
+
+ ]
+
+ data('tcp' => false, 'ssl' => true)
+ def test_auth_anonymous_host(data)
+ ssl = data
+ conf = if ssl
+ CONFIG_AUTH_TEST_3 + %[\n cert_auto_generate yes \n\n]
+ else
+ CONFIG_AUTH_TEST_3
+ end
+ d = create_driver(conf)
+ error = nil
+ d.run do
+ io = ssl ? connect_ssl : connect
+ begin
+ simulate_auth_sequence(io, SHARED_KEY, 'frsyuki', 'embulk')
+ rescue RuntimeError => e
+ error = e
+ # "assert_raise" raises LocalJumpError here
+ end
+ end
+ assert_equal "Authentication Failure: anonymous source host '127.0.0.1' denied", error.message
+ end
+
+ # res: return value of read_data
+ # '' : socket is disconnected without any data
+ # nil: socket read timeout
+ def read_data(io, timeout)
+ res = ''
+ timeout_at = Time.now + timeout
begin
- io.write data
- if try_to_receive_response
- if IO.select([io], nil, nil, response_timeout)
- res = io.recv(1024)
+ buf = ''
+ while io.read_nonblock(2048, buf)
+ if buf == ''
+ sleep 0.01
+ break if Time.now >= timeout_at
+ next
end
- # timeout means no response, so push nil to @responses
+ res << buf
+ buf = ''
end
- ensure
- io.close
+ res = nil # timeout
+ rescue IO::EAGAINWaitReadable
+ sleep 0.01
+ retry if res == ''
+ # if res is not empty, all data in the socket buffer have been read, so do not retry
+ rescue OpenSSL::SSL::SSLErrorWaitReadable
+ sleep 0.01
+ retry if res == ''
+ # if res is not empty, all data in the socket buffer have been read, so do not retry
+ rescue IOError, EOFError, Errno::ECONNRESET
+ # socket disconnected
+ end
+ res
+ end
+
+ def simulate_auth_sequence(io, shared_key=SHARED_KEY, username=USER_NAME, password=USER_PASSWORD)
+ auth_response_timeout = 2
+ shared_key_salt = 'salt'
+
+ # reading helo
+ helo_data = read_data(io, auth_response_timeout)
+ # ['HELO', options(hash)]
+ helo = MessagePack.unpack(helo_data)
+ raise "Invalid HELO header" unless helo[0] == 'HELO'
+ raise "Invalid HELO option object" unless helo[1].is_a?(Hash)
+ @options = helo[1]
+
+ # sending ping
+ ping = [
+ 'PING',
+ 'selfhostname',
+ shared_key_salt,
+ Digest::SHA512.new.update(shared_key_salt).update('selfhostname').update(shared_key).hexdigest,
+ ]
+ if @options['auth'] # auth enabled -> value is auth salt
+ pass_digest = Digest::SHA512.new.update(@options['auth']).update(username).update(password).hexdigest
+ ping.push(username, pass_digest)
+ else
+ ping.push('', '')
+ end
+ io.write ping.to_msgpack
+ io.flush
+
+ # reading pong
+ pong_data = read_data(io, auth_response_timeout)
+ # ['PING', bool(auth_result), string(reason_if_failed), self_hostname, shared_key_digest]
+ pong = MessagePack.unpack(pong_data)
+ raise "Invalid PONG header" unless pong[0] == 'PONG'
+ raise "Authentication Failure: #{pong[2]}" unless pong[1]
+ clientside_calculated = Digest::SHA512.new.update(shared_key_salt).update(pong[3]).update(shared_key).hexdigest
+ raise "Shared key digest mismatch" unless clientside_calculated == pong[4]
+
+ # authentication success
+ true
+ end
+
+ # Data ordering is not assured:
+ # Records in different sockets are processed on different threads, so thread scheduling affects
+ # the order of emitted records.
+ # So, we MUST sort records emitted via different `send_data` calls before assertion.
+ def send_data(auth, ssl, data, try_to_receive_response=false, response_timeout=5)
+ io = ssl ? connect_ssl : connect
+
+ if auth
+ simulate_auth_sequence(io)
+ end
+
+ res = nil
+ io.write data
+ io.flush
+ if try_to_receive_response
+ @responses << read_data(io, response_timeout)
end
- @responses << res if try_to_receive_response
+ ensure
+ io.close rescue nil # SSL socket requires any writes to close sockets
end
- # TODO heartbeat
end
diff --git a/test/plugin/test_in_gc_stat.rb b/test/plugin/test_in_gc_stat.rb
deleted file mode 100644
index ff0638251f..0000000000
--- a/test/plugin/test_in_gc_stat.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-require_relative '../helper'
-require 'fluent/test'
-
-class GCStatInputTest < Test::Unit::TestCase
- def setup
- Fluent::Test.setup
- end
-
- CONFIG = %[
- emit_interval 1
- tag t1
- ]
-
- def create_driver(conf=CONFIG)
- Fluent::Test::InputTestDriver.new(Fluent::GCStatInput).configure(conf)
- end
-
- def test_configure
- d = create_driver
- assert_equal(1, d.instance.emit_interval)
- assert_equal("t1", d.instance.tag)
- end
-
- def test_emit
- stat = GC.stat
- stub(GC).stat { stat }
-
- d = create_driver
- d.run do
- sleep 2
- end
-
- emits = d.emits
- assert(emits.length > 0)
- assert_equal(stat, emits[0][2])
- assert(emits[0][1].is_a?(Fluent::EventTime))
- end
-end
diff --git a/test/plugin/test_in_http.rb b/test/plugin/test_in_http.rb
index 0ecc0706d4..0569378a34 100644
--- a/test/plugin/test_in_http.rb
+++ b/test/plugin/test_in_http.rb
@@ -1,6 +1,7 @@
require_relative '../helper'
require 'fluent/test'
require 'net/http'
+require 'fluent/plugin/in_http'
class HttpInputTest < Test::Unit::TestCase
def setup
@@ -17,7 +18,7 @@ def setup
]
def create_driver(conf=CONFIG)
- Fluent::Test::InputTestDriver.new(Fluent::HttpInput).configure(conf, true)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::HttpInput).configure(conf, true)
end
def test_configure
@@ -33,17 +34,24 @@ def test_time
d = create_driver
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
+ # Why does this statement work well and NOT affect any other tests?
Fluent::Engine.now = time
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ expected = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}]
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
+ expected.each do |tag, time, record|
res = post("/#{tag}", {"json"=>record.to_json})
assert_equal "200", res.code
- }
+ end
end
+
+ assert_equal 2, d.emits.size
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
end
def test_time_as_float
@@ -67,16 +75,22 @@ def test_json
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ expected = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}],
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
+ expected.each {|tag, time, record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time.to_s})
assert_equal "200", res.code
}
end
+ assert_equal 2, d.emits.size
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
+
d.emit_streams.each { |tag, es|
assert !include_http_header?(es.first[1])
}
@@ -90,15 +104,13 @@ def test_multi_json
events = [{"a"=>1},{"a"=>2}]
tag = "tag1"
- events.each { |ev|
- d.expect_emit tag, time, ev
- }
-
d.run do
res = post("/#{tag}", {"json"=>events.to_json, "time"=>time.to_s})
assert_equal "200", res.code
end
+ assert_equal ["tag1", time, events[0]], d.emits[0]
+ assert_equal ["tag1", time, events[1]], d.emits[1]
end
def test_json_with_add_remote_addr
@@ -159,16 +171,15 @@ def test_json_with_add_http_headers
records = [["tag1", time, {"a"=>1}], ["tag2", time, {"a"=>2}]]
d.run do
- records.each {|tag,time,record|
+ records.each do |tag,time,record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time.to_s})
assert_equal "200", res.code
-
- }
+ end
end
- d.emit_streams.each { |tag, es|
+ d.emit_streams.each do |tag, es|
assert include_http_header?(es.first[1])
- }
+ end
end
def test_application_json
@@ -176,15 +187,20 @@ def test_application_json
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ expected = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}],
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
+ expected.each do |tag,time,record|
res = post("/#{tag}?time=#{time.to_s}", record.to_json, {"content-type"=>"application/json; charset=utf-8"})
assert_equal "200", res.code
- }
+ end
end
+
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
end
def test_msgpack
@@ -192,15 +208,20 @@ def test_msgpack
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ expected = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}],
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
+ expected.each do |tag,time,record|
res = post("/#{tag}", {"msgpack"=>record.to_msgpack, "time"=>time.to_s})
assert_equal "200", res.code
- }
+ end
end
+
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
end
def test_multi_msgpack
@@ -211,15 +232,13 @@ def test_multi_msgpack
events = [{"a"=>1},{"a"=>2}]
tag = "tag1"
- events.each { |ev|
- d.expect_emit tag, time, ev
- }
-
d.run do
res = post("/#{tag}", {"msgpack"=>events.to_msgpack, "time"=>time.to_s})
assert_equal "200", res.code
end
+ assert_equal ["tag1", time, events[0]], d.emits[0]
+ assert_equal ["tag1", time, events[1]], d.emits[1]
end
def test_with_regexp
@@ -230,18 +249,23 @@ def test_with_regexp
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"field_1" => 1, "field_2" => 'str'}
- d.expect_emit "tag2", time, {"field_1" => 2, "field_2" => 'str'}
+ expected = [
+ ["tag1", time, {"field_1" => 1, "field_2" => 'str'}],
+ ["tag2", time, {"field_1" => 2, "field_2" => 'str'}],
+ ]
d.run do
- d.expected_emits.each { |tag, time, record|
+ expected.each do |tag, time, record|
body = record.map { |k, v|
v.to_s
}.join(':')
res = post("/#{tag}?time=#{time.to_s}", body)
assert_equal "200", res.code
- }
+ end
end
+
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
end
def test_with_csv
@@ -254,16 +278,21 @@ def test_with_csv
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"foo" => "1", "bar" => 'st"r'}
- d.expect_emit "tag2", time, {"foo" => "2", "bar" => 'str'}
+ expected = [
+ ["tag1", time, {"foo" => "1", "bar" => 'st"r'}],
+ ["tag2", time, {"foo" => "2", "bar" => 'str'}],
+ ]
d.run do
- d.expected_emits.each { |tag, time, record|
+ expected.each do |tag, time, record|
body = record.map { |k, v| v }.to_csv
res = post("/#{tag}?time=#{time.to_s}", body)
assert_equal "200", res.code
- }
+ end
end
+
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
end
def test_resonse_with_empty_img
@@ -272,54 +301,55 @@ def test_resonse_with_empty_img
time = Fluent::EventTime.new(Time.parse("2011-01-02 13:14:15 UTC").to_i)
- d.expect_emit "tag1", time, {"a"=>1}
- d.expect_emit "tag2", time, {"a"=>2}
+ expected = [
+ ["tag1", time, {"a"=>1}],
+ ["tag2", time, {"a"=>2}],
+ ]
d.run do
- d.expected_emits.each {|tag,time,record|
+ expected.each do |tag,time,record|
res = post("/#{tag}", {"json"=>record.to_json, "time"=>time.to_s})
assert_equal "200", res.code
# Ruby returns ASCII-8 encoded string for GIF.
- assert_equal Fluent::HttpInput::EMPTY_GIF_IMAGE, res.body.force_encoding("UTF-8")
- }
+ assert_equal Fluent::Plugin::HttpInput::EMPTY_GIF_IMAGE, res.body.force_encoding("UTF-8")
+ end
end
+
+ assert_equal expected[0], d.emits[0]
+ assert_equal expected[1], d.emits[1]
+ end
+
+ $test_in_http_connection_object_ids = []
+ $test_in_http_content_types = []
+ $test_in_http_content_types_flag = false
+ module ContentTypeHook
+ def on_message_begin
+ super
+ if $test_in_http_content_types_flag
+ $test_in_http_connection_object_ids << @io_handler.object_id
+ end
+ end
+
+ def on_headers_complete(headers)
+ super
+ if $test_in_http_content_types_flag
+ $test_in_http_content_types << self.content_type
+ end
+ end
+ end
+
+ class Fluent::Plugin::HttpInput::HttpParserHandler
+ prepend ContentTypeHook
end
def test_if_content_type_is_initialized_properly
# This test is to check if Fluent::HttpInput::Handler's @content_type is initialized properly.
# Especially when in Keep-Alive and the second request has no 'Content-Type'.
- #
- # Actually, in the current implementation of in_http, we can't test it directly.
- # So we replace Fluent::HttpInput::Handler temporally with the extended Handler
- # in order to collect @content_type(s) per request.
- # Finally, we check those collected @content_type(s).
-
- # Save the original Handler
- orig_handler = Fluent::HttpInput::Handler
begin
- # Create the extended Handler which can store @content_type per request
- ext_handler = Class.new(Fluent::HttpInput::Handler) do
- @@content_types = []
-
- def self.content_types
- @@content_types
- end
-
- def on_message_complete
- @@content_types << @content_type
- super
- end
- end
-
- # Replace the original Handler temporally with the extended one
- Fluent::HttpInput.module_eval do
- remove_const(:Handler) if const_defined?(:Handler)
- const_set(:Handler, ext_handler)
- end
-
d = create_driver
+ $test_in_http_content_types_flag = true
d.run do
# Send two requests the second one has no Content-Type in Keep-Alive
Net::HTTP.start("127.0.0.1", PORT) do |http|
@@ -329,16 +359,13 @@ def on_message_complete
req = Net::HTTP::Get.new("/foodb/bartbl", {"connection" => "keepalive"})
res = http.request(req)
end
-
- assert_equal(['application/json', ''], ext_handler.content_types)
end
ensure
- # Revert the original Handler
- Fluent::HttpInput.module_eval do
- remove_const(:Handler) if const_defined?(:Handler)
- const_set(:Handler, orig_handler)
- end
+ $test_in_http_content_types_flag = false
end
+ assert_equal(['application/json', ''], $test_in_http_content_types)
+ # Asserting keepalive
+ assert_equal $test_in_http_connection_object_ids[0], $test_in_http_connection_object_ids[1]
end
def post(path, params, header = {})
diff --git a/test/plugin/test_in_object_space.rb b/test/plugin/test_in_object_space.rb
index fc370e9529..7a3cc4d2a4 100644
--- a/test/plugin/test_in_object_space.rb
+++ b/test/plugin/test_in_object_space.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_object_space'
class ObjectSpaceInputTest < Test::Unit::TestCase
class FailObject
diff --git a/test/plugin/test_in_stream.rb b/test/plugin/test_in_stream.rb
index 2501832f17..462eb24308 100644
--- a/test/plugin/test_in_stream.rb
+++ b/test/plugin/test_in_stream.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_stream'
module StreamInputTest
def setup
diff --git a/test/plugin/test_in_syslog.rb b/test/plugin/test_in_syslog.rb
index d65c4afd32..3bb9ea6d36 100755
--- a/test/plugin/test_in_syslog.rb
+++ b/test/plugin/test_in_syslog.rb
@@ -1,10 +1,10 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_syslog'
class SyslogInputTest < Test::Unit::TestCase
def setup
Fluent::Test.setup
- require 'fluent/plugin/socket_util'
end
PORT = unused_port
@@ -21,7 +21,7 @@ def setup
]
def create_driver(conf=CONFIG)
- Fluent::Test::InputTestDriver.new(Fluent::SyslogInput).configure(conf)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::SyslogInput).configure(conf)
end
def test_configure
@@ -47,13 +47,17 @@ def test_time_format
{'msg' => '<6>Sep 1 00:00:00 localhost logger: foo', 'expected' => Fluent::EventTime.from_time(Time.strptime('Sep 1 00:00:00', '%b %d %H:%M:%S'))},
]
+ d.expected_emits_length = 1
d.run do
- u = Fluent::SocketUtil.create_udp_socket(k)
+ u = if IPAddr.new(IPSocket.getaddress(k)).ipv4?
+ UDPSocket.new
+ else
+ UDPSocket.new(Socket::AF_INET6)
+ end
u.connect(k, PORT)
tests.each {|test|
u.send(test['msg'], 0)
}
- sleep 1
end
emits = d.emits
@@ -67,13 +71,13 @@ def test_msg_size
d = create_driver
tests = create_test_case
+ d.expected_emits_length = 1
d.run do
u = UDPSocket.new
u.connect('127.0.0.1', PORT)
tests.each {|test|
u.send(test['msg'], 0)
}
- sleep 1
end
compare_test_result(d.emits, tests)
@@ -83,13 +87,13 @@ def test_msg_size_with_tcp
d = create_driver([CONFIG, 'protocol_type tcp'].join("\n"))
tests = create_test_case
+ d.expected_emits_length = 2
d.run do
tests.each {|test|
TCPSocket.open('127.0.0.1', PORT) do |s|
s.send(test['msg'], 0)
end
}
- sleep 1
end
compare_test_result(d.emits, tests)
@@ -99,13 +103,13 @@ def test_msg_size_with_same_tcp_connection
d = create_driver([CONFIG, 'protocol_type tcp'].join("\n"))
tests = create_test_case
+ d.expected_emits_length = 2
d.run do
TCPSocket.open('127.0.0.1', PORT) do |s|
tests.each {|test|
s.send(test['msg'], 0)
}
end
- sleep 1
end
compare_test_result(d.emits, tests)
@@ -119,23 +123,24 @@ def test_msg_size_with_json_format
{'msg' => '<6>' + event.to_json + "\n", 'expected' => msg}
}
+ d.expected_emits_length = 2
d.run do
u = UDPSocket.new
u.connect('127.0.0.1', PORT)
tests.each {|test|
u.send(test['msg'], 0)
}
- sleep 1
end
compare_test_result(d.emits, tests)
end
def test_msg_size_with_include_source_host
- d = create_driver([CONFIG, 'include_source_host'].join("\n"))
+ d = create_driver([CONFIG, 'include_source_host yes'].join("\n"))
tests = create_test_case
host = nil
+ d.expected_emits_length = 2
d.run do
u = UDPSocket.new
u.connect('127.0.0.1', PORT)
@@ -143,7 +148,6 @@ def test_msg_size_with_include_source_host
tests.each {|test|
u.send(test['msg'], 0)
}
- sleep 1
end
compare_test_result(d.emits, tests, host)
diff --git a/test/plugin/test_in_tail.rb b/test/plugin/test_in_tail.rb
index 21f40ff7b2..30dc15165f 100644
--- a/test/plugin/test_in_tail.rb
+++ b/test/plugin/test_in_tail.rb
@@ -2,6 +2,7 @@
require 'fluent/test'
require 'net/http'
require 'flexmock'
+require 'fluent/plugin/in_tail'
class TailInputTest < Test::Unit::TestCase
include FlexMock::TestCase
diff --git a/test/plugin/test_in_tcp.rb b/test/plugin/test_in_tcp.rb
index 0b89da8c9e..e0f24563a3 100755
--- a/test/plugin/test_in_tcp.rb
+++ b/test/plugin/test_in_tcp.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_tcp'
class TcpInputTest < Test::Unit::TestCase
def setup
@@ -21,7 +22,7 @@ def setup
]
def create_driver(conf)
- Fluent::Test::InputTestDriver.new(Fluent::TcpInput).configure(conf)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::TcpInput).configure(conf)
end
def test_configure
diff --git a/test/plugin/test_in_udp.rb b/test/plugin/test_in_udp.rb
index 1d4d46284f..bcc1eb5851 100755
--- a/test/plugin/test_in_udp.rb
+++ b/test/plugin/test_in_udp.rb
@@ -1,5 +1,7 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/in_udp'
+require 'fluent/plugin/socket_util'
class UdpInputTest < Test::Unit::TestCase
def setup
@@ -21,7 +23,7 @@ def setup
!
def create_driver(conf)
- Fluent::Test::InputTestDriver.new(Fluent::UdpInput).configure(conf)
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::UdpInput).configure(conf)
end
def test_configure
diff --git a/test/plugin/test_out_copy.rb b/test/plugin/test_out_copy.rb
index 04725d198c..52c947d4b0 100644
--- a/test/plugin/test_out_copy.rb
+++ b/test/plugin/test_out_copy.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/out_copy'
class CopyOutputTest < Test::Unit::TestCase
class << self
diff --git a/test/plugin/test_out_exec.rb b/test/plugin/test_out_exec.rb
index 3b8a8a25af..f76f223197 100644
--- a/test/plugin/test_out_exec.rb
+++ b/test/plugin/test_out_exec.rb
@@ -1,6 +1,7 @@
require_relative '../helper'
require 'fluent/test'
require 'fileutils'
+require 'fluent/plugin/out_exec'
class ExecOutputTest < Test::Unit::TestCase
def setup
diff --git a/test/plugin/test_out_exec_filter.rb b/test/plugin/test_out_exec_filter.rb
index 00d888dd5c..2a7b72a6e2 100644
--- a/test/plugin/test_out_exec_filter.rb
+++ b/test/plugin/test_out_exec_filter.rb
@@ -1,6 +1,7 @@
require_relative '../helper'
require 'fluent/test'
require 'fileutils'
+require 'fluent/plugin/out_exec_filter'
class ExecFilterOutputTest < Test::Unit::TestCase
def setup
diff --git a/test/plugin/test_out_file.rb b/test/plugin/test_out_file.rb
index daa50fc5da..6a05c99c28 100644
--- a/test/plugin/test_out_file.rb
+++ b/test/plugin/test_out_file.rb
@@ -2,6 +2,7 @@
require 'fluent/test'
require 'fileutils'
require 'time'
+require 'fluent/plugin/out_file'
class FileOutputTest < Test::Unit::TestCase
def setup
diff --git a/test/plugin/test_out_forward.rb b/test/plugin/test_out_forward.rb
index b3f60bd06a..cd29172b98 100644
--- a/test/plugin/test_out_forward.rb
+++ b/test/plugin/test_out_forward.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/out_forward'
class ForwardOutputTest < Test::Unit::TestCase
def setup
@@ -7,7 +8,7 @@ def setup
end
TARGET_HOST = '127.0.0.1'
- TARGET_PORT = 13999
+ TARGET_PORT = unused_port
CONFIG = %[
send_timeout 51
heartbeat_type udp
@@ -58,7 +59,7 @@ def test_configure
node = nodes.first
assert_equal "test", node.name
assert_equal '127.0.0.1', node.host
- assert_equal 13999, node.port
+ assert_equal TARGET_PORT, node.port
end
def test_configure_udp_heartbeat
@@ -189,9 +190,11 @@ def test_send_without_time_as_integer
end
def test_send_to_a_node_supporting_responses
- target_input_driver = create_target_input_driver(true)
+ target_input_driver = create_target_input_driver # default in_forward supports responses
+ target_input_driver.expected_emits_length = 2
+ target_input_driver.run_timeout = 3
- d = create_driver(CONFIG + %[flush_interval 1s])
+ d = create_driver(CONFIG + %[flush_interval 0s])
time = Time.parse("2011-01-02 13:14:15 UTC").to_i
@@ -200,7 +203,7 @@ def test_send_to_a_node_supporting_responses
{"a" => 2}
]
d.register_run_post_condition do
- d.instance.responses.length == 1
+ d.instance.responses.length > 0
end
target_input_driver.run do
@@ -220,7 +223,7 @@ def test_send_to_a_node_supporting_responses
end
def test_send_to_a_node_not_supporting_responses
- target_input_driver = create_target_input_driver
+ target_input_driver = create_target_input_driver(->(options){ nil })
d = create_driver(CONFIG + %[flush_interval 1s])
@@ -251,7 +254,7 @@ def test_send_to_a_node_not_supporting_responses
end
def test_require_a_node_supporting_responses_to_respond_with_ack
- target_input_driver = create_target_input_driver(true)
+ target_input_driver = create_target_input_driver()
d = create_driver(CONFIG + %[
flush_interval 1s
@@ -288,7 +291,9 @@ def test_require_a_node_supporting_responses_to_respond_with_ack
def test_require_a_node_not_supporting_responses_to_respond_with_ack
# in_forward, that doesn't support ack feature, and keep connection alive
- target_input_driver = create_target_input_driver
+ target_input_driver = create_target_input_driver(->(options){ sleep 5 })
+ target_input_driver.expected_emits_length = 2
+ target_input_driver.run_timeout = 5
d = create_driver(CONFIG + %[
flush_interval 1s
@@ -329,7 +334,7 @@ def test_require_a_node_not_supporting_responses_to_respond_with_ack
# bdf1f4f104c00a791aa94dc20087fe2011e1fd83
def test_require_a_node_not_supporting_responses_2_to_respond_with_ack
# in_forward, that doesn't support ack feature, and disconnect immediately
- target_input_driver = create_target_input_driver(false, true)
+ target_input_driver = create_target_input_driver(nil, true)
d = create_driver(CONFIG + %[
flush_interval 1s
@@ -367,129 +372,19 @@ def test_require_a_node_not_supporting_responses_2_to_respond_with_ack
assert_equal false, node.available # node is regarded as unavailable when unexpected EOF
end
- def create_target_input_driver(do_respond=false, disconnect=false, conf=TARGET_CONFIG)
+ ## TODO: check whether this works well or not...
+ def create_target_input_driver(response_stub=nil, disconnect=false, conf=TARGET_CONFIG)
+ ## TODO: disconnect
require 'fluent/plugin/in_forward'
- # TODO: Support actual TCP heartbeat test
- DummyEngineDriver.new(Fluent::ForwardInput) {
- handler_class = Class.new(Fluent::ForwardInput::Handler) { |klass|
- attr_reader :chunk_counter # for checking if received data is successfully deserialized
-
- def initialize(sock, log, on_message)
- @sock = sock
- @log = log
- @chunk_counter = 0
- @on_message = on_message
- end
-
- if do_respond
- def write(data)
- @sock.write data
- rescue => e
- @sock.close_write
- @sock.close
- end
- else
- def write(data)
- # do nothing
- end
- end
-
- def close
- @sock.close_write
- @sock.close
- end
- }
-
- define_method(:start) do
- @thread = Thread.new do
- Socket.tcp_server_loop(@host, @port) do |sock, client_addrinfo|
- begin
- handler = handler_class.new(sock, @log, method(:on_message))
- loop do
- raw_data = sock.recv(1024)
- handler.on_read(raw_data)
- # chunk_counter is reset to zero only after all the data have been received and successfully deserialized.
- break if handler.chunk_counter == 0
- end
- if disconnect
- handler.close
- sock = nil
- end
- sleep # wait for connection to be closed by client
- ensure
- if sock
- sock.close_write
- sock.close
- end
- end
- end
+ Fluent::Test::Driver::Input.new(Fluent::Plugin::ForwardInput) {
+ if response_stub.nil?
+ # do nothing because in_forward responds for ack option in default
+ else
+ define_method(:response) do |options|
+ return response_stub.(options)
end
end
-
- def shutdown
- @thread.kill
- @thread.join
- end
- }.configure(conf).inject_router()
- end
-
- def test_heartbeat_type_none
- d = create_driver(CONFIG + "\nheartbeat_type none")
- node = d.instance.nodes.first
- assert_equal Fluent::ForwardOutput::NoneHeartbeatNode, node.class
-
- d.instance.start
- assert_nil d.instance.instance_variable_get(:@loop) # no HeartbeatHandler, or HeartbeatRequestTimer
- assert_nil d.instance.instance_variable_get(:@thread) # no HeartbeatHandler, or HeartbeatRequestTimer
-
- stub(node.failure).phi { raise 'Should not be called' }
- node.tick
- assert_equal node.available, true
- end
-
- class DummyEngineDriver < Fluent::Test::TestDriver
- def initialize(klass, &block)
- super(klass, &block)
- @engine = DummyEngineClass.new
- @klass = klass
- # To avoid accessing Fluent::Engine, set Engine as a plugin's class constant (Fluent::SomePlugin::Engine).
- # But this makes it impossible to run tests concurrently by threading in a process.
- @klass.const_set(:Engine, @engine)
- end
-
- def inject_router
- @instance.router = @engine
- self
- end
-
- def run(&block)
- super(&block)
- @klass.class_eval do
- remove_const(:Engine)
- end
- end
-
- def emits
- all = []
- @engine.emit_streams.each {|tag,events|
- events.each {|time,record|
- all << [tag, time, record]
- }
- }
- all
- end
-
- class DummyEngineClass
- attr_reader :emit_streams
-
- def initialize
- @emit_streams ||= []
- end
-
- def emit_stream(tag, es)
- @emit_streams << [tag, es.to_a]
- end
- end
+ }.configure(conf)
end
end
diff --git a/test/plugin/test_out_roundrobin.rb b/test/plugin/test_out_roundrobin.rb
index 76c87460ab..56722f0174 100644
--- a/test/plugin/test_out_roundrobin.rb
+++ b/test/plugin/test_out_roundrobin.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/out_roundrobin'
class RoundRobinOutputTest < Test::Unit::TestCase
class << self
diff --git a/test/plugin/test_out_stdout.rb b/test/plugin/test_out_stdout.rb
index ba07c33083..49e0567057 100644
--- a/test/plugin/test_out_stdout.rb
+++ b/test/plugin/test_out_stdout.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/out_stdout'
class StdoutOutputTest < Test::Unit::TestCase
def setup
diff --git a/test/plugin/test_out_stream.rb b/test/plugin/test_out_stream.rb
index b105411a5c..f79d51cf86 100644
--- a/test/plugin/test_out_stream.rb
+++ b/test/plugin/test_out_stream.rb
@@ -1,5 +1,6 @@
require_relative '../helper'
require 'fluent/test'
+require 'fluent/plugin/out_stream'
module StreamOutputTest
def setup
diff --git a/test/storage/test_json.rb b/test/storage/test_json.rb
new file mode 100644
index 0000000000..0a2fb8c311
--- /dev/null
+++ b/test/storage/test_json.rb
@@ -0,0 +1,123 @@
+require_relative '../helper'
+require 'fluent/test'
+require 'fluent/storage/json'
+
+require 'fileutils'
+
+module Fluent::Storage
+ class JSONStorageTest < Test::Unit::TestCase
+ sub_test_case Fluent::Storage::JSON do
+ setup do
+ @storage_path = File.join(File.dirname(File.dirname(__FILE__)), 'tmp', 'storage')
+ FileUtils.rm_rf(@storage_path, secure: true) if Dir.exist?(@storage_path)
+ FileUtils.mkdir_p(@storage_path)
+
+ @storage = Fluent::Storage::JSON.new
+ end
+
+ sub_test_case '#put' do
+ test "#put handle keys both symbols and strings as same" do
+ obj = "data1"
+ @storage.put(:key1, obj)
+ assert_equal obj.object_id, @storage.instance_eval{ @store[:key1] }.object_id
+
+ @storage.put("key1", obj)
+ assert_equal obj.object_id, @storage.instance_eval{ @store[:key1] }.object_id
+ end
+ end
+
+ sub_test_case '#get' do
+ test "return nil for unknown keys" do
+ assert_nil @storage.get(:key1)
+
+ @storage.put(:key1, "data1")
+ assert_equal "data1", @storage.get(:key1)
+
+ assert_nil @storage.get(:key2)
+ end
+
+ test "get handle keys both symbols and strings as same" do
+ obj = "data1"
+ @storage.put(:key1, obj)
+ assert_equal obj.object_id, @storage.get(:key1).object_id
+ assert_equal obj.object_id, @storage.get("key1").object_id
+
+ obj = "data2"
+ @storage.put("key1", obj)
+ assert_equal obj.object_id, @storage.get(:key1).object_id
+ assert_equal obj.object_id, @storage.get("key1").object_id
+ end
+ end
+
+ sub_test_case '#fetch' do
+ test "fetch works as well as Hash#fetch" do
+ assert_equal "data", @storage.fetch(:key1, "data")
+
+ @storage.put(:key1, "data1")
+ assert_equal "data1", @storage.fetch(:key1, "data")
+ end
+ end
+
+ def e(name, arg = '', attrs = {}, elements = [])
+ attrs_str_keys = {}
+ attrs.each{|key, value| attrs_str_keys[key.to_s] = value }
+ Fluent::Config::Element.new(name, arg, attrs_str_keys, elements)
+ end
+
+ sub_test_case '#save' do
+ test "stored data is saved as plain-text JSON data on the disk" do
+ test_path = File.join(@storage_path, 'save_test_1.json')
+ assert !(File.exist?(test_path))
+
+ @storage.configure(e('ROOT', '', {}, [e('storage', '', {'path' => test_path, 'pretty_print' => 'false'}, [])]))
+ assert_equal test_path, @storage.storage_config.path
+ assert_equal test_path + '.tmp', @storage.instance_eval{ @tmp_path }
+ assert_equal false, @storage.storage_config.pretty_print
+
+ assert_equal({}, @storage.instance_eval{ @store })
+
+ t = Time.now
+
+ @storage.put(:key1, 'data1')
+ @storage.put(:key2, {k1: 'v1', k2: 'v2', k3: [1,2,3]})
+ @storage.put(:key3, ['a', 'b', 'c', 'd'])
+ @storage.put(:key4, t.to_i)
+
+ @storage.save
+
+ assert !(File.exist?(test_path + '.tmp'))
+ assert File.exist?(test_path)
+
+ content = open(test_path){|f| ::JSON.parse(f.read, symbolize_names: true)}
+ expected = {key1: 'data1', key2: {k1: 'v1', k2: 'v2', k3: [1,2,3]}, key3: ['a', 'b', 'c', 'd'], key4: t.to_i}
+ assert_equal expected, content
+ end
+ end
+
+ sub_test_case '#load' do
+ test "saved data is successfully loaded with symbolized keys" do
+ test_path = File.join(@storage_path, 'load_test_1.json')
+ assert !(File.exist?(test_path))
+
+ t = Time.now
+
+ expected = {key1: 'data1', key2: {k1: 'v1', k2: 'v2', k3: [1,2,3]}, key3: ['a', 'b', 'c', 'd'], key4: t.to_i}
+ open(test_path, 'w:utf-8') do |f|
+ f.write expected.to_json
+ end
+ assert File.exist?(test_path)
+
+ @storage.configure(e('ROOT', '', {}, [e('storage', '', {'path' => test_path, 'pretty_print' => 'false'}, [])]))
+ @storage.load
+
+ assert_equal expected, @storage.instance_eval{ @store }
+
+ assert_equal 'data1', @storage.get(:key1)
+ assert_equal({k1: 'v1', k2: 'v2', k3: [1,2,3]}, @storage.get(:key2))
+ assert_equal ['a', 'b', 'c', 'd'], @storage.get(:key3)
+ assert_equal t.to_i, @storage.get(:key4)
+ end
+ end
+ end
+ end
+end