diff --git a/docs/index.asciidoc b/docs/index.asciidoc
index 7e67c7c..94b1c07 100644
--- a/docs/index.asciidoc
+++ b/docs/index.asciidoc
@@ -24,7 +24,7 @@ include::{include_path}/plugin_header.asciidoc[]
 Read rows from an sqlite database.
 
 This is most useful in cases where you are logging directly to a table.
-Any tables being watched must have an `id` column that is monotonically
+Any tables being watched must have a column, configurable via `id_field`, that is monotonically
 increasing.
 
 All tables are read by default except:
@@ -85,6 +85,7 @@ This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>>
 | <<plugins-{type}s-{plugin}-batch>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-exclude_tables>> |<<array,array>>|No
+| <<plugins-{type}s-{plugin}-id_field>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-path>> |<<string,string>>|Yes
 |=======================================================================
 
@@ -110,6 +111,14 @@ How many rows to fetch at a time from each `SELECT` call.
 
 Any tables to exclude by name. By default all tables are followed.
 
+[id="plugins-{type}s-{plugin}-id_field"]
+===== `id_field`
+
+  * Value type is <<string,string>>
+  * Default value is `"id"`
+
+The name of the column in each watched table that is monotonically increasing.
+
 [id="plugins-{type}s-{plugin}-path"]
 ===== `path`
 
diff --git a/lib/logstash/inputs/sqlite.rb b/lib/logstash/inputs/sqlite.rb
index ed8badd..05ae255 100644
--- a/lib/logstash/inputs/sqlite.rb
+++ b/lib/logstash/inputs/sqlite.rb
@@ -72,6 +72,9 @@ class LogStash::Inputs::Sqlite < LogStash::Inputs::Base
   # How many rows to fetch at a time from each `SELECT` call.
   config :batch, :validate => :number, :default => 5
 
+  # The name of the monotonically increasing "id" column.
+  config :id_field, :validate => :string, :default => "id"
+
   SINCE_TABLE = :since_table
 
   public
@@ -121,7 +124,7 @@ def get_all_tables(db)
 
   public
   def get_n_rows_from_table(db, table, offset, limit)
     dataset = db["SELECT * FROM #{table}"]
-    return db["SELECT * FROM #{table} WHERE (id > #{offset}) ORDER BY 'id' LIMIT #{limit}"].map { |row| row }
+    return db["SELECT * FROM #{table} WHERE (#{@id_field} > #{offset}) ORDER BY #{@id_field} LIMIT #{limit}"].map { |row| row }
   end
 
@@ -156,16 +159,16 @@ def run(queue)
       @logger.debug("offset is #{offset}", :k => k, :table => table_name)
       rows = get_n_rows_from_table(@db, table_name, offset, @batch)
       count += rows.count
-      rows.each do |row|
-        event = LogStash::Event.new("host" => @host, "db" => @db)
+      rows.each do |row|
+        event = LogStash::Event.new("host" => @host, "db" => @path)
         decorate(event)
         # store each column as a field in the event.
         row.each do |column, element|
-          next if column == :id
+          next if column == @id_field.to_sym
           event.set(column.to_s, element)
         end
         queue << event
-        @table_data[k][:place] = row[:id]
+        @table_data[k][:place] = row[@id_field.to_sym]
       end
       # Store the last-seen row in the database
       update_placeholder(@db, table_name, @table_data[k][:place])
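
For reviewers, a minimal pipeline configuration exercising the new option might look like the sketch below. The database path and the `seq` column name are hypothetical; `id_field` falls back to its default of `id` when omitted.

    input {
      sqlite {
        path => "/var/data/app.db"   # hypothetical sqlite database file
        id_field => "seq"            # hypothetical monotonically increasing column
        batch => 50
      }
    }

With this configuration the plugin would page through each watched table ordered by `seq`, emit one event per row, and record the last-seen `seq` value in its `since_table` placeholder.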