Tags: elasticsearch, logstash, logstash-configuration, logstash-jdbc

How to dynamically define an Elasticsearch index with Logstash?


Below is a Logstash config file that uses the JDBC input plugin to pull records from a MySQL database into an Elasticsearch index. How can it be modified so that separate indexes are produced based on a company_id value found in the database, e.g. company_%{company_id}_user_events?

Can this be done dynamically, or does it require creating separate Logstash config files, preconfigured and hardcoded for each company id? Is there some middle ground, like a script or a template?

If it would help, the company_id field could be added to the ahoy_events table in the database, instead of being "added" through the user association the way it is now.
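
For context, the goal either way is to end up with a top-level company_id field on each event. A rough sketch of how it could also be joined directly in the input query (hypothetical; it assumes ahoy_events.user_id is the foreign key to users and that users carries a company_id column):

input {
    jdbc {
        # ... same connection settings as in the config below ...
        # join the owning user's company_id onto each event row
        statement => "select e.*, u.company_id from ahoy_events e join users u on u.id = e.user_id where e.time > :sql_last_value"
    }
}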

Current logstash.conf

input {
    jdbc {
        jdbc_driver_library => "/opt/mysql-connector-java-5.1.47-bin.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://mysql:3306/${DB_NAME}"
        jdbc_user => "${DB_USER}"
        jdbc_password => "${DB_PASSWORD}"
        schedule => "* * * * *"
        statement => "select * from ahoy_events where time > :sql_last_value"
    }
}

filter {
    jdbc_streaming {
        jdbc_driver_library => "/opt/mysql-connector-java-5.1.47-bin.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://mysql:3306/${DB_NAME}"
        jdbc_user => "${DB_USER}"
        jdbc_password => "${DB_PASSWORD}"
        statement => "select * from users where id = :user"
        parameters => { "user" => "user_id" }
        target => "user"
    }
    jdbc_streaming {
        jdbc_driver_library => "/opt/mysql-connector-java-5.1.47-bin.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://mysql:3306/${DB_NAME}"
        jdbc_user => "${DB_USER}"
        jdbc_password => "${DB_PASSWORD}"
        statement => "select * from visits where id = :visits"
        parameters => { "visits" => "visit_id" }
        target => "visits"
    }
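    # Promote the company_id from the user lookup to a top-level field on the event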
    mutate {
        add_field => { "company_id" =>  "%{[user][0][company_id]}"}
    }
    jdbc_streaming {
        jdbc_driver_library => "/opt/mysql-connector-java-5.1.47-bin.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://mysql:3306/${DB_NAME}"
        jdbc_user => "${DB_USER}"
        jdbc_password => "${DB_PASSWORD}"
        statement => "select * from companies where id = :company_id"
        parameters => { "company_id" => "company_id" }
        target => "company"
    }
    json {
        source => "properties"
        target => "properties"
    }
    mutate {
        add_field => { "user_name" => "%{[user][0][name]}" }
    }
    mutate {
        add_field => { "company_name" => "%{[company][0][name]}" }
    }
    mutate {
        rename => { "[visits][0]" => "visit" }
    }
    mutate {
        remove_field => ["visits", "company", "user"]
    }
}

output {
    elasticsearch {
        hosts => ["http://elasticsearch:9200"]
        index => "user_events-%{+YYYY.MM.dd}"
        document_id => "%{id}"
    }
}

The desired result is an index namespaced by the company_id, i.e. company_%{company_id}_user_events, so that other indexes can be added later following the same pattern, e.g. company_%{company_id}_other_records.


Solution

  • It should be as simple as referencing the company_id field in the index option with Logstash's sprintf syntax:

    output {
        elasticsearch {
            hosts => ["http://elasticsearch:9200"]
            index => "company_%{company_id}_events-%{+YYYY.MM.dd}"
            document_id => "%{id}"
        }
    }
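
    One caveat: if the user lookup ever returns nothing, the mutate above leaves company_id set to the literal string %{[user][0][company_id]}, and that literal would end up in the index name. A rough sketch of a guard for that case (the unassigned_user_events fallback name is just a placeholder):

    output {
        if [company_id] =~ /^\d+$/ {
            elasticsearch {
                hosts => ["http://elasticsearch:9200"]
                index => "company_%{company_id}_user_events-%{+YYYY.MM.dd}"
                document_id => "%{id}"
            }
        } else {
            # company_id did not resolve; keep the event in a catch-all index
            elasticsearch {
                hosts => ["http://elasticsearch:9200"]
                index => "unassigned_user_events-%{+YYYY.MM.dd}"
                document_id => "%{id}"
            }
        }
    }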