使用logstash同步mysql 多表数据到ElasticSearch实践

以下配置仅为参考样例,各项配置参数请根据实际环境调整。

# Input: one jdbc stanza per MySQL table. Each runs its SQL statement on a
# cron-style schedule and stamps events with the table name (type/tags) so
# the output section can route them to the right Elasticsearch index.
input {  
    jdbc {  
      # JDBC URL of the source MySQL database (replace the placeholder).
      jdbc_connection_string => "jdbc:mysql://localhost/数据库名"  
      jdbc_user => "root"  
      jdbc_password => "password"  
      # Filesystem path to the MySQL Connector/J jar.
      jdbc_driver_library => "mysql-connector-java-5.1.45-bin.jar所在位置"  
      # Driver class matching Connector/J 5.x.
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain {charset => "UTF-8"}
      # Persist :sql_last_value between runs so restarts resume
      # incrementally instead of re-reading everything (per plugin docs).
      record_last_run => true
      # Page through large result sets 1000 rows at a time instead of
      # loading the full result into memory.
      jdbc_paging_enabled => "true"  
      jdbc_page_size => "1000"  
      # The SELECT to run for this table (replace the placeholder).
      statement => "sql statement"   
      # Cron syntax: run once every minute.
      schedule => "* * * * *"  
      # type/tags carry the table name; the output conditionals below
      # route on [type].
      type => "数据库表名1"  
      tags => "数据库表名1"
    }
    # Second table: identical settings except statement/type/tags.
    jdbc {  
      jdbc_connection_string => "jdbc:mysql://localhost/数据库名"  
      jdbc_user => "root"  
      jdbc_password => "password"  
      jdbc_driver_library => "mysql-connector-java-5.1.45-bin.jar所在位置"  
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      codec => plain {charset => "UTF-8"}
      record_last_run => true
      jdbc_paging_enabled => "true"  
      jdbc_page_size => "1000"  
      statement => "sql statement"   
      schedule => "* * * * *"  
      type => "数据库表名2"
      tags => "数据库表名2"
    }
}  

filter {  
    # The jdbc input emits already-structured events (one field per column)
    # and does not create a "message" field, so running the json filter
    # unconditionally would tag every event with _jsonparsefailure.
    # Only parse (and then drop) "message" when it actually exists.
    if [message] {
        json {  
            source => "message"  
            remove_field => ["message"]  
        }
    }
}  

output {
    # Route each event to the index of its source table, keyed on the
    # [type] value set by the matching jdbc input. document_id is the
    # row's unique id, so repeated syncs update documents in place
    # rather than creating duplicates. The branches are mutually
    # exclusive because an event carries exactly one type.
    if [type] == "数据库表名1" {
        elasticsearch {
            hosts => ["els的host地址"]
            index => "数据库表名1对应的els的index"
            document_id => "%{唯一id}"
        }
    } else if [type] == "数据库表名2" {
        elasticsearch {
            hosts => ["els的host地址"]
            index => "数据库表名2对应的els的index"
            document_id => "%{唯一id}"
        }
    }
    # Echo every event to stdout as JSON lines for debugging.
    stdout {
        codec => json_lines
    }
}

使用logstash同步mysql 多表数据到ElasticSearch实践

上一篇:spark利用sparkSQL将数据写入hive两种通用方式实现及比较


下一篇:Mysql 使用临时表比较数据差异以及 临时表的优化