Hi All,
I am working on pushing data from a single MySQL table to Open Distro for Elasticsearch using the Fluentd mysql-replicator plugin, but Fluentd is throwing the following error.
I have tried the Open Distro certificates below and referenced them in the Fluentd config file, but I am still getting the same error: root-ca.pem, esnode.pem, esnode-key.pem, kirk.pem, kirk_key.pem
error_class=OpenSSL::SSL::SSLError error="SSL_connect returned=1 errno=0 state=error: certificate verify failed (unable to get local issuer certificate)"
FluentD config file:
<source>
@type mysql_replicator
# Set connection settings for replicate source.
host xxx.xx.xx.xx
# NOTE(review): 6036 is not the MySQL default (3306) — confirm this is the
# intended port and that the server is reachable on it.
port 6036
username root
password xxxx
database testdb
# Set replicate query configuration.
# Only the listed columns are replicated; rows are polled every `interval`.
query SELECT tutorial_id, tutorial_title from mysqles;
primary_key tutorial_id
interval 10s
# Enable detect deletion event not only insert/update events. (default: yes)
# It is useful to use `enable_delete no` that keep following recently updated record with this query.
# `SELECT * FROM search_test WHERE DATE_ADD(updated_at, INTERVAL 5 MINUTE) > NOW();`
enable_delete no
# Format output tag for each events. Placeholders usage as described below.
# The four dot-separated segments must match the `tag_format` regex in the
# <match> section (index_name.type_name.event.primary_key).
tag replicator.testdb.mysqles.${event}.${primary_key}
# ${event} : the variation of row event type by insert/update/delete.
# ${primary_key} : the value of `replicator_manager.settings.primary_key` in manager table.
</source>
<match replicator.**>
@type mysql_replicator_elasticsearch
# Set Elasticsearch connection.
host 172.16.23.6
port 9200
# Use SSL/TLS for connecting to Elasticsearch.
scheme https
ssl true
ssl_version TLSv1_2
# Basic authentication credentials.
username xxxxx
password xxxxx
# ca_file must be the CA certificate that SIGNED the certificate the
# Elasticsearch node presents on 172.16.23.6:9200. The reported error
# "unable to get local issuer certificate" means the server's cert does not
# chain up to this CA. Verify the chain outside Fluentd with:
#   openssl s_client -connect 172.16.23.6:9200 -CAfile /fluentd/etc/certs/root-ca.pem
ca_file /fluentd/etc/certs/root-ca.pem
# Client certificate/key are only needed if Elasticsearch enforces mutual
# TLS. NOTE(review): esnode.pem/esnode-key.pem are the SERVER node's cert
# and key in a default Open Distro install, not a client identity — remove
# these two lines unless client-cert auth is actually enabled.
client_cert /fluentd/etc/certs/esnode.pem
client_key /fluentd/etc/certs/esnode-key.pem
# Removed invalid parameters: `kirk_file`, `kirk_key`, and `kibana_key` are
# not options of this plugin. kirk.pem/kirk_key.pem are the Open Distro
# security admin certs used by securityadmin.sh, not by Fluentd, and
# `kibana_key` pointed at a `.cert` file (a certificate, not a key).
#   kirk_file /fluentd/etc/certs/kirk.pem
#   kirk_key /fluentd/etc/certs/kirk_key.pem
#   kibana_key /fluentd/etc/certs/opendistroforelasticsearch.example.org.cert
# Set Elasticsearch index, type, and unique id (primary_key) from tag.
# Must match the tag emitted by the <source> section:
#   replicator.testdb.mysqles.${event}.${primary_key}
tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
# Set frequency of sending bulk request to Elasticsearch node.
flush_interval 5s
# Set maximum retry interval (required fluentd >= 0.10.41)
#max_retry_wait 1800
# Queued chunks are flushed at shutdown process.
# Use the canonical boolean literal `true` (accepted by all Fluentd
# versions) instead of `yes`.
flush_at_shutdown true
buffer_type file
# `buffer_type file` requires an explicit buffer_path; without it Fluentd
# fails at startup with a configuration error. The directory must be
# writable by the fluentd process.
buffer_path /var/log/fluentd/buffer/replicator
</match>