#!/bin/bash
# Provision a Logstash docker container: data dirs, config files, docker run.
# Host directory bind-mounted into the container.
readonly MODULE_PATH=/opt/module/es/logstash
# Name of the docker container to (re)create.
readonly DOCKER_NAME=es-logstash
# Logstash home directory inside the container.
readonly CONTAINER_PATH=/usr/share/logstash

# Remove any previous data directory and container so we start clean.
# ${MODULE_PATH:?} aborts instead of running "rm -rf /" if the variable
# is ever empty or unset.
rm -rf -- "${MODULE_PATH:?}"
# Anchored --filter matches the exact container name; a plain
# "docker ps -a | grep $DOCKER_NAME" would also match names that merely
# contain the string.
if [ -n "$(docker ps -aq --filter "name=^${DOCKER_NAME}$")" ]; then
    docker rm -f "$DOCKER_NAME"
fi
# config: connection settings; pipeline: pipeline definitions;
# script: plugin-install helper; mysql: JDBC driver; logs: log output.
for folder in config pipeline script mysql logs; do
    if [ ! -d "$MODULE_PATH/$folder" ]; then
        mkdir -p "$MODULE_PATH/$folder"
        chmod 777 "$MODULE_PATH/$folder"
    fi
done

# Download the MySQL JDBC driver jar. download_unzip.sh stores it and
# exports the variables documented below.
URL=https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.48/mysql-connector-java-5.1.48.jar
# NOTE(review): relative path — assumes the script is launched from its
# own directory; confirm the caller's working directory.
source ../linux/download_unzip.sh "es/logstash" "$URL"

# Detect the host's primary IPv4 address, skipping loopback, the docker
# bridge (172.17.0.1) and the VirtualBox NAT address (10.0.2.15).
# "tr -d 'addr:'" deletes the characters of the "addr:" prefix some
# ifconfig versions print; an IPv4 address contains none of them, so the
# address itself is untouched.
IP=$(/sbin/ifconfig -a | grep inet | grep -v 127.0.0.1 | grep -v 172.17.0.1 | grep -v 10.0.2.15 | grep -v inet6 | awk '{print $2}' | tr -d "addr:")

#========= Variables exported by download_unzip.sh for this script =========
#>>> $FILE_PATH       absolute file path: /opt/software/mysql-connector-java-5.1.48.jar
#>>> $FILE_EXTENSION  file extension: jar
#>>> $File_HOME       software HOME: /opt/module/es/logstash/pipeline
#>>> $REPO_PATH       repository dir: /opt/repository/es/logstash

# (1) Copy the JDBC driver into the mounted mysql directory.
cp -- "$FILE_PATH" "$MODULE_PATH/mysql"
# (2) Logging configuration (config/log4j2.properties).
# BUGFIX: the delimiter must be quoted ('EOF'). With an unquoted EOF,
# bash tries to expand ${sys:ls.logs} / ${sys:ls.log.format} as
# parameter substitutions ("ls.logs" is an invalid substring offset),
# corrupting the file instead of writing the literal log4j2 lookups.
cat >"$MODULE_PATH/config/log4j2.properties" <<'EOF'
status = error
name = LogstashPropertiesConfig

appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n

appender.json_console.type = Console
appender.json_console.name = json_console
appender.json_console.layout.type = JSONLayout
appender.json_console.layout.compact = true
appender.json_console.layout.eventEol = true

appender.rolling.type = RollingFile
appender.rolling.name = plain_rolling
appender.rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
appender.rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %-.10000m%n
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 100MB

appender.json_rolling.type = RollingFile
appender.json_rolling.name = json_rolling
appender.json_rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling.policies.type = Policies
appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling.policies.time.interval = 1
appender.json_rolling.policies.time.modulate = true
appender.json_rolling.layout.type = JSONLayout
appender.json_rolling.layout.compact = true
appender.json_rolling.layout.eventEol = true
appender.json_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling.policies.size.size = 100MB


rootLogger.level = ${sys:ls.log.level}
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console
rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling

# Slowlog

appender.console_slowlog.type = Console
appender.console_slowlog.name = plain_console_slowlog
appender.console_slowlog.layout.type = PatternLayout
appender.console_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n

appender.json_console_slowlog.type = Console
appender.json_console_slowlog.name = json_console_slowlog
appender.json_console_slowlog.layout.type = JSONLayout
appender.json_console_slowlog.layout.compact = true
appender.json_console_slowlog.layout.eventEol = true

appender.rolling_slowlog.type = RollingFile
appender.rolling_slowlog.name = plain_rolling_slowlog
appender.rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
appender.rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_slowlog.policies.type = Policies
appender.rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_slowlog.policies.time.interval = 1
appender.rolling_slowlog.policies.time.modulate = true
appender.rolling_slowlog.layout.type = PatternLayout
appender.rolling_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
appender.rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_slowlog.policies.size.size = 100MB

appender.json_rolling_slowlog.type = RollingFile
appender.json_rolling_slowlog.name = json_rolling_slowlog
appender.json_rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
appender.json_rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling_slowlog.policies.type = Policies
appender.json_rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.time.interval = 1
appender.json_rolling_slowlog.policies.time.modulate = true
appender.json_rolling_slowlog.layout.type = JSONLayout
appender.json_rolling_slowlog.layout.compact = true
appender.json_rolling_slowlog.layout.eventEol = true
appender.json_rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.size.size = 100MB

logger.slowlog.name = slowlog
logger.slowlog.level = trace
logger.slowlog.appenderRef.console_slowlog.ref = ${sys:ls.log.format}_console_slowlog
logger.slowlog.appenderRef.rolling_slowlog.ref = ${sys:ls.log.format}_rolling_slowlog
logger.slowlog.additivity = false

logger.licensereader.name = logstash.licensechecker.licensereader
logger.licensereader.level = error

EOF
# (3) Elasticsearch connection settings (config/logstash.yml).
# The monitoring URL embeds the host IP detected earlier.
{
    printf '%s\n' 'http.host: "0.0.0.0"'
    printf '%s\n' "xpack.monitoring.elasticsearch.url: http://$IP:9200"
} >"$MODULE_PATH/config/logstash.yml"

# (4) Pipeline registration (config/pipelines.yml): points the "main"
# pipeline at the MySQL sync config path inside the container.
# NB: delimiter intentionally unquoted so $CONTAINER_PATH expands here.
cat >$MODULE_PATH/config/pipelines.yml<<EOF
# This file is where you define your pipelines. You can define multiple.
# For more information on multiple pipelines, see the documentation:
#   https://www.elastic.co/guide/en/logstash/current/multiple-pipelines.html
- pipeline.id: main
  path.config: "$CONTAINER_PATH/pipeline/mysql_logstash.config"
EOF

# (5) Placeholder pipeline (pipeline/mysql_logstash.config): echoes
# stdin back to stdout until the real MySQL sync pipeline is enabled.
# Quoted delimiter: the content is fully literal, nothing to expand.
cat <<'EOF' >"$MODULE_PATH/pipeline/mysql_logstash.config"
input {
    stdin { }   
}

output {
    stdout { codec => rubydebug }
}
EOF

# Generate a helper script (to be run inside the container) that
# installs the JDBC input and Elasticsearch output plugins.
# Delimiter is intentionally unquoted: $CONTAINER_PATH must expand now
# so the generated script carries absolute in-container paths.
cat >"$MODULE_PATH/script/plugin_install.sh" <<EOF
#!/bin/bash
# 输入mysql
$CONTAINER_PATH/bin/logstash-plugin install logstash-input-jdbc
# 输出到es
$CONTAINER_PATH/bin/logstash-plugin install logstash-output-elasticsearch
EOF
# The generated script was not executable before; fix that so it can be
# invoked directly.
chmod +x "$MODULE_PATH/script/plugin_install.sh"

# Reference material (setup not fully debugged yet):
# https://blog.csdn.net/chongshi4396/article/details/100805225
# https://blog.csdn.net/supercmd/article/details/91048042?depth_1-
# https://blog.csdn.net/wangyong20083344/article/details/91949036
# Start the container. Configs, scripts, logs, the JDBC driver and the
# pipeline file are all bind-mounted from $MODULE_PATH.
docker run -d \
    --privileged=true \
    -v /etc/localtime:/etc/localtime:ro \
    --name "$DOCKER_NAME" \
    -p 5044:5044 \
    -p 9600:9600 \
    -v "$MODULE_PATH/config/log4j2.properties:$CONTAINER_PATH/config/log4j2.properties" \
    -v "$MODULE_PATH/config/logstash.yml:$CONTAINER_PATH/config/logstash.yml" \
    -v "$MODULE_PATH/config/pipelines.yml:$CONTAINER_PATH/config/pipelines.yml" \
    -v "$MODULE_PATH/script:$CONTAINER_PATH/script" \
    -v "$MODULE_PATH/logs:$CONTAINER_PATH/logs" \
    -v "$MODULE_PATH/mysql:$CONTAINER_PATH/mysql" \
    -v "$MODULE_PATH/pipeline/mysql_logstash.config:$CONTAINER_PATH/pipeline/mysql_logstash.config" \
    daocloud.io/library/logstash:6.5.4
echo "基础安装成功，下面要安装插件，并编写同步脚本"
# Everything below this exit is an unfinished draft, deliberately kept
# unreachable.
exit

# Install the JDBC input plugin (run inside the container):
# bin/logstash-plugin install logstash-input-jdbc
# Install the Elasticsearch output plugin:
# bin/logstash-plugin install logstash-output-elasticsearch



# DRAFT — unreachable: the script exits above before this runs.
# (4) Full MySQL -> Elasticsearch sync pipeline (pipeline/logstash.config).
# NOTE(review): jdbc_driver_library / last_run_metadata_path use
# $File_HOME, a HOST path, but this config runs INSIDE the container
# where the driver is mounted at /usr/share/logstash/mysql — confirm the
# paths before enabling this block.
cat >$MODULE_PATH/pipeline/mysql_logstash.config <<EOF
input {
	stdin { }
	jdbc {
		type => "jdbc"
		jdbc_connection_string => "jdbc:mysql://$IP/test?characterEncoding=UTF-8&autoReconnect=true"
		jdbc_user => "root"
		jdbc_password => "123456"
		jdbc_driver_library => "$File_HOME/mysql/mysql-connector-java-5.1.48.jar"
		jdbc_driver_class => "com.mysql.jdbc.Driver"
		connection_retry_attempts => "3"
		jdbc_validate_connection => "true"
		jdbc_validation_timeout => "3600"
		jdbc_paging_enabled => "true"
		jdbc_page_size => "500"
		statement => "SELECT id,name FROM test WHERE id >= :sql_last_value"
		lowercase_column_names => false
		sql_log_level => info
		record_last_run => true
		use_column_value => true
		tracking_column => "id"
		tracking_column_type => numeric
		last_run_metadata_path => "$File_HOME/mysql/last_id.txt"
		clean_run => false
		schedule => "* * * * *"
	}
}
output {
	stdout {
		codec => json_lines
	}
	elasticsearch {
		hosts => ["$IP:9200"]
		index => "test"
		document_type => "test"
		document_id => "%{id}"
	}
}
EOF




