# NOTE(review): the official `openjdk` images are deprecated upstream
# (consider eclipse-temurin); kept as-is because later steps assume this
# image's layout (e.g. /usr/local/openjdk-8).
FROM openjdk:8-jdk

# Pinned component versions and install prefixes, grouped in one ENV
# instruction. Kept as ENV (not ARG) so they stay visible at run time.
ENV FLINK_VERSION=1.19.1 \
    FLINK_SHORT_VERSION=1.19 \
    DINKY_VERSION=1.2.0 \
    FLINK_HOME=/opt/flink \
    DINKY_HOME=/opt/dinky

# Create the install directories for Flink and Dinky.
RUN mkdir -p "$FLINK_HOME" "$DINKY_HOME"

# Install Flink.
# Fetch from archive.apache.org: downloads.apache.org only mirrors the most
# recent releases, so a pinned older version starts returning 404 once it is
# superseded; the archive hosts every release permanently.
RUN wget -nv https://archive.apache.org/dist/flink/flink-${FLINK_VERSION}/flink-${FLINK_VERSION}-bin-scala_2.12.tgz \
    && tar -xf flink-${FLINK_VERSION}-bin-scala_2.12.tgz -C $FLINK_HOME --strip-components=1 \
    && rm flink-${FLINK_VERSION}-bin-scala_2.12.tgz

# Install Dinky from the Gitee release tarball for the matching Flink line.
RUN set -eux; \
    tarball="dinky-release-${FLINK_SHORT_VERSION}-${DINKY_VERSION}.tar.gz"; \
    wget "https://gitee.com/DataLinkDC/dinky/releases/download/v${DINKY_VERSION}/${tarball}"; \
    tar -xf "$tarball" -C "$DINKY_HOME" --strip-components=1; \
    rm "$tarball"

# Dinky does not bundle a MySQL/Postgres JDBC driver for its metadata
# database, so fetch the MySQL connector explicitly.
RUN wget -O $DINKY_HOME/extends/mysql-connector-java-8.0.29.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.29/mysql-connector-java-8.0.29.jar \
    # YARN per-job and application modes depend on flink-shaded-hadoop; it can
    # also be uploaded to Flink first and then copied into the Dinky path.
    && wget -O $DINKY_HOME/extends/flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar https://repository.cloudera.com/artifactory/cloudera-repos/org/apache/flink/flink-shaded-hadoop-3-uber/3.1.1.7.2.9.0-173-9.0/flink-shaded-hadoop-3-uber-3.1.1.7.2.9.0-173-9.0.jar \
    # Flink-version-specific extensions: ensure the target directory exists
    # before writing into it (don't rely on the release tarball's layout).
    && mkdir -p $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION \
    && wget -O $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION/commons-cli-1.9.0.jar https://repo1.maven.org/maven2/commons-cli/commons-cli/1.9.0/commons-cli-1.9.0.jar \
    # Alternative to the cp below — download the planner jar from Maven:
    # wget -O $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION/flink-table-planner_2.12-${FLINK_VERSION}.jar https://repo1.maven.org/maven2/org/apache/flink/flink-table-planner_2.12/${FLINK_VERSION}/flink-table-planner_2.12-${FLINK_VERSION}.jar
    && cp $FLINK_HOME/opt/flink-table-planner_2.12-${FLINK_VERSION}.jar $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION/ \
    && cp -r $FLINK_HOME/lib/* $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION/ \
    # IMPORTANT: Flink's lib/ ships the planner *loader* jar
    # (e.g. flink-table-planner-loader-1.16.1.jar). The loader variant must be
    # removed and replaced by the plain planner jar from Flink's opt/ directory
    # (done above). The same swap is required on any external Flink cluster
    # (standalone, HDFS, K8s, ...).
    && rm -f $DINKY_HOME/extends/flink$FLINK_SHORT_VERSION/flink-table-planner-loader-${FLINK_VERSION}.jar


WORKDIR $DINKY_HOME

# customJar: drop-in directory for user-supplied jars.
# NOTE(review): chmod -R 777 is world-writable — presumably to allow running
# under an arbitrary UID; consider a dedicated user with targeted permissions.
# The sed swaps the fixed heap flags in bin/auto.sh for container-aware
# RAM-percentage flags so the JVM sizes itself from the cgroup memory limit.
# NOTE(review): sed is a silent no-op if auto.sh's flag string ever changes —
# verify against the pinned Dinky release.
RUN mkdir /opt/dinky/customJar && chmod -R 777 $DINKY_HOME/ && sed -i 's/-Xms512M -Xmx2048M -XX:PermSize=512M/-XX:+UseContainerSupport -XX:InitialRAMPercentage=70.0 -XX:MaxRAMPercentage=70.0/g' ./bin/auto.sh

# Dinky web/API port (documentation only — publish with `-p` at run time).
EXPOSE 8888

# Make JAVA_HOME available to every user's login shell (e.g. `docker exec`
# with a login shell); /etc/profile is only read by login shells.
RUN printf 'export JAVA_HOME=/usr/local/openjdk-8\nexport PATH=$PATH:$JAVA_HOME/bin\n' >> /etc/profile

# Exec (JSON-array) form: auto.sh runs as PID 1 and receives SIGTERM from
# `docker stop` directly, instead of being wrapped in `/bin/sh -c` where the
# signal never reaches it. Resolved relative to WORKDIR ($DINKY_HOME).
CMD ["./bin/auto.sh", "startOnPending"]
