#!/bin/bash

# Author: ALEX_2473 2473.top
# Not pretty, but it works.

# Component versions, used to tag locally built docker images.
# Marked readonly: they are constants and must not be clobbered below.
readonly HADOOP_VERSION=2.10.2
readonly HIVE_VERSION=2.3.9
readonly FLINK_VERSION=1.18.0
readonly SPARK_VERSION=3.3.4

# Start sshd inside every cluster node container.
# NOTE: the original used `docker exec -it`, which demands a TTY and fails
# when the script runs non-interactively (cron, CI, piped input). sshd is a
# daemon and needs no TTY, so plain `docker exec` is correct here.
start_ssh_fn() {
    local node
    for node in node0 node1 node2; do
        docker exec "$node" /usr/sbin/sshd
    done
}

# Run a docker-compose action against one compose file.
# Arguments:
#   $1 - path to the compose file
#   $2 - action: start | stop | up | down | build
# For `build`, flink/spark compose files get their image built via buildx
# (tagged with the matching *_VERSION constant); anything else falls back to
# `docker-compose build`.
# Returns non-zero and prints to stderr on an unknown action.
compose_cmd() {
    local compose_file="$1"
    local action="$2"

    case "$action" in
        start|stop|down)
            docker-compose -f "$compose_file" "$action"
            ;;
        up)
            # Always detach; this script is not meant to stay attached.
            docker-compose -f "$compose_file" up -d
            ;;
        build)
            if [[ "$compose_file" == *flink* ]]; then
                docker buildx build . -f DockerFile/flink.DockerFile -t "flink:${FLINK_VERSION}"
            elif [[ "$compose_file" == *spark* ]]; then
                docker buildx build . -f DockerFile/spark.DockerFile -t "spark:${SPARK_VERSION}"
            else
                docker-compose -f "$compose_file" build
            fi
            ;;
        *)
            # Fixed typo ("unkonwn"), route diagnostics to stderr, and
            # report failure to the caller instead of silently succeeding.
            echo "unknown command: $action" >&2
            return 1
            ;;
    esac
}

# Compose file for the core Hadoop cluster, shared by several commands.
CLUSTER_COMPOSE="Compose/compose-cluster.yml"

# Dispatch on the first CLI argument. Component commands (hive/spark/flink)
# forward their second argument as the compose action.
case "$1" in
    build)
        # Stop the running cluster before rebuilding the base image.
        compose_cmd "$CLUSTER_COMPOSE" stop
        docker buildx build . -f DockerFile/base.DockerFile -t "hadoop_base:${HADOOP_VERSION}"
        ;;
    start)
        compose_cmd "$CLUSTER_COMPOSE" start
        start_ssh_fn
        ;;
    stop)
        compose_cmd "$CLUSTER_COMPOSE" stop
        ;;
    deploy)
        # Wipe per-node HDFS data/name/tmp dirs so the cluster starts clean.
        sudo rm -rf data/hadoop/data/node*/*
        sudo rm -rf data/hadoop/name/node*/*
        sudo rm -rf data/hadoop/tmp/node*/*
        compose_cmd "$CLUSTER_COMPOSE" up
        start_ssh_fn
        ;;
    down)
        compose_cmd "$CLUSTER_COMPOSE" down
        ;;
    hive)
        compose_cmd "Compose/compose-hive.yml" "$2"
        ;;
    spark)
        compose_cmd "Compose/compose-spark.yml" "$2"
        ;;
    flink)
        compose_cmd "Compose/compose-flink.yml" "$2"
        ;;
    ""|help)
        # No argument now shows the help text instead of silently doing
        # nothing. The user-facing text is kept byte-for-byte.
        echo "hadoop 容器 管理
cluster.sh [ build | start | stop | deploy | down]
组件管理
cluster.sh [ hive | flink | spark ] [ build | start | stop | up | down ]"
        ;;
    *)
        # Previously an unrecognized command was a silent no-op.
        echo "unknown command: $1" >&2
        exit 1
        ;;
esac
