
docker exec -it hadoop-node1 /usr/local/hadoop-2.8.5/start-hadoop.sh
docker exec -it hadoop-node1 /usr/local/spark-3.0.1-bin-hadoop2.7/sbin/start-master.sh
docker exec -it hadoop-node2 /usr/local/spark-3.0.1-bin-hadoop2.7/sbin/start-slave.sh spark://hadoop-node1:7077
docker exec -it hadoop-node3 /usr/local/spark-3.0.1-bin-hadoop2.7/sbin/start-slave.sh spark://hadoop-node1:7077


## NOTE: starting Spark workers on too many nodes overloads the host machine,
## so only two Spark workers are started (on hadoop-node2 and hadoop-node3).