1.下載spark2.4.4及解壓文件
wget https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-2.4.4/spark-2.4.4-bin-without-hadoop-scala-2.12.tgz
# tar -C requires the target directory to exist; create it first.
mkdir -p /usr/local/spark
# The tarball was downloaded into the current directory (the original
# "spark-2.4.4/…" path does not exist locally). --strip-components=1 drops
# the top-level "spark-2.4.4-bin-without-hadoop-scala-2.12/" folder so the
# files land directly under /usr/local/spark, matching SPARK_HOME.
tar -zxvf spark-2.4.4-bin-without-hadoop-scala-2.12.tgz -C /usr/local/spark --strip-components=1
2.配置spark環境變量
vim ~/.bash_profile
# Add the Spark environment variables below to ~/.bash_profile
# (run `source ~/.bash_profile` afterwards for them to take effect):
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/bin
3.配置spark
# Run the following from Spark's conf directory ($SPARK_HOME/conf).
cp spark-env.sh.template spark-env.sh
vim spark-env.sh
# Append the following lines to spark-env.sh:
export SPARK_MASTER_IP=192.168.100.200
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath) # required for the "without-hadoop" build; omitting it causes startup errors
# Configure the worker (slave) node list, still in $SPARK_HOME/conf.
cp slaves.template slaves
vim slaves
# Add the worker hostnames below (one per line) to the slaves file:
node2
node3
4.將spark複製到從機
# Copy the Spark installation to each worker node.
for host in node2 node3; do
  scp -r /usr/local/spark "root@${host}:/usr/local/"
done
5.啓動spark
# From the Spark installation directory ($SPARK_HOME), start the
# master and all workers listed in conf/slaves:
sbin/start-all.sh
【備註】本文的所有配置均基於已完成Hadoop集群配置的環境,Hadoop集群的搭建可參考相關的Hadoop安裝配置文檔。