1.安裝Scala
1.1下載解壓安裝包
# Download Scala 2.12.2 into /usr/local, unpack it, and rename the directory to "scala"
cd /usr/local
wget https://downloads.lightbend.com/scala/2.12.2/scala-2.12.2.tgz
tar -zxvf scala-2.12.2.tgz
mv scala-2.12.2 scala
1.2配置環境變量
# Open the system-wide profile to append the Scala environment variables shown below
vi /etc/profile
# Lines to append to /etc/profile.
# Fixed: no spaces are allowed around "=" or ":" in shell assignments —
# "export SCALA_HOME= /usr/local/scala" sets SCALA_HOME to the empty string
# and then tries to execute /usr/local/scala as a command.
export SCALA_HOME=/usr/local/scala
export PATH=$PATH:$SCALA_HOME/bin
# Reload the profile so the new variables take effect in the current shell
source /etc/profile
# Verify the installation by printing the Scala version
scala -version
2.下載解壓Spark安裝包
# Download the Spark 2.4.4 build for Hadoop 2.7 into /usr/local,
# unpack it, and rename the directory to "spark"
cd /usr/local
wget http://mirror.bit.edu.cn/apache/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz
tar -zxvf spark-2.4.4-bin-hadoop2.7.tgz
mv spark-2.4.4-bin-hadoop2.7 spark
3.配置環境變量
# Open the system-wide profile to append the Spark environment variables shown below
sudo vi /etc/profile
# Lines to append to /etc/profile.
# Fixed: removed the illegal spaces after "=" and around ":" —
# with them the assignment sets SPARK_HOME empty and breaks PATH.
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/bin
# Reload the profile so SPARK_HOME and the updated PATH take effect
source /etc/profile
4.配置Spark系統文件
# Create spark-env.sh from the shipped template and open it to add the entries below
cd spark/conf
mv spark-env.sh.template spark-env.sh
vi spark-env.sh
# Contents to add to spark-env.sh.
# Fixed: removed the illegal spaces after "=" (they make every variable empty),
# and corrected the typo SPARK_WORKER_COSER -> SPARK_WORKER_CORES.
export JAVA_HOME=/usr/local/jdk1.8.0_231
export HADOOP_HOME=/usr/local/hadoop-2.7.1
export HADOOP_CONF_DIR=/usr/local/hadoop-2.7.1/etc/hadoop
# NOTE(review): SPARK_MASTER_IP is deprecated in Spark 2.x in favor of
# SPARK_MASTER_HOST, but the old name is still honored — confirm for your version.
export SPARK_MASTER_IP=dn1
# Memory allotted to each worker
export SPARK_WORKER_MEMORY=1g
# CPU cores per worker (was misspelled SPARK_WORKER_COSER, which Spark ignores)
export SPARK_WORKER_CORES=1
# Number of worker instances per node
export SPARK_WORKER_INSTANCES=1
# Create the worker list file from the template
mv slaves.template slaves
# The two lines below are the CONTENTS of the "slaves" file
# (one worker hostname per line), not commands to run:
dn2
dn3
# Distribute the profile and the Scala/Spark installations to the worker nodes
# (run from /usr/local on the master; remember to "source /etc/profile" on each worker)
scp /etc/profile hadoop@dn2:/etc
scp /etc/profile hadoop@dn3:/etc
scp -r scala hadoop@dn2:/usr/local
scp -r scala hadoop@dn3:/usr/local
scp -r spark hadoop@dn2:/usr/local
scp -r spark hadoop@dn3:/usr/local
5.啓動集羣
# Start the Spark master and all workers from the master node.
# Use the explicit ./ path so Spark's start-all.sh is run, not Hadoop's.
cd /usr/local/spark/sbin
./start-all.sh
# Confirm the Master/Worker JVM processes are running
jps
在瀏覽器中訪問 http://192.168.65.77:8080/ 查看 Spark Master 的 Web UI,確認各 Worker 節點已註冊。