0. spark 下载解压
# Download the Spark 2.0.1 (Hadoop 2.6) binary distribution and unpack it.
spark_pkg="spark-2.0.1-bin-hadoop2.6.tgz"
wget "http://mirrors.cnnic.cn/apache/spark/spark-2.0.1/${spark_pkg}"
tar -zxvf "${spark_pkg}"
1. scala配置 https://www.scala-lang.org/download/2.11.8.html
# Configure Scala in the system-wide profile.
vi /etc/profile
# NOTE(review): the download link above is for Scala 2.11.8, but the original
# pointed SCALA_HOME at scala-2.11.7 — aligned to 2.11.8 here; confirm it
# matches the directory actually unpacked under /home/hadoop3.
export SCALA_HOME=/home/hadoop3/scala-2.11.8
export PATH=$PATH:$SCALA_HOME/bin
2. jdk配置
# Configure the JDK in the system-wide profile.
vi /etc/profile
export JAVA_HOME=/opt/java/jdk1.8.0_121
# Java reads CLASSPATH (one word); the original exported CLASS_PATH, which
# the JVM ignores.
export CLASSPATH=.:${JAVA_HOME}/lib:$CLASSPATH
# Fixed: the original had a stray space after "/bin:" which ended the
# assignment early and made the shell try to execute "${SCALA_HOME}/bin:..."
# as a command.
# NOTE(review): a leading "." on PATH is a security risk; kept only because
# the original had it — consider dropping it.
export PATH=.:${JAVA_HOME}/bin:${SCALA_HOME}/bin:$PATH
3. spark_home 配置
# Add SPARK_HOME and its bin/sbin directories to the system-wide profile.
vi /etc/profile
# Spark Environment Variables
export SPARK_HOME=/opt/spark-2.0.1-bin-hadoop2.6
export PATH=$SPARK_HOME/bin:$PATH
export PATH=$SPARK_HOME/sbin:$PATH
source /etc/profile # make the settings above take effect in this shell
4. spark-env.sh 配置
# Create spark-env.sh from the template (run inside $SPARK_HOME/conf) and edit it.
cp spark-env.sh.template spark-env.sh
# Fixed: the original opened "spark-env.h" (typo), so spark-env.sh itself was
# never actually edited.
vi spark-env.sh
# Paths aligned with the versions installed above (Scala 2.11.8 and
# spark-2.0.1-bin-hadoop2.6); the original referenced scala-2.12.2 and
# spark-2.1.1-bin-hadoop2.7, which were never downloaded.
export SCALA_HOME=/home/hadoop3/scala-2.11.8
export JAVA_HOME=/opt/java/jdk1.8.0_121
export SPARK_HOME=/opt/spark-2.0.1-bin-hadoop2.6
# SPARK_MASTER_IP is deprecated in Spark 2.x; SPARK_MASTER_HOST is the
# documented variable for the standalone master's bind host.
export SPARK_MASTER_HOST=spark01
export SPARK_EXECUTOR_MEMORY=1G
5. slaves 配置
# Create the slaves file from the template (run inside $SPARK_HOME/conf).
cp slaves.template slaves
vi slaves
# Contents of the slaves file — one worker hostname per line (not a command):
spark01
6. 启动与测试
测试:
# Sanity test: run the bundled SparkPi example with 10 partitions (run from $SPARK_HOME).
./bin/run-example SparkPi 10
shell启动:
# Launch the interactive Spark shell (run from $SPARK_HOME).
./bin/spark-shell
spark启动:
# Rename Spark's cluster scripts so they don't clash with Hadoop's identically
# named start-all.sh / stop-all.sh on the PATH.
# Fixed: the original mv used bare filenames, which only works if the current
# directory happens to be $SPARK_HOME/sbin — paths are now explicit.
mv $SPARK_HOME/sbin/start-all.sh $SPARK_HOME/sbin/spark-start-all.sh
mv $SPARK_HOME/sbin/stop-all.sh $SPARK_HOME/sbin/spark-stop-all.sh
# Start the standalone master and all workers listed in conf/slaves
# ($SPARK_HOME/sbin is on PATH via /etc/profile above).
spark-start-all.sh
Web UI 访问: http://spark01:8080