export SCALA_HOME=/usr/local/Cellar/scala@2.11/2.11.12/
export PATH=$PATH:$JAVA_HOME/bin:$MAVEN_HOME/bin:$MYSQL_HOME/bin:/usr/local/bin:$SCALA_HOME/bin
$ scala -version
Scala code runner version 2.11.12 -- Copyright 2002-2017, LAMP/EPFL
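As an extra sanity check that the runner can actually compile and execute code, not just print its version string, a one-line expression can be evaluated with the -e flag:

$ scala -e 'println(2 + 2)'
4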
$ tar -zxf spark-2.1.0-bin-without-hadoop.tgz
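After extraction, the top-level layout should look roughly like the listing below (exact contents vary slightly by release); the conf directory holds spark-env.sh, which is needed later:

$ ls spark-2.1.0-bin-without-hadoop
LICENSE  NOTICE  R  README.md  RELEASE  bin  conf  data  examples  jars  licenses  python  sbin  yarn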
Configure the Spark environment variables:
$ vim ~/.bash_profile
# Add the following lines:
export SPARK_HOME=/Users/walker/software/spark/spark-2.1.0
export PATH=$PATH:$SPARK_HOME/bin
$ source ~/.bash_profile
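A quick check that the new variables are visible in the current shell; the paths should match the values set above:

$ echo $SPARK_HOME
/Users/walker/software/spark/spark-2.1.0
$ which spark-shell
/Users/walker/software/spark/spark-2.1.0/bin/spark-shell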
Starting spark-shell then fails with the following error:
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.fs.FSDataInputStream
# Solution: this build is "without Hadoop", so Spark's classpath must be pointed
# at the jars of a local Hadoop installation via SPARK_DIST_CLASSPATH.
# vim spark-env.sh
export SPARK_DIST_CLASSPATH=$(/Users/walker/software/hadoop/hadoop-2.7.1/bin/hadoop classpath)
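Putting the fix together: conf/spark-env.sh does not exist in a fresh distribution and is created from the bundled template, the export line is appended, and spark-shell is launched again. A minimal sketch, assuming the Hadoop path used above; the final sum is a trivial job just to prove the shell works end to end:

$ cd $SPARK_HOME/conf
$ cp spark-env.sh.template spark-env.sh
# single quotes keep $(...) unexpanded, so "hadoop classpath" runs when Spark sources the file
$ echo 'export SPARK_DIST_CLASSPATH=$(/Users/walker/software/hadoop/hadoop-2.7.1/bin/hadoop classpath)' >> spark-env.sh
$ spark-shell
scala> sc.parallelize(1 to 100).sum
res0: Double = 5050.0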
Original post: https://www.cnblogs.com/wooluwalker/p/12249867.html