Spark 1.6 Distributed Installation
Prerequisite: Hadoop YARN must already be running on the cluster. With that in place, begin the installation.
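To confirm the prerequisite, the YARN daemons can be checked from any node before proceeding; a quick sanity check using standard Hadoop commands (output will vary by cluster):

[hadoop@new-cdh9 ~]$ jps | grep -E 'ResourceManager|NodeManager'   # the YARN daemon(s) for this node should appear
[hadoop@new-cdh9 ~]$ yarn node -list                               # should report the live NodeManagers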
1. Extract and install Scala, then configure the environment variables
[hadoop@new-cdh9 soft]$ tar -zvxf scala-2.10.4.tgz
[hadoop@new-cdh13 ~]$ vi ~/.bash_profile
# .bash_profile
# Get the aliases and functions
if [ -f ~/.bashrc ]; then
. ~/.bashrc
fi
# User specific environment and startup programs
export SCALA_HOME=/hadoop/soft/scala-2.10.4/
export HADOOP_HOME=/hadoop/soft/hadoop-2.6.0-cdh5.7.0
#set zookeeper environment
export ZOOKEEPER_HOME=/hadoop/soft/zookeeper-3.4.5-cdh5.7.0
PATH=$HOME/bin:$ZOOKEEPER_HOME/conf:$HADOOP_HOME/sbin:$SCALA_HOME/bin:$PATH
export PATH
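The new variables only take effect in a fresh shell or after re-sourcing the profile. A quick sanity check (assuming the paths above exist on this node):

[hadoop@new-cdh13 ~]$ source ~/.bash_profile
[hadoop@new-cdh13 ~]$ scala -version    # should report Scala code runner version 2.10.4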
2. Extract and install Spark, then configure the environment variables
[hadoop@new-cdh13 soft]$ tar -zvxf spark-1.6.0-cdh5.7.0.tar.gz
[hadoop@new-cdh9 soft]$ vi ~/.bash_profile
# .bash_profile
# Get the aliases and functions
if [ -f ~/.bashrc ]; then
. ~/.bashrc
fi
# User specific environment and startup programs
export SCALA_HOME=/hadoop/soft/scala-2.10.4/
export SPARK_HOME=/hadoop/soft/spark-1.6.0-cdh5.7.0
export HBASE_HOME=/hadoop/soft/hbase-1.2.0-cdh5.7.0
export HADOOP_HOME=/hadoop/soft/hadoop-2.6.0-cdh5.7.0
PATH=$HOME/bin:$HADOOP_HOME/sbin:$SCALA_HOME/bin:$SPARK_HOME/bin:$PATH
export PATH
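As in step 1, re-source the profile and confirm that the Spark binaries resolve through $PATH; for example:

[hadoop@new-cdh9 ~]$ source ~/.bash_profile
[hadoop@new-cdh9 ~]$ which spark-submit      # should point under /hadoop/soft/spark-1.6.0-cdh5.7.0/bin
[hadoop@new-cdh9 ~]$ spark-submit --version  # should print the Spark 1.6.0 version banner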
3. Copy the configuration template files
[hadoop@new-cdh9 conf]$ cd /hadoop/soft/spark-1.6.0-cdh5.7.0/conf
[hadoop@new-cdh9 conf]$ cp spark-env.sh.template spark-env.sh
[hadoop@new-cdh9 conf]$ cp slaves.template slaves
4. Edit slaves and spark-env.sh, then copy them to the other nodes (a distribution sketch follows the configuration below)
[hadoop@new-cdh9 conf]$ vi slaves
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A Spark Worker will be started on each of the machines listed below.
new-cdh9
new-cdh10
new-cdh11
new-cdh12
new-cdh13
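Note that start-all.sh contacts every host listed in slaves over SSH as the hadoop user, so passwordless SSH from the master to each listed node is required. A quick check, using the hostnames from the slaves file above:

[hadoop@new-cdh9 conf]$ for h in new-cdh9 new-cdh10 new-cdh11 new-cdh12 new-cdh13; do
>   ssh "$h" hostname    # each host should echo its name without a password prompt
> done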
[hadoop@new-cdh9 conf]$ vi spark-env.sh
# updated by jingshuai
export SPARK_CONF_DIR=/hadoop/soft/spark-1.6.0-cdh5.7.0/conf
export SPARK_HOME=/hadoop/soft/spark-1.6.0-cdh5.7.0
export JAVA_HOME=/opt/jdk1.7.0_79
export SCALA_HOME=/hadoop/soft/scala-2.10.4/
export HADOOP_CONF_DIR=/hadoop/soft/hadoop-2.6.0-cdh5.7.0/etc/hadoop
export HADOOP_HOME=/hadoop/soft/hadoop-2.6.0-cdh5.7.0
### Path of Spark assembly jar in HDFS
export SPARK_JAR_HDFS_PATH=hdfs://familyha/user/spark/lib/spark-assembly-1.6.0-cdh5.7.0-hadoop2.6.0-cdh5.7.0.jar
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$HADOOP_HOME/share/hadoop/tools/lib/*"
export SPARK_DIST_CLASSPATH
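Two follow-up actions are implied by the configuration above: SPARK_JAR_HDFS_PATH expects the assembly jar to already be in HDFS, and step 4 calls for copying the edited files to the other nodes. A sketch of both, assuming the same directory layout on every node and that the assembly jar sits in $SPARK_HOME/lib (the local jar location is an assumption; adjust to your distribution):

[hadoop@new-cdh9 conf]$ hdfs dfs -mkdir -p /user/spark/lib
[hadoop@new-cdh9 conf]$ hdfs dfs -put $SPARK_HOME/lib/spark-assembly-1.6.0-cdh5.7.0-hadoop2.6.0-cdh5.7.0.jar /user/spark/lib/
[hadoop@new-cdh9 conf]$ for h in new-cdh10 new-cdh11 new-cdh12 new-cdh13; do
>   scp slaves spark-env.sh "$h":/hadoop/soft/spark-1.6.0-cdh5.7.0/conf/
> done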
5. Start Spark
[hadoop@new-cdh9 conf]$ ../sbin/start-all.sh
starting org.apache.spark.deploy.master.Master, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.master.Master-1-new-cdh9.out
new-cdh9: starting org.apache.spark.deploy.worker.Worker, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-new-cdh9.out
new-cdh12: starting org.apache.spark.deploy.worker.Worker, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-new-cdh12.out
new-cdh13: starting org.apache.spark.deploy.worker.Worker, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-new-cdh13.out
new-cdh10: starting org.apache.spark.deploy.worker.Worker, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-new-cdh10.out
new-cdh11: starting org.apache.spark.deploy.worker.Worker, logging to /hadoop/soft/spark-1.6.0-cdh5.7.0/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-new-cdh11.out
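Once start-all.sh returns, the cluster can be verified with jps on each node and through the Master web UI, which listens on port 8080 by default in standalone mode (http://new-cdh9:8080 should list all five workers as ALIVE). A smoke test with the bundled SparkPi example (7077 is the default Master port; the examples jar name mirrors the assembly naming above and is an assumption):

[hadoop@new-cdh9 conf]$ jps | grep -E 'Master|Worker'    # Master plus a Worker on this node
[hadoop@new-cdh9 conf]$ spark-submit --class org.apache.spark.examples.SparkPi \
>   --master spark://new-cdh9:7077 \
>   $SPARK_HOME/lib/spark-examples-1.6.0-cdh5.7.0-hadoop2.6.0-cdh5.7.0.jar 10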