A walkthrough of the spark-class startup script

bin/spark-class is the low-level launcher behind Spark's user-facing scripts (spark-shell, spark-submit, the standalone daemons): it picks the JVM options and heap size for the requested class, computes the classpath, and finally exec's the JVM. The annotated copy below appears to be from the Spark 1.1 line.
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# NOTE: Any changes to this file must be reflected in SparkSubmitDriverBootstrapper.scala!

# Check whether we are running under Cygwin
cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac SCALA_VERSION=2.10 # Figure out where Spark is installed
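# (Annotation, not part of the original script: `uname` prints the kernel name,
# e.g. "Linux" on Linux, "Darwin" on macOS, and something like "CYGWIN_NT-6.1"
# under Cygwin, which is what the CYGWIN* glob above matches.)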
SCALA_VERSION=2.10

# Figure out where Spark is installed
# (cd to the parent of this script's directory, i.e., the Spark install root)
FWDIR="$(cd `dirname $0`/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"

# Run load-spark-env.sh, which sources conf/spark-env.sh (if present);
# spark-env.sh mainly holds environment variables for configuration values
# and paths used later in the launch
. $FWDIR/bin/load-spark-env.sh
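# (Annotation: conf/spark-env.sh is where per-machine settings live. A
# hypothetical example -- these values are illustrative, not from this post:
#   export JAVA_HOME=/usr/java/default
#   export SPARK_DAEMON_MEMORY=1g
#   export SPARK_WORKER_OPTS="-Dspark.worker.cleanup.enabled=true"
# Everything exported there is visible to the remainder of this script.)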
if [ -z "$1" ]; then
echo "Usage: spark-class <class> [<args>]" >&
exit
fi #如果SPARK_MEM不为null
if [ -n "$SPARK_MEM" ]; then
echo -e "Warning: SPARK_MEM is deprecated, please use a more specific config option" >&
echo -e "(e.g., spark.executor.memory or spark.driver.memory)." >&
fi # Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
#默认SPARK_MEM的大小为512M
DEFAULT_MEM=${SPARK_MEM:-512m} SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.akka.logLifecycleEvents=true" #注意SPARK_DRIVER_MEMORY从spark-env.sh的配置文件中读取SPARK_DRIVER_MEMORY参数 # Add java opts and memory settings for master, worker, history server, executors, and repl.
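# (Annotation: ${VAR:-default} is plain Bash parameter expansion -- it yields
# $VAR when VAR is set and non-empty, and the fallback otherwise:
#   $ SPARK_MEM=1g; echo ${SPARK_MEM:-512m}     # prints 1g
#   $ unset SPARK_MEM; echo ${SPARK_MEM:-512m}  # prints 512m
# The same pattern selects the per-class memory in the case block below.)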
case "$1" in
# Master, Worker, and HistoryServer use SPARK_DAEMON_JAVA_OPTS (and specific opts) + SPARK_DAEMON_MEMORY.
'org.apache.spark.deploy.master.Master')
OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_MASTER_OPTS"
OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
;;
'org.apache.spark.deploy.worker.Worker')
OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_WORKER_OPTS"
OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
;;
'org.apache.spark.deploy.history.HistoryServer')
OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_HISTORY_OPTS"
OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
;; # Executors use SPARK_JAVA_OPTS + SPARK_EXECUTOR_MEMORY.
'org.apache.spark.executor.CoarseGrainedExecutorBackend')
OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
;;
'org.apache.spark.executor.MesosExecutorBackend')
OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
;; # Spark submit uses SPARK_JAVA_OPTS + SPARK_SUBMIT_OPTS +
# SPARK_DRIVER_MEMORY + SPARK_SUBMIT_DRIVER_MEMORY.
'org.apache.spark.deploy.SparkSubmit')
OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_SUBMIT_OPTS"
OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
if [ -n "$SPARK_SUBMIT_LIBRARY_PATH" ]; then
OUR_JAVA_OPTS="$OUR_JAVA_OPTS -Djava.library.path=$SPARK_SUBMIT_LIBRARY_PATH"
fi
if [ -n "$SPARK_SUBMIT_DRIVER_MEMORY" ]; then
OUR_JAVA_MEM="$SPARK_SUBMIT_DRIVER_MEMORY"
fi
;; *)
OUR_JAVA_OPTS="$SPARK_JAVA_OPTS"
OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
;;
esac #找到java的安装目录 # Find the java binary
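# (Annotation: so, for example, starting the standalone master via
#   SPARK_DAEMON_MEMORY=2g ./bin/spark-class org.apache.spark.deploy.master.Master
# hits the first branch and OUR_JAVA_MEM resolves to 2g; with
# SPARK_DAEMON_MEMORY unset it falls back to $DEFAULT_MEM, i.e. 512m.)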
if [ -n "${JAVA_HOME}" ]; then
RUNNER="${JAVA_HOME}/bin/java"
else
if [ `command -v java` ]; then
RUNNER="java"
else
echo "JAVA_HOME is not set" >&
exit
fi
fi # Set JAVA_OPTS to be able to load native libraries and to set heap size
JAVA_OPTS="-XX:MaxPermSize=128m $OUR_JAVA_OPTS"
JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM" # Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e "$FWDIR/conf/java-opts" ] ; then
JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
fi # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala! TOOLS_DIR="$FWDIR"/tools SPARK_TOOLS_JAR=""
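# (Annotation: conf/java-opts, when present, is a plain-text file of extra JVM
# flags appended verbatim to JAVA_OPTS. A hypothetical one-line example:
#   -XX:+UseConcMarkSweepGC -Djava.io.tmpdir=/data/tmp
# )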
if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[-9Tg].jar ]; then
# Use the JAR from the SBT build
export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[-9Tg].jar`
fi
if [ -e "$TOOLS_DIR"/target/spark-tools*[-9Tg].jar ]; then
# Use the JAR from the Maven build
# TODO: this also needs to become an assembly!
export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[-9Tg].jar`
fi # Compute classpath using external script
# Compute classpath using external script
classpath_output=$($FWDIR/bin/compute-classpath.sh)
if [[ "$?" != "0" ]]; then
  echo "$classpath_output"
  exit 1
else
  CLASSPATH=$classpath_output
fi
# If the requested class is one of the Spark tools, the tools jar must be on the classpath
if [[ "$1" =~ org.apache.spark.tools.* ]]; then
  if test -z "$SPARK_TOOLS_JAR"; then
    echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SCALA_VERSION/" >&2
    echo "You need to build spark before running $1." >&2
    exit 1
  fi
  CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
fi

# Under Cygwin, convert the classpath to Windows form before handing it to the JVM
if $cygwin; then
  CLASSPATH=`cygpath -wp $CLASSPATH`
  if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
  fi
fi
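# (Annotation: cygpath translates Cygwin Unix-style paths to Windows ones; -p
# converts a whole PATH-like list, swapping ':' separators for ';', e.g.
#   $ cygpath -wp /usr/local/spark/conf:/usr/local/spark/lib/foo.jar
#   C:\cygwin64\usr\local\spark\conf;C:\cygwin64\usr\local\spark\lib\foo.jar
# which is the form a Windows JVM expects after -cp. The drive letter depends
# on where Cygwin is installed.)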
export CLASSPATH

# In Spark submit client mode, the driver is launched in the same JVM as Spark submit itself.
# Here we must parse the properties file for relevant "spark.driver.*" configs before launching
# the driver JVM itself. Instead of handling this complexity in Bash, we launch a separate JVM
# to prepare the launch environment of this driver JVM.
# (Either way, the org.apache.spark.deploy.SparkSubmit class is what ultimately runs.)
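# (Annotation: as far as I can tell, SPARK_SUBMIT_BOOTSTRAP_DRIVER is exported
# by bin/spark-submit in client mode when the properties file contains configs
# such as spark.driver.memory or spark.driver.extra* that must take effect
# before the driver JVM is created.)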
if [ -n "$SPARK_SUBMIT_BOOTSTRAP_DRIVER" ]; then
  # This is used only if the properties file actually contains these special configs
  # Export the environment variables needed by SparkSubmitDriverBootstrapper
  export RUNNER
  export CLASSPATH
  export JAVA_OPTS
  export OUR_JAVA_MEM
  export SPARK_CLASS=1
  shift # Ignore main class (org.apache.spark.deploy.SparkSubmit) and use our own
  exec "$RUNNER" org.apache.spark.deploy.SparkSubmitDriverBootstrapper "$@"
else
  # Note: The format of this command is closely echoed in SparkSubmitDriverBootstrapper.scala
  if [ -n "$SPARK_PRINT_LAUNCH_COMMAND" ]; then
    echo -n "Spark Command: " >&2
    # The following four echoes are the blog author's debugging additions; the
    # trailing comments show sample values from the author's Cygwin environment
    echo "$RUNNER"     # E:\Program Files\Java\jdk1..0_79/bin/java
    echo "$CLASSPATH"  # E:\cygwin64\home\hadoop2\hive\lib\mysql-connector-java-5.1.-bin.jar;E:\cygwin64\home\hadoop2\hive\conf\hive-site.xml;E:\cygwin64\home\hadoop2\spark-1.1.-bin-hadoop2.\lib\datanucleus-core-3.2..jar;E:\cygwin64\home\hadoop2\spark-1.1.-bin-hadoop2.\lib\datanucleus-api-jdo-3.2..jar;E:\cygwin64\home\hadoop2\spark-1.1.-bin-hadoop2.\lib\datanucleus-rdbms-3.2..jar;.;E:\cygwin64\usr\local\spark-1.1.-bin-hadoop2.\conf;E:\cygwin64\usr\local\spark-1.1.-bin-hadoop2.\lib\spark-assembly-1.1.-hadoop2.4.0.jar;E:\cygwin64\home\hadoop2\hadoop-2.5.\etc\hadoop\
    echo $JAVA_OPTS    # -XX:MaxPermSize=512m -Djline.terminal=unix -Xms2048M -Xmx2048M
    echo "$@"          # org.apache.spark.deploy.SparkSubmit --class org.apache.spark.repl.Main spark-shell
    echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" >&2
    echo -e "========================================\n" >&2
  fi

  exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
fi
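A handy way to watch all of this come together is the SPARK_PRINT_LAUNCH_COMMAND switch tested in the else branch above. The output looks along these lines (the java path and classpath are environment-specific and elided here):

$ SPARK_PRINT_LAUNCH_COMMAND=1 ./bin/spark-shell
Spark Command: /usr/java/default/bin/java -cp <...classpath...> -XX:MaxPermSize=128m -Xms512M -Xmx512M org.apache.spark.deploy.SparkSubmit --class org.apache.spark.repl.Main spark-shell
========================================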
Run it in client mode:

[screenshot from the original post, not preserved here]

Run a WordCount:

[screenshot from the original post, not preserved here]
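Since the original screenshots are gone, here is roughly what that looks like from the shell. run-example ships with the binary distribution and also funnels through spark-class; the input file is only an illustration:

$ echo "a b a" > /tmp/words.txt
$ ./bin/run-example JavaWordCount /tmp/words.txt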