#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Shell script for starting the Spark Shell REPL

# Check whether we are running under Cygwin
cygwin=false
case "`uname`" in
  CYGWIN*) cygwin=true;;
esac

# Enter posix mode for bash
set -o posix

## Global script variables

# Resolve FWDIR to the Spark installation directory
FWDIR="$(cd `dirname $0`/..; pwd)"

# Define the usage (help) function.
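# For example, if this script lives at /opt/spark/bin/spark-shell (a hypothetical
# install path), FWDIR resolves to /opt/spark.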
# It delegates to spark-submit's help output, filtering out the spark-submit Usage lines below:
# Usage: spark-submit [options] <app jar | python file> [app arguments]
# Usage: spark-submit --kill [submission ID] --master [spark://...]
# Usage: spark-submit --status [submission ID] --master [spark://...]
function usage() {
  echo "Usage: ./bin/spark-shell [options]"
  $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  exit 0
}

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
fi

# Source the utils.sh script, which tidies up the script arguments, checks the
# validity of some of them, and assigns the two variables below.
# SUBMISSION_OPTS contains the spark-submit options:
#   key-value options:
#     --master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
#     --conf | --properties-file | --driver-memory | --driver-java-options | \
#     --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
#     --total-executor-cores | --executor-cores | --queue | --num-executors | --archives
#   flag (non key-value) options:
#     --verbose | -v | --supervise
#   Key-value options are checked to make sure the value is actually present.
#
# APPLICATION_OPTS contains every argument that is not in SUBMISSION_OPTS.
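# For example (a hypothetical invocation, not part of the script):
#   ./bin/spark-shell --master local[2] --name demo input.txt
# is split into:
#   SUBMISSION_OPTS=(--master local[2] --name demo)
#   APPLICATION_OPTS=(input.txt)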
source $FWDIR/bin/utils.sh

# Variable naming the usage function defined above
SUBMIT_USAGE_FUNCTION=usage

# Call gatherSparkSubmitOpts from utils.sh to sort the arguments into the two arrays
gatherSparkSubmitOpts "$@"

# Main function: launches the REPL via spark-submit --class org.apache.spark.repl.Main
function main() {
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, you may need to set the
    # "Backspace sends ^H" setting in the "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    $FWDIR/bin/spark-submit --class org.apache.spark.repl.Main "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    $FWDIR/bin/spark-submit --class org.apache.spark.repl.Main "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
  fi
}

# Copy restore-TTY-on-exit functions from the Scala script so spark-shell exits properly even in
# binary distribution of Spark where Scala is not installed
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  stty $saved_stty
  saved_stty=""
}

function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}

# to reenable echo if we are interrupted before completing.
trap onExit INT

# save terminal settings
saved_stty=$(stty -g 2>/dev/null)
# clear on error so we don't later try to restore them
if [[ ! $? ]]; then
  saved_stty=""
fi

main "$@"

# record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
exit_status=$?
onExit
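
To make the flow concrete, here is a sketch of what a plain (non-Cygwin) invocation expands to; the master URL and memory setting are hypothetical, purely for illustration:

./bin/spark-shell --master local[4] --executor-memory 2g
# gatherSparkSubmitOpts puts both options into SUBMISSION_OPTS, APPLICATION_OPTS stays empty,
# so main() effectively runs:
./bin/spark-submit --class org.apache.spark.repl.Main --master local[4] --executor-memory 2g spark-shell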

The contents of the utils.sh script:

#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Gather all spark-submit options into SUBMISSION_OPTS
function gatherSparkSubmitOpts() {

  if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then
    echo "Function for printing usage of $0 is not set." 1>&2
    echo "Please set usage function to shell variable 'SUBMIT_USAGE_FUNCTION' in $0" 1>&2
    exit 1
  fi

  # NOTE: If you add or remove spark-submit options,
  # modify NOT ONLY this script but also SparkSubmitArguments.scala
  SUBMISSION_OPTS=()
  APPLICATION_OPTS=()
  while (($#)); do
    case "$1" in
      --master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
      --conf | --properties-file | --driver-memory | --driver-java-options | \
      --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
      --total-executor-cores | --executor-cores | --queue | --num-executors | --archives)
        # Key-value options must be followed by their value
        if [[ $# -lt 2 ]]; then
          "$SUBMIT_USAGE_FUNCTION"
          exit 1;
        fi
        SUBMISSION_OPTS+=("$1"); shift
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      --verbose | -v | --supervise)
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      *)
        APPLICATION_OPTS+=("$1"); shift
        ;;
    esac
  done

  export SUBMISSION_OPTS
  export APPLICATION_OPTS
}
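
As a quick way to see gatherSparkSubmitOpts in isolation, here is a minimal sketch, assuming it is run from a Spark installation directory; the arguments are illustrative, not required:

source ./bin/utils.sh
SUBMIT_USAGE_FUNCTION=usage   # must be non-empty; the function is only invoked when a value is missing
gatherSparkSubmitOpts --master local[4] --verbose input.txt
echo "${SUBMISSION_OPTS[@]}"    # prints: --master local[4] --verbose
echo "${APPLICATION_OPTS[@]}"   # prints: input.txt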
