#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Shell script for starting the Spark Shell REPL

# Check whether we are running under Cygwin
cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac # Enter posix mode for bash
set -o posix ## Global script variables #进入到spark的安装目录
FWDIR="$(cd `dirname $0`/..; pwd)" #定义帮助信息的方法
# Define the usage/help function.
# It calls spark-submit's own help output and merely filters out the following
# spark-submit usage lines:
# Usage: spark-submit [options] <app jar | python file> [app arguments]
# Usage: spark-submit --kill [submission ID] --master [spark://...]
# Usage: spark-submit --status [submission ID] --master [spark://...]
function usage() {
  echo "Usage: ./bin/spark-shell [options]"
  $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  exit 0
}

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
fi
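A note on the redirections in usage(): spark-submit prints its help text on stderr, so 2>&1 first merges stderr into stdout so that grep can filter it, and the trailing 1>&2 sends the surviving lines back to stderr. A minimal sketch of the same pattern, using a hypothetical tool function as a stand-in for spark-submit:

  #!/usr/bin/env bash
  # "tool" is a hypothetical stand-in that, like spark-submit, prints help on stderr.
  tool() {
    echo "Usage: tool [options]" 1>&2
    echo "  --flag   does something" 1>&2
  }
  # 2>&1 folds tool's stderr into stdout so grep can see it;
  # grep -v Usage drops the usage line; 1>&2 pushes the rest back to stderr.
  tool 2>&1 | grep -v Usage 1>&2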
# Source the utils.sh script below. It normalizes the script arguments,
# validates some of them, and assigns values to these two variables:
#
# SUBMISSION_OPTS holds the spark-submit options:
#   key-value options: --master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
#     --conf | --properties-file | --driver-memory | --driver-java-options | \
#     --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
#     --total-executor-cores | --executor-cores | --queue | --num-executors | --archives
#   flag options (no value): --verbose | -v | --supervise
#   For key-value options the argument count must be checked, i.e. each option
#   must be followed by a value (a concrete example follows the utils.sh listing below).
#
# APPLICATION_OPTS holds every remaining argument not captured in SUBMISSION_OPTS.
source $FWDIR/bin/utils.sh

# Variable naming the usage function defined above
SUBMIT_USAGE_FUNCTION=usage

# Call gatherSparkSubmitOpts from utils.sh to sort the arguments into the two arrays
gatherSparkSubmitOpts "$@"

# Main function: invokes spark-submit with --class org.apache.spark.repl.Main
function main() {
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, you may need to set the
    # "Backspace sends ^H" setting in the "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    $FWDIR/bin/spark-submit --class org.apache.spark.repl.Main "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    $FWDIR/bin/spark-submit --class org.apache.spark.repl.Main "${SUBMISSION_OPTS[@]}" spark-shell "${APPLICATION_OPTS[@]}"
  fi
}

# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
# binary distribution of Spark where Scala is not installed
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  stty $saved_stty
  saved_stty=""
}

function onExit() {
if [[ "$saved_stty" != "" ]]; then
restoreSttySettings
fi
exit $exit_status
} # to reenable echo if we are interrupted before completing.
trap onExit INT # save terminal settings
saved_stty=$(stty -g >/dev/null)
# clear on error so we don't later try to restore them
if [[ ! $? ]]; then
saved_stty=""
fi main "$@" # record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
exit_status=$?
onExit
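Everything after main() is a general terminal-hygiene pattern: snapshot the TTY modes with stty -g before starting, trap SIGINT so a Ctrl-C cannot leave the terminal with echo disabled, and restore the snapshot on every exit path. A standalone sketch of the same pattern, assuming nothing from Spark (it uses an explicit $? -ne 0 check, the more robust way to detect that stty -g failed, e.g. when no TTY is attached):

  #!/usr/bin/env bash
  # Standalone sketch of the save/trap/restore TTY pattern.
  saved_stty=$(stty -g 2>/dev/null)          # snapshot current terminal modes
  if [[ $? -ne 0 ]]; then saved_stty=""; fi  # no TTY: nothing to restore later

  onExit() {
    [[ -n "$saved_stty" ]] && stty "$saved_stty"  # re-enable echo etc.
    exit "${exit_status:-130}"                    # 130 = 128 + SIGINT by convention
  }
  trap onExit INT

  stty -echo 2>/dev/null   # simulate the REPL turning echo off
  sleep 3                  # stand-in for the REPL; press Ctrl-C here to test the trap
  exit_status=$?
  onExit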

Contents of the utils.sh script:

#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Gather all spark-submit options into SUBMISSION_OPTS
function gatherSparkSubmitOpts() {

  if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then
    echo "Function for printing usage of $0 is not set." 1>&2
    echo "Please set usage function to shell variable 'SUBMIT_USAGE_FUNCTION' in $0" 1>&2
    exit 1
  fi

  # NOTE: If you add or remove spark-submit options,
  # modify NOT ONLY this script but also SparkSubmitArgument.scala
  SUBMISSION_OPTS=()
  APPLICATION_OPTS=()
  while (($#)); do
    case "$1" in
      --master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
      --conf | --properties-file | --driver-memory | --driver-java-options | \
      --driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
      --total-executor-cores | --executor-cores | --queue | --num-executors | --archives)
        if [[ $# -lt 2 ]]; then
          "$SUBMIT_USAGE_FUNCTION"
          exit 1;
        fi
        SUBMISSION_OPTS+=("$1"); shift
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      --verbose | -v | --supervise)
        SUBMISSION_OPTS+=("$1"); shift
        ;;

      *)
        APPLICATION_OPTS+=("$1"); shift
        ;;
    esac
  done

  export SUBMISSION_OPTS
  export APPLICATION_OPTS
}
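To make the splitting concrete, here is a small test harness (an assumption-laden sketch: it must be run from SPARK_HOME so that ./bin/utils.sh exists, and the option values are invented). --master and --driver-memory take values, so each lands in SUBMISSION_OPTS together with its argument; --verbose is a bare flag; the remaining arguments fall through to APPLICATION_OPTS and are handed to the REPL itself:

  #!/usr/bin/env bash
  # Demo harness; assumes the current directory is SPARK_HOME.
  function usage() { echo "usage stub"; exit 1; }
  SUBMIT_USAGE_FUNCTION=usage
  source ./bin/utils.sh

  gatherSparkSubmitOpts --master "local[4]" --driver-memory 2g --verbose -i init.scala
  echo "SUBMISSION_OPTS : ${SUBMISSION_OPTS[*]}"
  echo "APPLICATION_OPTS: ${APPLICATION_OPTS[*]}"
  # Expected:
  #   SUBMISSION_OPTS : --master local[4] --driver-memory 2g --verbose
  #   APPLICATION_OPTS: -i init.scala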
