Fix for "JAVA_HOME is not set" when PySpark runs from a Windows IDE through a remote Linux interpreter: assign JAVA_HOME via os.environ before the SparkSession is created (a sketch follows below).
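A minimal sketch of the assignment, assuming an OpenJDK 8 install under /usr/lib/jvm on the remote host; the JDK path is an assumption and must be replaced with whatever "readlink -f $(which java)" resolves to on your machine. Only the appName ("PythonPi") comes from the script shown in the traceback below.

import os

# Assumed JDK location on the remote Linux host -- adjust to your environment.
os.environ["JAVA_HOME"] = "/usr/lib/jvm/java-8-openjdk-amd64"
# Optional: make the matching java binary visible on PATH as well.
os.environ["PATH"] = os.environ["JAVA_HOME"] + "/bin:" + os.environ.get("PATH", "")

from pyspark.sql import SparkSession

spark = SparkSession.builder \
    .appName("PythonPi") \
    .getOrCreate()

The assignment has to happen before the first SparkSession/SparkContext is built, because PySpark copies os.environ into the environment of the spark-submit subprocess at that point (see the source further down). Without it, running the script through the remote interpreter fails: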

ssh://root@192.168.2.51:22/usr/bin/python -u /home/data/tmp_test/trunk/personas/tmp_spark_mongo_relset_javahome.py
JAVA_HOME is not set
Traceback (most recent call last):
  File "/home/data/tmp_test/trunk/personas/tmp_spark_mongo_relset_javahome.py", line 16, in <module>
    .appName("PythonPi")\
  File "/root/.local/lib/python3.6/site-packages/pyspark/sql/session.py", line 169, in getOrCreate
    sc = SparkContext.getOrCreate(sparkConf)
  File "/root/.local/lib/python3.6/site-packages/pyspark/context.py", line 334, in getOrCreate
    SparkContext(conf=conf or SparkConf())
  File "/root/.local/lib/python3.6/site-packages/pyspark/context.py", line 115, in __init__
    SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
  File "/root/.local/lib/python3.6/site-packages/pyspark/context.py", line 283, in _ensure_initialized
    SparkContext._gateway = gateway or launch_gateway(conf)
  File "/root/.local/lib/python3.6/site-packages/pyspark/java_gateway.py", line 95, in launch_gateway
    raise Exception("Java gateway process exited before sending the driver its port number")
Exception: Java gateway process exited before sending the driver its port number

Checking the source, pyspark/java_gateway.py:

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import atexit
import os
import sys
import select
import signal
import shlex
import socket
import platform
from subprocess import Popen, PIPE

if sys.version >= '3':
    xrange = range

from py4j.java_gateway import java_import, JavaGateway, GatewayClient
from pyspark.find_spark_home import _find_spark_home
from pyspark.serializers import read_int


def launch_gateway(conf=None):
    """
    launch jvm gateway
    :param conf: spark configuration passed to spark-submit
    :return:
    """
    if "PYSPARK_GATEWAY_PORT" in os.environ:
        gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
    else:
        SPARK_HOME = _find_spark_home()
        # Launch the Py4j gateway using Spark's run command so that we pick up the
        # proper classpath and settings from spark-env.sh
        on_windows = platform.system() == "Windows"
        script = "./bin/spark-submit.cmd" if on_windows else "./bin/spark-submit"
        command = [os.path.join(SPARK_HOME, script)]
        if conf:
            for k, v in conf.getAll():
                command += ['--conf', '%s=%s' % (k, v)]
        submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        if os.environ.get("SPARK_TESTING"):
            submit_args = ' '.join([
                "--conf spark.ui.enabled=false",
                submit_args
            ])
        command = command + shlex.split(submit_args)

        # Start a socket that will be used by PythonGatewayServer to communicate its port to us
        callback_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        callback_socket.bind(('127.0.0.1', 0))
        callback_socket.listen(1)
        callback_host, callback_port = callback_socket.getsockname()
        env = dict(os.environ)
        env['_PYSPARK_DRIVER_CALLBACK_HOST'] = callback_host
        env['_PYSPARK_DRIVER_CALLBACK_PORT'] = str(callback_port)

        # Launch the Java gateway.
        # We open a pipe to stdin so that the Java gateway can die when the pipe is broken
        if not on_windows:
            # Don't send ctrl-c / SIGINT to the Java gateway:
            def preexec_func():
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            proc = Popen(command, stdin=PIPE, preexec_fn=preexec_func, env=env)
        else:
            # preexec_fn not supported on Windows
            proc = Popen(command, stdin=PIPE, env=env)

        gateway_port = None
        # We use select() here in order to avoid blocking indefinitely if the subprocess dies
        # before connecting
        while gateway_port is None and proc.poll() is None:
            timeout = 1  # (seconds)
            readable, _, _ = select.select([callback_socket], [], [], timeout)
            if callback_socket in readable:
                gateway_connection = callback_socket.accept()[0]
                # Determine which ephemeral port the server started on:
                gateway_port = read_int(gateway_connection.makefile(mode="rb"))
                gateway_connection.close()
                callback_socket.close()
        if gateway_port is None:
            raise Exception("Java gateway process exited before sending the driver its port number")

        # In Windows, ensure the Java child processes do not linger after Python has exited.
        # In UNIX-based systems, the child process can kill itself on broken pipe (i.e. when
        # the parent process' stdin sends an EOF). In Windows, however, this is not possible
        # because java.lang.Process reads directly from the parent process' stdin, contending
        # with any opportunity to read an EOF from the parent. Note that this is only best
        # effort and will not take effect if the python process is violently terminated.
        if on_windows:
            # In Windows, the child process here is "spark-submit.cmd", not the JVM itself
            # (because the UNIX "exec" command is not available). This means we cannot simply
            # call proc.kill(), which kills only the "spark-submit.cmd" process but not the
            # JVMs. Instead, we use "taskkill" with the tree-kill option "/t" to terminate all
            # child processes in the tree (http://technet.microsoft.com/en-us/library/bb491009.aspx)
            def killChild():
                Popen(["cmd", "/c", "taskkill", "/f", "/t", "/pid", str(proc.pid)])
            atexit.register(killChild)

    # Connect to the gateway
    gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=True)

    # Import the classes used by PySpark
    java_import(gateway.jvm, "org.apache.spark.SparkConf")
    java_import(gateway.jvm, "org.apache.spark.api.java.*")
    java_import(gateway.jvm, "org.apache.spark.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.ml.python.*")
    java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
    # TODO(davies): move into sql
    java_import(gateway.jvm, "org.apache.spark.sql.*")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
    java_import(gateway.jvm, "scala.Tuple2")

    return gateway
