From 134bdc9eae09aaf61f0e851d1de44de1c4da4f73 Mon Sep 17 00:00:00 2001
From: AbhishekKr
Date: Sun, 13 Apr 2014 13:40:47 +0530
Subject: [PATCH] pyspark require Python2, failing if system default is Py3
 from shell.py

---
 python/pyspark/shell.py | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 61613dbed8dce..e8ba050655a30 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -20,6 +20,14 @@
 This file is designed to be launched as a PYTHONSTARTUP script.
 """
 
+
+import sys
+if sys.version_info.major != 2:
+    print("Error: Default Python used is Python%s" % sys.version_info.major)
+    print("\tSet env variable PYSPARK_PYTHON to Python2 binary and re-run it.")
+    sys.exit(1)
+
+
 import os
 import platform
 import pyspark
@@ -34,21 +42,21 @@
 sc = SparkContext(os.environ.get("MASTER", "local[*]"), "PySparkShell", pyFiles=add_files)
 
-print """Welcome to
+print("""Welcome to
       ____              __
      / __/__  ___ _____/ /__
     _\ \/ _ \/ _ `/ __/  '_/
    /__ / .__/\_,_/_/ /_/\_\   version 1.0.0-SNAPSHOT
       /_/
-"""
-print "Using Python version %s (%s, %s)" % (
+""")
+print("Using Python version %s (%s, %s)" % (
     platform.python_version(),
     platform.python_build()[0],
-    platform.python_build()[1])
-print "Spark context available as sc."
+    platform.python_build()[1]))
+print("Spark context available as sc.")
 
 if add_files != None:
-    print "Adding files: [%s]" % ", ".join(add_files)
+    print("Adding files: [%s]" % ", ".join(add_files))
 
 
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file: