From 8fd5c7bc00b1104e4282959ec95b699955ded976 Mon Sep 17 00:00:00 2001
From: Andre Schumacher <schumach@icsi.berkeley.edu>
Date: Mon, 12 Aug 2013 18:00:35 -0700
Subject: [PATCH] Implement SPARK-865: Add the equivalent of ADD_JARS to
 PySpark

ADD_FILES now uses a comma as the file name separator.
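
For example, a hypothetical invocation (the file names here are
illustrative; MASTER falls back to "local" when it is unset):

    ADD_FILES="deps/util.py,deps/helpers.py" ./pyspark
    ...
    Spark context available as sc.
    Adding files: [deps/util.py, deps/helpers.py]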
---
 python/pyspark/shell.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 54ff1bf8e7..c8297b662e 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -7,10 +7,15 @@ import os
 import pyspark
 from pyspark.context import SparkContext
 
+# ADD_FILES is the PySpark equivalent of ADD_JARS: a comma-separated list of files to ship via pyFiles
+add_files = os.environ["ADD_FILES"].split(',') if "ADD_FILES" in os.environ else None
 
-sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell")
+sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell", pyFiles=add_files)
 print "Spark context avaiable as sc."
 
+if add_files is not None:
+    print "Adding files: [%s]" % ", ".join(add_files)
+
 # The ./pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
-- 
GitLab