diff --git a/bin/run-example b/bin/run-example
index 5af95a08c6c41..b2999198a8d41 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -75,7 +75,6 @@ fi
 
 # Set JAVA_OPTS to be able to load native libraries and to set heap size
 JAVA_OPTS="$SPARK_JAVA_OPTS"
-JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
 if [ -e "$FWDIR/conf/java-opts" ] ; then
   JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
diff --git a/bin/spark-class b/bin/spark-class
index 1b0d309cc5b1c..ea97e3d7768a7 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -98,7 +98,6 @@ fi
 
 # Set JAVA_OPTS to be able to load native libraries and to set heap size
 JAVA_OPTS="$OUR_JAVA_OPTS"
-JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
 JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
 if [ -e "$FWDIR/conf/java-opts" ] ; then
diff --git a/docs/configuration.md b/docs/configuration.md
index cd9f1a2d2cf7f..642d4644b37b0 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -650,8 +650,9 @@ Apart from these, the following properties are also available, and may be useful
   <td>spark.executor.extraJavaOptions</td>
   <td>(none)</td>
   <td>
-    A string of extra JVM options to pass to executors. For instance, GC settings. Note that
-    it is illegal to set Spark properties or heap size settings with this flag.
+    A string of extra JVM options to pass to executors. For instance, GC settings or custom
+    paths for native code. Note that it is illegal to set Spark properties or heap size
+    settings with this option.
   </td>
 </tr>
 <tr>
@@ -678,7 +679,6 @@ The following variables can be set in `spark-env.sh`:
 * `JAVA_HOME`, the location where Java is installed (if it's not on your default `PATH`)
 * `PYSPARK_PYTHON`, the Python binary to use for PySpark
 * `SPARK_LOCAL_IP`, to configure which IP address of the machine to bind to.
-* `SPARK_LIBRARY_PATH`, to add search directories for native libraries.
 * `SPARK_CLASSPATH`, to add elements to Spark's classpath that you want to be present for _all_
   applications. Note that applications can also add dependencies for themselves through
   `SparkContext.addJar` -- we recommend doing that when possible.
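
For applications migrating off `SPARK_LIBRARY_PATH`: a native-library search path can still reach the executor JVMs through the `spark.executor.extraJavaOptions` property documented above, by passing `-Djava.library.path` there. A minimal sketch using `SparkConf`; the `/opt/native-libs` directory and the application name are placeholders, not part of this patch:

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Point the executor JVMs at a directory of native libraries.
// "/opt/native-libs" is an illustrative path, not something this patch ships.
val conf = new SparkConf()
  .setAppName("native-lib-example")
  .set("spark.executor.extraJavaOptions", "-Djava.library.path=/opt/native-libs")

val sc = new SparkContext(conf)
```

This only covers the executors; whether the driver JVM also needs the path depends on the application and deployment mode.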