diff --git a/examples/src/main/python/mllib/gradient_boosting_classification_example.py b/examples/src/main/python/mllib/gradient_boosting_classification_example.py
index 9f2cd2ab3fee6..a94ea0d582e59 100644
--- a/examples/src/main/python/mllib/gradient_boosting_classification_example.py
+++ b/examples/src/main/python/mllib/gradient_boosting_classification_example.py
@@ -27,7 +27,9 @@ from pyspark.mllib.tree import GradientBoostedTrees, GradientBoostedTreesModel
 from pyspark.mllib.util import MLUtils
 # $example off$
+
 if __name__ == "__main__":
+    sc = SparkContext(appName="PythonGradientBoostedTreesClassificationExample")
     # $example on$
     # Load and parse the data file.
     data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
diff --git a/examples/src/main/python/mllib/gradient_boosting_regression_example.py b/examples/src/main/python/mllib/gradient_boosting_regression_example.py
index b2cba31b5ea37..86040799dc1d9 100644
--- a/examples/src/main/python/mllib/gradient_boosting_regression_example.py
+++ b/examples/src/main/python/mllib/gradient_boosting_regression_example.py
@@ -27,8 +27,9 @@ from pyspark.mllib.tree import GradientBoostedTrees, GradientBoostedTreesModel
 from pyspark.mllib.util import MLUtils
 # $example off$
-# $example off#
+
 if __name__ == "__main__":
+    sc = SparkContext(appName="PythonGradientBoostedTreesRegressionExample")
     # $example on$
     # Load and parse the data file.
     data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
@@ -44,8 +45,8 @@
     # Evaluate model on test instances and compute test error
     predictions = model.predict(testData.map(lambda x: x.features))
     labelsAndPredictions = testData.map(lambda lp: lp.label).zip(predictions)
-    testMSE = labelsAndPredictions.map(lambda (v, p): (v - p) * (v - p)).sum() /
-        float(testData.count())
+    testMSE = labelsAndPredictions.map(lambda (v, p): (v - p) * (v - p)).sum() /\
+        float(testData.count())
     print('Test Mean Squared Error = ' + str(testMSE))
     print('Learned regression GBT model:')
     print(model.toDebugString())
diff --git a/examples/src/main/python/mllib/random_forest_classification_example.py b/examples/src/main/python/mllib/random_forest_classification_example.py
index b162e4d1f2cb4..324ba50625d25 100644
--- a/examples/src/main/python/mllib/random_forest_classification_example.py
+++ b/examples/src/main/python/mllib/random_forest_classification_example.py
@@ -27,7 +27,9 @@ from pyspark.mllib.tree import RandomForest, RandomForestModel
 from pyspark.mllib.util import MLUtils
 # $example off$
+
 if __name__ == "__main__":
+    sc = SparkContext(appName="PythonRandomForestClassificationExample")
     # $example on$
     # Load and parse the data file into an RDD of LabeledPoint.
     data = MLUtils.loadLibSVMFile(sc, 'data/mllib/sample_libsvm_data.txt')
diff --git a/examples/src/main/python/mllib/random_forest_regression_example.py b/examples/src/main/python/mllib/random_forest_regression_example.py
index bdf7166e63c2b..f7aa6114eceb3 100644
--- a/examples/src/main/python/mllib/random_forest_regression_example.py
+++ b/examples/src/main/python/mllib/random_forest_regression_example.py
@@ -27,7 +27,9 @@ from pyspark.mllib.tree import RandomForest, RandomForestModel
 from pyspark.mllib.util import MLUtils
 # $example off$
+
 if __name__ == "__main__":
+    sc = SparkContext(appName="PythonRandomForestRegressionExample")
     # $example on$
     # Load and parse the data file into an RDD of LabeledPoint.
     data = MLUtils.loadLibSVMFile(sc, 'data/mllib/sample_libsvm_data.txt')
@@ -45,8 +47,8 @@
     # Evaluate model on test instances and compute test error
     predictions = model.predict(testData.map(lambda x: x.features))
     labelsAndPredictions = testData.map(lambda lp: lp.label).zip(predictions)
-    testMSE = labelsAndPredictions.map(lambda (v, p): (v - p) * (v - p)).sum() /
-        float(testData.count())
+    testMSE = labelsAndPredictions.map(lambda (v, p): (v - p) * (v - p)).sum() /\
+        float(testData.count())
     print('Test Mean Squared Error = ' + str(testMSE))
     print('Learned regression forest model:')
     print(model.toDebugString())
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala
index e2fb7e5e76891..139e1f909bdce 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala
@@ -27,7 +27,7 @@ import org.apache.spark.mllib.util.MLUtils
 // $example off$
 
 object GradientBoostingClassificationExample {
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("GradientBoostedTreesClassificationExample")
     val sc = new SparkContext(conf)
     // $example on$
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala
index 69aa3796ffc34..3dc86da8e4d2b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala
@@ -27,7 +27,7 @@ import org.apache.spark.mllib.util.MLUtils
 // $example off$
 
 object GradientBoostingRegressionExample {
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("GradientBoostedTreesRegressionExample")
     val sc = new SparkContext(conf)
     // $example on$
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala
index 70cf0e845e02c..5e55abd5121c4 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala
@@ -26,7 +26,7 @@ import org.apache.spark.mllib.util.MLUtils
 // $example off$
 
 object RandomForestClassificationExample {
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("RandomForestClassificationExample")
     val sc = new SparkContext(conf)
     // $example on$
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestRegressionExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestRegressionExample.scala
index c0597d27557c2..a54fb3ab7e37a 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestRegressionExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestRegressionExample.scala
@@ -26,7 +26,7 @@ import org.apache.spark.mllib.util.MLUtils
 // $example off$
 
 object RandomForestRegressionExample {
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val conf = new SparkConf().setAppName("RandomForestRegressionExample")
     val sc = new SparkContext(conf)
     // $example on$