[SPARK-4047] - Generate runtime warnings for example implementation of PageRank
varadharajan committed Oct 22, 2014
1 parent f05e09b commit 5c2bf54
Showing 2 changed files with 23 additions and 0 deletions.
8 changes: 8 additions & 0 deletions examples/src/main/python/pagerank.py
@@ -15,6 +15,11 @@
# limitations under the License.
#

"""
This is an example implementation of PageRank. For more conventional use,
Please refer to PageRank implementation provided by graphx
"""

import re
import sys
from operator import add
@@ -40,6 +45,9 @@ def parseNeighbors(urls):
print >> sys.stderr, "Usage: pagerank <file> <iterations>"
exit(-1)

print >> sys.stderr, """WARN: This is a naive implementation of PageRank and is
given as an example! Please refer to PageRank implementation provided by graphx"""

# Initialize the spark context.
sc = SparkContext(appName="PythonPageRank")

15 changes: 15 additions & 0 deletions examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
@@ -28,13 +28,28 @@ import org.apache.spark.{SparkConf, SparkContext}
* URL neighbor URL
* ...
* where URL and their neighbors are separated by space(s).
*
* This is an example implementation for learning how to use Spark. For more conventional use,
* please refer to org.apache.spark.graphx.lib.PageRank
*/
object SparkPageRank {

  def showWarning() {
    System.err.println(
      """WARN: This is a naive implementation of PageRank and is given as an example!
        |Please use the PageRank implementation found in org.apache.spark.graphx.lib.PageRank
        |for more conventional use.
      """.stripMargin)
  }

  def main(args: Array[String]) {
    if (args.length < 1) {
      System.err.println("Usage: SparkPageRank <file> <iter>")
      System.exit(1)
    }

    showWarning()

    val sparkConf = new SparkConf().setAppName("PageRank")
    val iters = if (args.length > 0) args(1).toInt else 10
    val ctx = new SparkContext(sparkConf)
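For reference, both of the new warnings steer users toward GraphX's built-in PageRank rather than these example implementations. Below is a minimal, hypothetical sketch of that recommended path. It is not part of this commit; the object name, the 0.0001 convergence tolerance, and the use of GraphLoader are illustrative assumptions, and GraphLoader expects an edge list with numeric vertex IDs rather than the URL strings these examples parse.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.GraphLoader

object GraphxPageRankSketch {
  def main(args: Array[String]) {
    val sc = new SparkContext(new SparkConf().setAppName("GraphxPageRankSketch"))
    // GraphLoader builds a graph from an edge-list file: one "srcId dstId" pair per line.
    val graph = GraphLoader.edgeListFile(sc, args(0))
    // graph.pageRank(tol) iterates until ranks change by less than tol (0.0001 here is arbitrary).
    val ranks = graph.pageRank(0.0001).vertices
    ranks.collect().foreach { case (id, rank) => println(s"$id has rank $rank.") }
    sc.stop()
  }
}

A fixed-iteration variant, org.apache.spark.graphx.lib.PageRank.run(graph, numIter), is closer in spirit to the iteration count these examples take on the command line.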
