diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
index f02cfa7587b4a..59c8a3047eaa6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
@@ -27,11 +27,13 @@ import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, MasterStateRe
 import org.apache.spark.deploy.JsonProtocol
 import org.apache.spark.deploy.StandaloneResourceUtils._
 import org.apache.spark.deploy.master._
+import org.apache.spark.internal.config.UI.MASTER_UI_TITLE
 import org.apache.spark.ui.{UIUtils, WebUIPage}
 import org.apache.spark.util.Utils

 private[ui] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
   private val master = parent.masterEndpointRef
+  private val title = parent.master.conf.get(MASTER_UI_TITLE)
   private val jsonFieldPattern = "/json/([a-zA-Z]+).*".r

   def getMasterState: MasterStateResponse = {
@@ -267,7 +269,7 @@ private[ui] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
       }
     </div>;

-    UIUtils.basicSparkPage(request, content, "Spark Master at " + state.uri)
+    UIUtils.basicSparkPage(request, content, title.getOrElse("Spark Master at " + state.uri))
   }

   private def workerRow(showResourceColumn: Boolean): WorkerInfo => Seq[Node] = worker => {
diff --git a/core/src/main/scala/org/apache/spark/internal/config/UI.scala b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
index 2ca92cbab06b2..917163f542262 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/UI.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
@@ -238,6 +238,13 @@ private[spark] object UI {
     .checkValues(Set("ALLOW", "LOCAL", "DENY"))
     .createWithDefault("LOCAL")

+  val MASTER_UI_TITLE = ConfigBuilder("spark.master.ui.title")
+    .version("4.0.0")
+    .doc("Specifies the title of the Master UI page. If unset, `Spark Master at ` " +
+      "is used by default.")
+    .stringConf
+    .createOptional
+
   val UI_SQL_GROUP_SUB_EXECUTION_ENABLED = ConfigBuilder("spark.ui.groupSQLSubExecutionEnabled")
    .doc("Whether to group sub executions together in SQL UI when they belong to the same " +
      "root execution")
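
For context, a minimal sketch of how the new optional entry behaves from the user's side, assuming only the public string-based SparkConf API; the title string "Staging Cluster Master" is illustrative and not part of the patch:

// Sketch of the new config's contract, using the string-based SparkConf API.
import org.apache.spark.SparkConf

val conf = new SparkConf()

// Unset: createOptional yields None, so MasterPage falls back to the
// old title, "Spark Master at " + state.uri.
assert(!conf.contains("spark.master.ui.title"))

// Set: title.getOrElse(...) returns the configured string verbatim as the page title.
conf.set("spark.master.ui.title", "Staging Cluster Master")
assert(conf.get("spark.master.ui.title") == "Staging Cluster Master")

In a real deployment this key would typically be set in conf/spark-defaults.conf (or passed via --properties-file) before starting the standalone Master, since the Master loads default Spark properties into its SparkConf at startup.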