From d1cad414ca4fbf88a410571277ad49a87c14cecc Mon Sep 17 00:00:00 2001
From: soleksy-splunk <143183665+soleksy-splunk@users.noreply.github.com>
Date: Tue, 21 Jan 2025 15:28:36 +0100
Subject: [PATCH] feat(dashboard): zero line when no data found (#1480)
**Issue number:**
https://splunk.atlassian.net/browse/ADDON-76195
### PR Type
**What kind of change does this PR introduce?**
* [x] Feature
* [ ] Bug Fix
* [ ] Refactoring (no functional or API changes)
* [ ] Documentation Update
* [ ] Maintenance (dependency updates, CI, etc.)
## Summary
A zero line is displayed when no data is found, for all dashboard charts
except those in the Resource consumption tab.
Pros:
- a zero-line chart is displayed instead of the "No data found" message
Cons:
- it adds some cost, because extra computation is needed to generate the
default zero data, but compared to the regular search operations this
should be negligible: even for the whole previous year the query generates
an event every 5 minutes and takes about 0.12 s locally, and it is cached
for 10 minutes, so unless real-time queries are used it should not add
more computation even after page refreshes.
To sum up:
Compared to the data search itself, this operation should not be a problem.
Some information about Smart Sources
(https://splunkui.splunk.com/Packages/dashboard-docs/Smartsources):
```
enableSmartSourceDS: true,
```
This feature makes it possible to reference already finished search jobs
and their results inside subsequent searches. Here it is used to check
whether the primary chart/table search returned any results: if not, the
default zero values are applied; if it did, the appended query is ignored.
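A simplified sketch of how this is wired in the generated dashboard
definitions (trimmed from overview_definition.json below; queryParameters
are omitted): the primary ds.search gets a name and enableSmartSources,
and a ds.chain data source extends it with the zero-line query guarded by
the resultCount check on the named job. The visualization then references
the `_ds_chain` data source instead of the plain search.
```
"overview_errors_count_ds": {
    "type": "ds.search",
    "options": {
        "query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) | timechart count as Errors by exc_l",
        "enableSmartSources": true
    },
    "name": "error_count"
},
"overview_errors_count_ds_chain": {
    "type": "ds.chain",
    "options": {
        "extend": "overview_errors_count_ds",
        "query": "| append [ ... generate zero values ... | head ($error_count:job.resultCount$==0)]"
    }
}
```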
### Changes
For data charts that previously showed the "No data found" message, a
chart with a zero line is now displayed instead, indicating that no events
occurred.
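The appended zero-line search, shown here unescaped and simplified (this
sketch assumes a relative earliest token and latest set to "now"; the
generated query additionally handles absolute ISO timestamps), generates a
zero-valued data point every 5 minutes across the selected time range, and
the final head drops all of them whenever the primary search did return
results:
```
| append [ gentimes increment=5m
    [ makeresults
      | eval start=strftime(relative_time(now(), "$overview_time.earliest$"), "%m/%d/%Y:%T")
      | eval end=strftime(now(), "%m/%d/%Y:%T")
      | return start end ]
  | eval Errors = 0
  | fields - endhuman starthuman starttime
  | rename endtime as _time
  | head ($error_count:job.resultCount$==0) ]
```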
### User experience
#### Overview before changes:
#### Overview after changes:
#### Data Ingestion tab before changes:

#### Data Ingestion tab after changes:
#### Error tab before changes:

#### Error tab after changes:
#### Tables for errors are filled with zero values when a data volume is
found but the number-of-events search returned no data.
#### Resource consumption tab after changes remains the same:
## Checklist
If an item doesn't apply to your changes, leave it unchecked.
* [ ] I have performed a self-review of this change according to the
[development
guidelines](https://splunk.github.io/addonfactory-ucc-generator/contributing/#development-guidelines)
* [ ] Tests have been added/modified to cover the changes [(testing
doc)](https://splunk.github.io/addonfactory-ucc-generator/contributing/#build-and-test)
* [ ] Changes are documented
* [x] PR title and description follows the [contributing
principles](https://splunk.github.io/addonfactory-ucc-generator/contributing/#pull-requests)
---
splunk_add_on_ucc_framework/dashboard.py | 92 ++++++++++++++++---
.../data_ingestion_tab_definition.json | 30 +++++-
.../templates/errors_tab_definition.json | 15 ++-
.../templates/overview_definition.json | 30 +++++-
.../data_ingestion_tab_definition.json | 44 +++++++--
.../errors_tab_definition.json | 17 +++-
.../expected_results/overview_definition.json | 32 ++++++-
ui/src/pages/Dashboard/DashboardModal.tsx | 3 +-
ui/src/pages/Dashboard/DataIngestion.tsx | 2 +
ui/src/pages/Dashboard/Error.tsx | 2 +
ui/src/pages/Dashboard/Overview.tsx | 2 +
ui/src/pages/Dashboard/consts.ts | 14 +++
ui/src/pages/Dashboard/dashboardStyle.css | 2 +-
13 files changed, 253 insertions(+), 32 deletions(-)
create mode 100644 ui/src/pages/Dashboard/consts.ts
diff --git a/splunk_add_on_ucc_framework/dashboard.py b/splunk_add_on_ucc_framework/dashboard.py
index 076429f63..c9ff85ba0 100644
--- a/splunk_add_on_ucc_framework/dashboard.py
+++ b/splunk_add_on_ucc_framework/dashboard.py
@@ -42,6 +42,14 @@
)
SUPPORTED_PANEL_NAMES_READABLE = ", ".join(SUPPORTED_PANEL_NAMES)
+# default sparkline with 0 values as text
+DEFAULT_SPARK_LINE = '\\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\\"'
+
+# query to fill table cell chart with zero line if no data is available
+FILL_DEFAULT_SPARKLINE_AND_VALUE = (
+ f"| fillnull value={DEFAULT_SPARK_LINE} sparkevent | fillnull value=0 events "
+)
+
default_definition_json_filename = {
"overview": "overview_definition.json",
"data_ingestion_tab": "data_ingestion_tab_definition.json",
@@ -63,7 +71,37 @@
"| join _time [search index=_internal source=*{addon_name}* action=events_ingested "
'| timechart sum(n_events) as \\"Number of events\\" ]'
)
-errors_count = "index=_internal source=*{addon_name}* log_level IN ({log_lvl}) | timechart count as Errors by exc_l"
+
+errors_count = "index=_internal source=*{addon_name}* log_level IN ({log_lvl}) | timechart count as Errors by exc_l "
+
+# query that generates default zero data if the basic query returned no results
+# "| head (${basic_query_token}:job.resultCount$==0)]" checks whether the basic query returned 0 results
+# requires smart sources to be enabled
+zero_line_search_query = (
+ "| append [ gentimes increment=5m [ makeresults "
+ "| eval start=strftime( "
+ 'if(\\"${time_token}.earliest$\\"=\\"now\\"'
+ ",now(),"
+ 'if(match(\\"${time_token}.earliest$\\",\\"^\\\\d+-\\\\d+-\\\\d+(T?\\\\d+:\\\\d+:\\\\d+(\\\\.\\\\d{{3}}Z)?)$\\"),'
+ 'strptime(\\"${time_token}.earliest$\\", \\"%Y-%m-%dT%H:%M:%S.%N\\")'
+ ',relative_time(now(), \\"${time_token}.earliest$\\")'
+ ")"
+ "), "
+ '\\"%m/%d/%Y:%T\\")'
+ "| eval end=strftime("
+ 'if(\\"${time_token}.latest$\\"=\\"now\\",'
+ "now(),"
+ 'if(match(\\"${time_token}.latest$\\",\\"^\\\\d+-\\\\d+-\\\\d+(T?\\\\d+:\\\\d+:\\\\d+(\\\\.\\\\d{{3}}Z)?)$\\"),'
+ 'strptime(\\"${time_token}.latest$\\", \\"%Y-%m-%dT%H:%M:%S.%N\\") '
+ ',relative_time(now(), \\"${time_token}.latest$\\")'
+ ")"
+ "), "
+ '\\"%m/%d/%Y:%T\\")'
+ "| return start end] "
+ "| eval {value_label} = 0 | fields - endhuman starthuman starttime "
+ "| rename endtime as _time | head (${basic_query_token}:job.resultCount$==0)]"
+)
+
events_count = (
"index=_internal source=*{addon_name}* action=events_ingested | "
'timechart sum(n_events) as \\"Number of events\\"'
@@ -71,23 +109,28 @@
table_sourcetype_query = (
"index=_internal source=*license_usage.log type=Usage ({determine_by} IN ({lic_usg_condition})) "
- "| stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st "
+ "| fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st "
"| join type=left st [search index = _internal source=*{addon_name}* action=events_ingested "
"| stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, "
"sum(n_events) as events by sourcetype_ingested "
- '| rename sourcetype_ingested as st ] | makemv delim=\\",\\" sparkevent '
+ "| rename sourcetype_ingested as st ] "
+ f"{FILL_DEFAULT_SPARKLINE_AND_VALUE}"
+ '| makemv delim=\\",\\" sparkevent '
'| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") '
'| table st, Bytes, sparkvolume, events, sparkevent, \\"Last event\\" '
'| rename st as \\"Source type\\", Bytes as \\"Data volume\\", events as \\"Number of events\\", '
'sparkvolume as \\"Volume trendline (Bytes)\\", sparkevent as \\"Event trendline\\"'
)
+
table_source_query = (
"index=_internal source=*license_usage.log type=Usage ({determine_by} IN ({lic_usg_condition})) "
- "| stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s "
+ "| fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s "
"| join type=left s [search index = _internal source=*{addon_name}* action=events_ingested "
"| stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, "
"sum(n_events) as events by modular_input_name "
- '| rename modular_input_name as s ] | makemv delim=\\",\\" sparkevent '
+ "| rename modular_input_name as s ] "
+ f"{FILL_DEFAULT_SPARKLINE_AND_VALUE}"
+ '| makemv delim=\\",\\" sparkevent '
'| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") '
'| table s, Bytes, sparkvolume, events, sparkevent, \\"Last event\\" '
'| rename s as \\"Source\\", Bytes as \\"Data volume\\", events as \\"Number of events\\", '
@@ -96,17 +139,19 @@
table_host_query = (
"index=_internal source=*license_usage.log type=Usage "
"({determine_by} IN ({lic_usg_condition})) "
- "| stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h "
+ "| fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h "
"| table h, Bytes, sparkvolume "
'| rename h as \\"Host\\", Bytes as \\"Data volume\\", sparkvolume as \\"Volume trendline (Bytes)\\"'
)
table_index_query = (
"index=_internal source=*license_usage.log type=Usage ({determine_by} IN ({lic_usg_condition})) "
- "| stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx "
+ "| fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx "
"| join type=left idx [search index = _internal source=*{addon_name}* action=events_ingested "
"| stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, "
"sum(n_events) as events by event_index "
- '| rename event_index as idx ] | makemv delim=\\",\\" sparkevent '
+ "| rename event_index as idx ] "
+ f"{FILL_DEFAULT_SPARKLINE_AND_VALUE}"
+ '| makemv delim=\\",\\" sparkevent '
'| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") '
'| table idx, Bytes, sparkvolume, events, sparkevent, \\"Last event\\" '
'| rename idx as \\"Index\\", Bytes as \\"Data volume\\", events as \\"Number of events\\", '
@@ -114,6 +159,7 @@
)
table_account_query = (
"index = _internal source=*{addon_name}* action=events_ingested "
+ "| fillnull value=0 n_events "
"| stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_account "
'| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") '
'| table event_account, events, sparkevent, \\"Last event\\" '
@@ -127,7 +173,9 @@
'| table title, Active | rename title as \\"event_input\\" | join type=left event_input [ '
"search index = _internal source=*{addon_name_lowercase}* action=events_ingested "
"| stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_input "
- '| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") ] | makemv delim=\\",\\" sparkevent '
+ '| eval \\"Last event\\" = strftime(le, \\"%e %b %Y %I:%M%p\\") ] '
+ f"{FILL_DEFAULT_SPARKLINE_AND_VALUE}"
+ '| makemv delim=\\",\\" sparkevent '
'| table event_input, Active, events, sparkevent, \\"Last event\\" '
'| rename event_input as \\"Input\\", events as \\"Number of events\\", sparkevent as \\"Event trendline\\"'
)
@@ -174,6 +222,16 @@ def generate_dashboard_content(
errors_count=errors_count.format(
addon_name=addon_name.lower(), log_lvl=error_panel_log_lvl
),
+ errors_count_zero_line=zero_line_search_query.format(
+ value_label="Errors",
+ basic_query_token="error_count",
+ time_token="overview_time",
+ ),
+ data_ingestion_and_events_zero_line=zero_line_search_query.format(
+ value_label="Number of events",
+ basic_query_token="data_volume",
+ time_token="overview_time",
+ ),
events_count=events_count.format(addon_name=addon_name.lower()),
)
)
@@ -186,10 +244,17 @@ def generate_dashboard_content(
data_ingestion=data_ingestion.format(
lic_usg_condition=lic_usg_condition, determine_by=determine_by
),
- errors_count=errors_count.format(
- addon_name=addon_name.lower(), log_lvl=error_panel_log_lvl
+ data_ingestion_volume_zero_line=zero_line_search_query.format(
+ value_label="Data volume",
+ basic_query_token="data_volume",
+ time_token="data_ingestion_time",
),
events_count=events_count.format(addon_name=addon_name.lower()),
+ data_ingestion_event_count_zero_line=zero_line_search_query.format(
+ value_label="Number of events",
+ basic_query_token="data_ingestion_events_count",
+ time_token="data_ingestion_time",
+ ),
table_sourcetype=table_sourcetype_query.format(
lic_usg_condition=lic_usg_condition,
addon_name=addon_name.lower(),
@@ -225,6 +290,11 @@ def generate_dashboard_content(
errors_count=errors_count.format(
addon_name=addon_name.lower(), log_lvl=error_panel_log_lvl
),
+ errors_count_tab_zero_line=zero_line_search_query.format(
+ value_label="Errors",
+ basic_query_token="error_count_tab",
+ time_token="errors_tab_time",
+ ),
errors_list=errors_list_query.format(
addon_name=addon_name.lower(), log_lvl=error_panel_log_lvl
),
diff --git a/splunk_add_on_ucc_framework/templates/data_ingestion_tab_definition.json b/splunk_add_on_ucc_framework/templates/data_ingestion_tab_definition.json
index 3528f4955..7e16e483e 100644
--- a/splunk_add_on_ucc_framework/templates/data_ingestion_tab_definition.json
+++ b/splunk_add_on_ucc_framework/templates/data_ingestion_tab_definition.json
@@ -46,7 +46,7 @@
},
"title": "Data volume",
"dataSources": {
- "primary": "data_ingestion_data_volume_ds"
+ "primary": "data_ingestion_data_volume_ds_chain"
}
},
"data_ingestion_events_count_viz": {
@@ -61,7 +61,7 @@
},
"title": "Number of events",
"dataSources": {
- "primary": "data_ingestion_events_count_ds"
+ "primary": "data_ingestion_events_count_ds_chain"
}
},
"data_ingestion_table_viz": {
@@ -136,6 +136,19 @@
"type": "ds.search",
"options": {
"query": "{{data_ingestion}}",
+ "queryParameters": {
+ "earliest": "$data_ingestion_time.earliest$",
+ "latest": "$data_ingestion_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_volume"
+ },
+ "data_ingestion_data_volume_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "data_ingestion_data_volume_ds",
+ "query": "{{data_ingestion_volume_zero_line}}",
"queryParameters": {
"earliest": "$data_ingestion_time.earliest$",
"latest": "$data_ingestion_time.latest$"
@@ -146,6 +159,19 @@
"type": "ds.search",
"options": {
"query": "{{events_count}}",
+ "queryParameters": {
+ "earliest": "$data_ingestion_time.earliest$",
+ "latest": "$data_ingestion_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_ingestion_events_count"
+ },
+ "data_ingestion_events_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "data_ingestion_events_count_ds",
+ "query": "{{data_ingestion_event_count_zero_line}}",
"queryParameters": {
"earliest": "$data_ingestion_time.earliest$",
"latest": "$data_ingestion_time.latest$"
diff --git a/splunk_add_on_ucc_framework/templates/errors_tab_definition.json b/splunk_add_on_ucc_framework/templates/errors_tab_definition.json
index e37605929..1c02b97e9 100644
--- a/splunk_add_on_ucc_framework/templates/errors_tab_definition.json
+++ b/splunk_add_on_ucc_framework/templates/errors_tab_definition.json
@@ -45,7 +45,7 @@
},
"title": "Errors count",
"dataSources": {
- "primary": "errors_tab_errors_count_ds"
+ "primary": "errors_tab_errors_count_ds_chain"
}
},
"errors_tab_errors_list_viz": {
@@ -81,6 +81,19 @@
"type": "ds.search",
"options": {
"query": "{{errors_count}}",
+ "queryParameters": {
+ "earliest": "$errors_tab_time.earliest$",
+ "latest": "$errors_tab_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "error_count_tab"
+ },
+ "errors_tab_errors_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "errors_tab_errors_count_ds",
+ "query": "{{errors_count_tab_zero_line}}",
"queryParameters": {
"earliest": "$errors_tab_time.earliest$",
"latest": "$errors_tab_time.latest$"
diff --git a/splunk_add_on_ucc_framework/templates/overview_definition.json b/splunk_add_on_ucc_framework/templates/overview_definition.json
index 935e62b1b..e409d2dce 100644
--- a/splunk_add_on_ucc_framework/templates/overview_definition.json
+++ b/splunk_add_on_ucc_framework/templates/overview_definition.json
@@ -62,7 +62,7 @@
},
"title": "Data ingestion",
"dataSources": {
- "primary": "overview_data_volume_ds"
+ "primary": "overview_data_volume_ds_chain"
}
},
"overview_errors_viz": {
@@ -74,7 +74,7 @@
},
"title": "Errors",
"dataSources": {
- "primary": "overview_errors_count_ds"
+ "primary": "overview_errors_count_ds_chain"
}
}
},
@@ -103,6 +103,19 @@
"type": "ds.search",
"options": {
"query": "{{data_ingestion_and_events}}",
+ "queryParameters": {
+ "earliest": "$overview_time.earliest$",
+ "latest": "$overview_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_volume"
+ },
+ "overview_data_volume_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "overview_data_volume_ds",
+ "query": "{{data_ingestion_and_events_zero_line}}",
"queryParameters": {
"earliest": "$overview_time.earliest$",
"latest": "$overview_time.latest$"
@@ -113,6 +126,19 @@
"type": "ds.search",
"options": {
"query": "{{errors_count}}",
+ "queryParameters": {
+ "earliest": "$overview_time.earliest$",
+ "latest": "$overview_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "error_count"
+ },
+ "overview_errors_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "overview_errors_count_ds",
+ "query": "{{errors_count_zero_line}}",
"queryParameters": {
"earliest": "$overview_time.earliest$",
"latest": "$overview_time.latest$"
diff --git a/tests/unit/expected_results/data_ingestion_tab_definition.json b/tests/unit/expected_results/data_ingestion_tab_definition.json
index ae470a03d..aa69c7f1a 100644
--- a/tests/unit/expected_results/data_ingestion_tab_definition.json
+++ b/tests/unit/expected_results/data_ingestion_tab_definition.json
@@ -46,7 +46,7 @@
},
"title": "Data volume",
"dataSources": {
- "primary": "data_ingestion_data_volume_ds"
+ "primary": "data_ingestion_data_volume_ds_chain"
}
},
"data_ingestion_events_count_viz": {
@@ -61,7 +61,7 @@
},
"title": "Number of events",
"dataSources": {
- "primary": "data_ingestion_events_count_ds"
+ "primary": "data_ingestion_events_count_ds_chain"
}
},
"data_ingestion_table_viz": {
@@ -136,6 +136,19 @@
"type": "ds.search",
"options": {
"query": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | timechart sum(b) as Usage | rename Usage as \"Data volume\"",
+ "queryParameters": {
+ "earliest": "$data_ingestion_time.earliest$",
+ "latest": "$data_ingestion_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_volume"
+ },
+ "data_ingestion_data_volume_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "data_ingestion_data_volume_ds",
+ "query": "| append [ gentimes increment=5m [ makeresults | eval start=strftime( if(\"$data_ingestion_time.earliest$\"=\"now\",now(),if(match(\"$data_ingestion_time.earliest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$data_ingestion_time.earliest$\", \"%Y-%m-%dT%H:%M:%S.%N\"),relative_time(now(), \"$data_ingestion_time.earliest$\"))), \"%m/%d/%Y:%T\")| eval end=strftime(if(\"$data_ingestion_time.latest$\"=\"now\",now(),if(match(\"$data_ingestion_time.latest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$data_ingestion_time.latest$\", \"%Y-%m-%dT%H:%M:%S.%N\") ,relative_time(now(), \"$data_ingestion_time.latest$\"))), \"%m/%d/%Y:%T\")| return start end] | eval Data volume = 0 | fields - endhuman starthuman starttime | rename endtime as _time | head ($data_volume:job.resultCount$==0)]",
"queryParameters": {
"earliest": "$data_ingestion_time.earliest$",
"latest": "$data_ingestion_time.latest$"
@@ -146,6 +159,19 @@
"type": "ds.search",
"options": {
"query": "index=_internal source=*splunk_ta_uccexample* action=events_ingested | timechart sum(n_events) as \"Number of events\"",
+ "queryParameters": {
+ "earliest": "$data_ingestion_time.earliest$",
+ "latest": "$data_ingestion_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_ingestion_events_count"
+ },
+ "data_ingestion_events_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "data_ingestion_events_count_ds",
+ "query": "| append [ gentimes increment=5m [ makeresults | eval start=strftime( if(\"$data_ingestion_time.earliest$\"=\"now\",now(),if(match(\"$data_ingestion_time.earliest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$data_ingestion_time.earliest$\", \"%Y-%m-%dT%H:%M:%S.%N\"),relative_time(now(), \"$data_ingestion_time.earliest$\"))), \"%m/%d/%Y:%T\")| eval end=strftime(if(\"$data_ingestion_time.latest$\"=\"now\",now(),if(match(\"$data_ingestion_time.latest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$data_ingestion_time.latest$\", \"%Y-%m-%dT%H:%M:%S.%N\") ,relative_time(now(), \"$data_ingestion_time.latest$\"))), \"%m/%d/%Y:%T\")| return start end] | eval Number of events = 0 | fields - endhuman starthuman starttime | rename endtime as _time | head ($data_ingestion_events_count:job.resultCount$==0)]",
"queryParameters": {
"earliest": "$data_ingestion_time.earliest$",
"latest": "$data_ingestion_time.latest$"
@@ -179,30 +205,30 @@
"items": [
{
"label": "Source type",
- "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
+ "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | fillnull value=\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\" sparkevent | fillnull value=0 events | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Source",
- "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s | join type=left s [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by modular_input_name | rename modular_input_name as s ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table s, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename s as \"Source\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
+ "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s | join type=left s [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by modular_input_name | rename modular_input_name as s ] | fillnull value=\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\" sparkevent | fillnull value=0 events | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table s, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename s as \"Source\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Host",
- "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h | table h, Bytes, sparkvolume | rename h as \"Host\", Bytes as \"Data volume\", sparkvolume as \"Volume trendline (Bytes)\""
+ "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h | table h, Bytes, sparkvolume | rename h as \"Host\", Bytes as \"Data volume\", sparkvolume as \"Volume trendline (Bytes)\""
},
{
"label": "Index",
- "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx | join type=left idx [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_index | rename event_index as idx ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table idx, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename idx as \"Index\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
+ "value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx | join type=left idx [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_index | rename event_index as idx ] | fillnull value=\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\" sparkevent | fillnull value=0 events | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table idx, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename idx as \"Index\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Account",
- "value": "index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_account | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table event_account, events, sparkevent, \"Last event\" | rename event_account as \"Account\", events as \"Number of events\", sparkevent as \"Event trendline\""
+ "value": "index = _internal source=*splunk_ta_uccexample* action=events_ingested | fillnull value=0 n_events | stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_account | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table event_account, events, sparkevent, \"Last event\" | rename event_account as \"Account\", events as \"Number of events\", sparkevent as \"Event trendline\""
},
{
"label": "Input",
- "value": "| rest splunk_server=local /services/data/inputs/all | where $eai:acl.app$ = \"Splunk_TA_UCCExample\" | eval Active=if(lower(disabled) IN (\"1\", \"true\", \"t\"), \"no\", \"yes\") | table title, Active | rename title as \"event_input\" | join type=left event_input [ search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_input | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") ] | makemv delim=\",\" sparkevent | table event_input, Active, events, sparkevent, \"Last event\" | rename event_input as \"Input\", events as \"Number of events\", sparkevent as \"Event trendline\""
+ "value": "| rest splunk_server=local /services/data/inputs/all | where $eai:acl.app$ = \"Splunk_TA_UCCExample\" | eval Active=if(lower(disabled) IN (\"1\", \"true\", \"t\"), \"no\", \"yes\") | table title, Active | rename title as \"event_input\" | join type=left event_input [ search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_input | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") ] | fillnull value=\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\" sparkevent | fillnull value=0 events | makemv delim=\",\" sparkevent | table event_input, Active, events, sparkevent, \"Last event\" | rename event_input as \"Input\", events as \"Number of events\", sparkevent as \"Event trendline\""
}
],
- "defaultValue": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\"",
+ "defaultValue": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | fillnull value=0 b | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | fillnull value=\"##__SPARKLINE__##,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0\" sparkevent | fillnull value=0 events | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\"",
"token": "table_view_by"
},
"title": "View by"
diff --git a/tests/unit/expected_results/errors_tab_definition.json b/tests/unit/expected_results/errors_tab_definition.json
index d0863e824..3c23590ad 100644
--- a/tests/unit/expected_results/errors_tab_definition.json
+++ b/tests/unit/expected_results/errors_tab_definition.json
@@ -45,7 +45,7 @@
},
"title": "Errors count",
"dataSources": {
- "primary": "errors_tab_errors_count_ds"
+ "primary": "errors_tab_errors_count_ds_chain"
}
},
"errors_tab_errors_list_viz": {
@@ -80,7 +80,20 @@
"errors_tab_errors_count_ds": {
"type": "ds.search",
"options": {
- "query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) | timechart count as Errors by exc_l",
+ "query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) | timechart count as Errors by exc_l ",
+ "queryParameters": {
+ "earliest": "$errors_tab_time.earliest$",
+ "latest": "$errors_tab_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "error_count_tab"
+ },
+ "errors_tab_errors_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "errors_tab_errors_count_ds",
+ "query": "| append [ gentimes increment=5m [ makeresults | eval start=strftime( if(\"$errors_tab_time.earliest$\"=\"now\",now(),if(match(\"$errors_tab_time.earliest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$errors_tab_time.earliest$\", \"%Y-%m-%dT%H:%M:%S.%N\"),relative_time(now(), \"$errors_tab_time.earliest$\"))), \"%m/%d/%Y:%T\")| eval end=strftime(if(\"$errors_tab_time.latest$\"=\"now\",now(),if(match(\"$errors_tab_time.latest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$errors_tab_time.latest$\", \"%Y-%m-%dT%H:%M:%S.%N\") ,relative_time(now(), \"$errors_tab_time.latest$\"))), \"%m/%d/%Y:%T\")| return start end] | eval Errors = 0 | fields - endhuman starthuman starttime | rename endtime as _time | head ($error_count_tab:job.resultCount$==0)]",
"queryParameters": {
"earliest": "$errors_tab_time.earliest$",
"latest": "$errors_tab_time.latest$"
diff --git a/tests/unit/expected_results/overview_definition.json b/tests/unit/expected_results/overview_definition.json
index 27bb620c4..be76046ad 100644
--- a/tests/unit/expected_results/overview_definition.json
+++ b/tests/unit/expected_results/overview_definition.json
@@ -62,7 +62,7 @@
},
"title": "Data ingestion",
"dataSources": {
- "primary": "overview_data_volume_ds"
+ "primary": "overview_data_volume_ds_chain"
}
},
"overview_errors_viz": {
@@ -74,7 +74,7 @@
},
"title": "Errors",
"dataSources": {
- "primary": "overview_errors_count_ds"
+ "primary": "overview_errors_count_ds_chain"
}
}
},
@@ -103,6 +103,19 @@
"type": "ds.search",
"options": {
"query": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | timechart sum(b) as Usage | rename Usage as \"Data volume\" | join _time [search index=_internal source=*splunk_ta_uccexample* action=events_ingested | timechart sum(n_events) as \"Number of events\" ]",
+ "queryParameters": {
+ "earliest": "$overview_time.earliest$",
+ "latest": "$overview_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "data_volume"
+ },
+ "overview_data_volume_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "overview_data_volume_ds",
+ "query": "| append [ gentimes increment=5m [ makeresults | eval start=strftime( if(\"$overview_time.earliest$\"=\"now\",now(),if(match(\"$overview_time.earliest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$overview_time.earliest$\", \"%Y-%m-%dT%H:%M:%S.%N\"),relative_time(now(), \"$overview_time.earliest$\"))), \"%m/%d/%Y:%T\")| eval end=strftime(if(\"$overview_time.latest$\"=\"now\",now(),if(match(\"$overview_time.latest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$overview_time.latest$\", \"%Y-%m-%dT%H:%M:%S.%N\") ,relative_time(now(), \"$overview_time.latest$\"))), \"%m/%d/%Y:%T\")| return start end] | eval Number of events = 0 | fields - endhuman starthuman starttime | rename endtime as _time | head ($data_volume:job.resultCount$==0)]",
"queryParameters": {
"earliest": "$overview_time.earliest$",
"latest": "$overview_time.latest$"
@@ -112,7 +125,20 @@
"overview_errors_count_ds": {
"type": "ds.search",
"options": {
- "query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) | timechart count as Errors by exc_l",
+ "query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) | timechart count as Errors by exc_l ",
+ "queryParameters": {
+ "earliest": "$overview_time.earliest$",
+ "latest": "$overview_time.latest$"
+ },
+ "enableSmartSources": true
+ },
+ "name": "error_count"
+ },
+ "overview_errors_count_ds_chain": {
+ "type": "ds.chain",
+ "options": {
+ "extend": "overview_errors_count_ds",
+ "query": "| append [ gentimes increment=5m [ makeresults | eval start=strftime( if(\"$overview_time.earliest$\"=\"now\",now(),if(match(\"$overview_time.earliest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$overview_time.earliest$\", \"%Y-%m-%dT%H:%M:%S.%N\"),relative_time(now(), \"$overview_time.earliest$\"))), \"%m/%d/%Y:%T\")| eval end=strftime(if(\"$overview_time.latest$\"=\"now\",now(),if(match(\"$overview_time.latest$\",\"^\\d+-\\d+-\\d+(T?\\d+:\\d+:\\d+(\\.\\d{3}Z)?)$\"),strptime(\"$overview_time.latest$\", \"%Y-%m-%dT%H:%M:%S.%N\") ,relative_time(now(), \"$overview_time.latest$\"))), \"%m/%d/%Y:%T\")| return start end] | eval Errors = 0 | fields - endhuman starthuman starttime | rename endtime as _time | head ($error_count:job.resultCount$==0)]",
"queryParameters": {
"earliest": "$overview_time.earliest$",
"latest": "$overview_time.latest$"
diff --git a/ui/src/pages/Dashboard/DashboardModal.tsx b/ui/src/pages/Dashboard/DashboardModal.tsx
index 96d72aaf2..30403f285 100644
--- a/ui/src/pages/Dashboard/DashboardModal.tsx
+++ b/ui/src/pages/Dashboard/DashboardModal.tsx
@@ -15,6 +15,7 @@ import {
queryMap,
} from './utils';
import { FieldValue, SearchResponse } from './DataIngestion.types';
+import { FEATURE_FLAGS } from './consts';
/**
* @param {object} props
@@ -196,7 +197,7 @@ export const DashboardModal = ({
}, []);
return dataIngestionModalDef ? (
-
+
= {
Input: 'Volume metrics are not available when the Input view is selected.',
@@ -153,6 +154,7 @@ export const DataIngestionDashboard = ({
preset={EnterpriseViewOnlyPreset}
initialDefinition={dashboardDefinition}
dashboardPlugin={dashboardPlugin}
+ featureFlags={FEATURE_FLAGS}
>
<>
<>
diff --git a/ui/src/pages/Dashboard/consts.ts b/ui/src/pages/Dashboard/consts.ts
new file mode 100644
index 000000000..e4c79af01
--- /dev/null
+++ b/ui/src/pages/Dashboard/consts.ts
@@ -0,0 +1,14 @@
+import { FeatureFlags, Preset } from '@splunk/dashboard-context';
+
+/**
+ * Feature flags for the dashboard.
+ *
+ * enableSmartSourceDS - enables referencing finished search job results in other searches.
+ * Here it is used as follows: if the primary search returns 0 results, default data is applied; otherwise the appended query is ignored.
+ * https://splunkui.splunk.com/Packages/dashboard-docs/Smartsources
+ *
+ */
+
+export const FEATURE_FLAGS = {
+ enableSmartSourceDS: true,
+} satisfies FeatureFlags & ConstructorParameters[0]['featureFlags'];
diff --git a/ui/src/pages/Dashboard/dashboardStyle.css b/ui/src/pages/Dashboard/dashboardStyle.css
index cc25f6011..11a760bf4 100644
--- a/ui/src/pages/Dashboard/dashboardStyle.css
+++ b/ui/src/pages/Dashboard/dashboardStyle.css
@@ -333,7 +333,7 @@
}
#errors_tab_errors_list_viz svg[data-test='placeholder-icon'] {
- max-height: 400px;
+ max-height: 100px;
}
[data-test-panel-id='dataIngestionTabPanel'] [data-test='grid-layout-canvas'] {