
Commit

test: fix
soleksy-splunk committed Nov 24, 2024
1 parent 11095ea commit 278b327
Showing 2 changed files with 7 additions and 7 deletions.
12 changes: 6 additions & 6 deletions tests/unit/expected_results/data_ingestion_tab_definition.json
@@ -135,7 +135,7 @@
"data_ingestion_data_volume_ds": {
"type": "ds.search",
"options": {
"query": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | timechart sum(b) as Usage | rename Usage as \"Data volume\"",
"query": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | timechart sum(b) as Usage | rename Usage as \"Data volume\"",
"queryParameters": {
"earliest": "$data_ingestion_time.earliest$",
"latest": "$data_ingestion_time.latest$"
@@ -205,19 +205,19 @@
"items": [
{
"label": "Source type",
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Source",
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s | join type=left s [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by modular_input_name | rename modular_input_name as s ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table s, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename s as \"Source\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by s | join type=left s [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by modular_input_name | rename modular_input_name as s ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table s, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename s as \"Source\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Host",
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h | table h, Bytes, sparkvolume | rename h as \"Host\", Bytes as \"Data volume\", sparkvolume as \"Volume trendline (Bytes)\""
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by h | table h, Bytes, sparkvolume | rename h as \"Host\", Bytes as \"Data volume\", sparkvolume as \"Volume trendline (Bytes)\""
},
{
"label": "Index",
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx | join type=left idx [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_index | rename event_index as idx ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table idx, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename idx as \"Index\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
"value": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by idx | join type=left idx [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_index | rename event_index as idx ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table idx, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename idx as \"Index\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\""
},
{
"label": "Account",
@@ -228,7 +228,7 @@
"value": "| rest splunk_server=local /services/data/inputs/all | where $eai:acl.app$ = \"Splunk_TA_UCCExample\" | eval Active=if(lower(disabled) IN (\"1\", \"true\", \"t\"), \"no\", \"yes\") | table title, Active | rename title as \"event_input\" | join type=left event_input [ search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) as le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by event_input | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") ] | makemv delim=\",\" sparkevent | table event_input, Active, events, sparkevent, \"Last event\" | rename event_input as \"Input\", events as \"Number of events\", sparkevent as \"Event trendline\""
}
],
"defaultValue": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*,example_input_three*,example_input_four*,service_hidden_for_cloud*,service_hidden_for_enterprise*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\"",
"defaultValue": "index=_internal source=*license_usage.log type=Usage (s IN (example_input_one*,example_input_two*)) | stats sparkline(sum(b)) as sparkvolume, sum(b) as Bytes by st | join type=left st [search index = _internal source=*splunk_ta_uccexample* action=events_ingested | stats latest(_time) AS le, sparkline(sum(n_events)) as sparkevent, sum(n_events) as events by sourcetype_ingested | rename sourcetype_ingested as st ] | makemv delim=\",\" sparkevent | eval \"Last event\" = strftime(le, \"%e %b %Y %I:%M%p\") | table st, Bytes, sparkvolume, events, sparkevent, \"Last event\" | rename st as \"Source type\", Bytes as \"Data volume\", events as \"Number of events\", sparkvolume as \"Volume trendline (Bytes)\", sparkevent as \"Event trendline\"",
"token": "table_view_by"
},
"title": "View by"
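For context, the files under tests/unit/expected_results back unit tests that compare a generated dashboard definition against the stored JSON. The sketch below is illustrative only: the expected-result path is real, but the assert_matches_expected function and its generated_definition argument are assumptions, not the repository's actual test code.

import json
from pathlib import Path

# Hypothetical sketch: compare a freshly generated dashboard definition with
# the stored expected result. How the definition gets generated is assumed.
EXPECTED = Path("tests/unit/expected_results/data_ingestion_tab_definition.json")

def assert_matches_expected(generated_definition: dict) -> None:
    expected = json.loads(EXPECTED.read_text())
    # Comparing parsed JSON rather than raw text ignores key ordering and
    # whitespace while still failing on query changes like the ones above.
    assert generated_definition == expected

Comparing parsed structures rather than raw strings is a reasonable choice here because formatting differences are ignored while any change to a query string still fails the test.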
2 changes: 1 addition & 1 deletion tests/unit/expected_results/errors_tab_definition.json
@@ -103,7 +103,7 @@
"errors_tab_errors_list_ds": {
"type": "ds.search",
"options": {
"query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR) exc_l IN ($error_types$)",
"query": "index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL) exc_l IN ($error_types$)",
"queryParameters": {
"earliest": "$errors_tab_time.earliest$",
"latest": "$errors_tab_time.latest$"
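The errors query relies on dashboard tokens such as $error_types$, $errors_tab_time.earliest$, and $errors_tab_time.latest$, which Splunk substitutes with the current input values when the dashboard runs the search. The sketch below is a rough approximation of that substitution; the fill_tokens helper and the token values are invented for illustration and are not how Splunk performs it internally.

# Illustrative only: roughly how dashboard tokens in the errors query would be
# filled in before the search runs. Token values below are made up; the real
# substitution is done by Splunk's dashboard framework, not by this helper.
QUERY = (
    "index=_internal source=*splunk_ta_uccexample* "
    "log_level IN (ERROR, CRITICAL) exc_l IN ($error_types$)"
)

def fill_tokens(query: str, tokens: dict) -> str:
    for name, value in tokens.items():
        query = query.replace(f"${name}$", value)
    return query

print(fill_tokens(QUERY, {"error_types": '"ConnectionError", "ValueError"'}))
# -> index=_internal source=*splunk_ta_uccexample* log_level IN (ERROR, CRITICAL)
#    exc_l IN ("ConnectionError", "ValueError")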
