Improve error handling in metric API
jacekwegr committed Dec 2, 2022
1 parent eec76fe commit 14cda31
Showing 3 changed files with 156 additions and 22 deletions.
133 changes: 125 additions & 8 deletions big_tests/tests/graphql_metric_SUITE.erl
@@ -5,7 +5,8 @@
-compile([export_all, nowarn_export_all]).

-import(distributed_helper, [mim/0, require_rpc_nodes/1, rpc/4]).
- -import(graphql_helper, [execute_command/4, get_ok_value/2, get_unauthorized/1]).
-import(graphql_helper, [execute_command/4, get_ok_value/2, get_unauthorized/1,
get_err_msg/1, get_err_code/1]).

suite() ->
MIM2NodeName = maps:get(node, distributed_helper:mim2()),
@@ -27,15 +28,27 @@ metrics_tests() ->
[get_all_metrics,
get_all_metrics_check_by_type,
get_by_name_global_erlang_metrics,
get_metrics_by_name_empty_args,
get_metrics_by_name_empty_string,
get_metrics_by_nonexistent_name,
get_process_queue_length,
get_inet_stats,
get_vm_stats_memory,
get_all_metrics_as_dicts,
get_by_name_metrics_as_dicts,
get_metrics_as_dicts_by_nonexistent_name,
get_metrics_as_dicts_with_key_one,
get_metrics_as_dicts_with_nonexistent_key,
get_metrics_as_dicts_empty_args,
get_metrics_as_dicts_empty_strings,
get_cluster_metrics,
get_by_name_cluster_metrics_as_dicts,
- get_mim2_cluster_metrics].
get_mim2_cluster_metrics,
get_cluster_metrics_for_nonexistent_nodes,
get_cluster_metrics_by_nonexistent_name,
get_cluster_metrics_with_nonexistent_key,
get_cluster_metrics_empty_args,
get_cluster_metrics_empty_strings].

domain_admin_metrics_tests() ->
[domain_admin_get_metrics,
@@ -126,6 +139,21 @@ get_by_name_global_erlang_metrics(Config) ->
%% Other metrics are filtered out
undef = maps:get(ReadsKey, Map, undef).

get_metrics_by_name_empty_args(Config) ->
Result = get_metrics([], Config),
ParsedResult = get_ok_value([data, metric, getMetrics], Result),
lists:foreach(fun check_metric_by_type/1, ParsedResult).

get_metrics_by_name_empty_string(Config) ->
Result = get_metrics([<<>>], Config),
ParsedResult = get_ok_value([data, metric, getMetrics], Result),
lists:foreach(fun check_metric_by_type/1, ParsedResult).

get_metrics_by_nonexistent_name(Config) ->
Result = get_metrics([<<"not_existing">>], Config),
ParsedResult = get_ok_value([data, metric, getMetrics], Result),
lists:foreach(fun check_metric_by_type/1, ParsedResult).

get_process_queue_length(Config) ->
Result = get_metrics([<<"global">>, <<"processQueueLengths">>], Config),
ParsedResult = get_ok_value([data, metric, getMetrics], Result),
@@ -167,13 +195,42 @@ get_by_name_metrics_as_dicts(Config) ->
check_spiral_dict(Dict)
end, ParsedResult).

get_metrics_as_dicts_by_nonexistent_name(Config) ->
Result = get_metrics_as_dicts_by_name([<<"not_existing">>], Config),
get_ok_value([data, metric, getMetricsAsDicts], Result).

get_metrics_as_dicts_with_key_one(Config) ->
Result = get_metrics_as_dicts_with_keys([<<"one">>], Config),
ParsedResult = get_ok_value([data, metric, getMetricsAsDicts], Result),
Map = dict_objects_to_map(ParsedResult),
SentName = [metric_host_type(), <<"xmppStanzaSent">>],
[#{<<"key">> := <<"one">>, <<"value">> := One}] = maps:get(SentName, Map),
- true = is_integer(One).
?assert(is_integer(One)).

get_metrics_as_dicts_with_nonexistent_key(Config) ->
Result = get_metrics_as_dicts_with_keys([<<"not_existing">>], Config),
get_ok_value([data, metric, getMetricsAsDicts], Result).

get_metrics_as_dicts_empty_args(Config) ->
%% Empty name
Result = get_metrics_as_dicts([], [<<"median">>], Config),
ParsedResult = get_ok_value([data, metric, getMetricsAsDicts], Result),
Map = dict_objects_to_map(ParsedResult),
SentName = [<<"global">>, <<"data">>, <<"xmpp">>, <<"received">>, <<"encrypted_size">>],
[#{<<"key">> := <<"median">>, <<"value">> := Median}] = maps:get(SentName, Map),
?assert(is_integer(Median)),
%% Empty keys
Result2 = get_metrics_as_dicts([<<"global">>, <<"erlang">>], [], Config),
ParsedResult2 = get_ok_value([data, metric, getMetricsAsDicts], Result2),
?assert(length(ParsedResult2) == 2).

get_metrics_as_dicts_empty_strings(Config) ->
%% Name is empty string
Result = get_metrics_as_dicts([<<>>], [<<"median">>], Config),
get_ok_value([data, metric, getMetricsAsDicts], Result),
%% Key is empty string
Result2 = get_metrics_as_dicts([<<"global">>, <<"erlang">>], [<<>>], Config),
get_ok_value([data, metric, getMetricsAsDicts], Result2).

get_cluster_metrics(Config) ->
%% We will have at least these two nodes
@@ -190,7 +247,7 @@ get_by_name_cluster_metrics_as_dicts(Config) ->
NodeResult = get_ok_value([data, metric, getClusterMetricsAsDicts], Result),
Map = node_objects_to_map(NodeResult),
%% Contains data for at least two nodes
- true = maps:size(Map) > 1,
?assert(maps:size(Map) > 1),
%% Only xmppStanzaSent type
maps:map(fun(_Node, [_|_] = NodeRes) ->
lists:foreach(fun(#{<<"dict">> := Dict,
@@ -205,6 +262,54 @@ get_mim2_cluster_metrics(Config) ->
[#{<<"node">> := Node, <<"result">> := ResList}] = ParsedResult,
check_node_result_is_valid(ResList, true).

get_cluster_metrics_for_nonexistent_nodes(Config) ->
Result = get_cluster_metrics_as_dicts_for_nodes([<<"nonexistent">>], Config),
ParsedResult = get_ok_value([data, metric, getClusterMetricsAsDicts], Result),
[#{<<"node">> := _, <<"result">> := ResList}] = ParsedResult,
[#{<<"dict">> := [],<<"name">> := ErrorResult}] = ResList,
?assert(ErrorResult == [<<"error">>,<<"nodedown">>]).

get_cluster_metrics_by_nonexistent_name(Config) ->
Result = get_cluster_metrics_as_dicts_by_name([<<"nonexistent">>], Config),
get_ok_value([data, metric, getClusterMetricsAsDicts], Result).

get_cluster_metrics_with_nonexistent_key(Config) ->
Result = get_cluster_metrics_as_dicts_with_keys([<<"nonexistent">>], Config),
get_ok_value([data, metric, getClusterMetricsAsDicts], Result).

get_cluster_metrics_empty_args(Config) ->
Node = atom_to_binary(maps:get(node, distributed_helper:mim2())),
%% Empty name
Result = get_cluster_metrics_as_dicts([], [<<"one">>], [Node], Config),
ParsedResult = get_ok_value([data, metric, getClusterMetricsAsDicts], Result),
[#{<<"node">> := Node, <<"result">> := ResList}] = ParsedResult,
Map = dict_objects_to_map(ResList),
SentName = [<<"global">>, <<"xmppStanzaSent">>],
[#{<<"key">> := <<"one">>, <<"value">> := One}] = maps:get(SentName, Map),
?assert(is_integer(One)),
%% Empty keys
Result2 = get_cluster_metrics_as_dicts([<<"_">>], [], [Node], Config),
ParsedResult2 = get_ok_value([data, metric, getClusterMetricsAsDicts], Result2),
[#{<<"node">> := Node, <<"result">> := ResList2}] = ParsedResult2,
check_node_result_is_valid(ResList2, true),
%% Empty nodes
Result3 = get_cluster_metrics_as_dicts([<<"_">>, <<"erlang">>], [<<"ets_limit">>], [], Config),
ParsedResult3 = get_ok_value([data, metric, getClusterMetricsAsDicts], Result3),
NodeMap = node_objects_to_map(ParsedResult3),
?assert(maps:size(NodeMap) > 1).

get_cluster_metrics_empty_strings(Config) ->
Node = atom_to_binary(maps:get(node, distributed_helper:mim2())),
%% Name is empty string
Result = get_cluster_metrics_as_dicts([<<>>], [<<"median">>], [Node], Config),
get_ok_value([data, metric, getClusterMetricsAsDicts], Result),
%% Key is empty string
Result2 = get_cluster_metrics_as_dicts([<<"_">>], [<<>>], [Node], Config),
get_ok_value([data, metric, getClusterMetricsAsDicts], Result2),
%% Node is empty string
Result3 = get_cluster_metrics_as_dicts([<<"_">>], [<<"median">>], [<<>>], Config),
get_ok_value([data, metric, getClusterMetricsAsDicts], Result3).

check_node_result_is_valid(ResList, MetricsAreGlobal) ->
%% Check that result contains something
Map = dict_objects_to_map(ResList),
@@ -215,7 +320,7 @@ check_node_result_is_valid(ResList, MetricsAreGlobal) ->
check_spiral_dict(maps:get(SentName, Map)),
[#{<<"key">> := <<"value">>,<<"value">> := V}] =
maps:get([<<"global">>,<<"uniqueSessionCount">>], Map),
- true = is_integer(V),
?assert(is_integer(V)),
HistObjects = maps:get([<<"global">>, <<"data">>, <<"xmpp">>,
<<"sent">>, <<"compressed_size">>], Map),
check_histogram(kv_objects_to_map(HistObjects)).
@@ -277,6 +382,10 @@ get_metrics(Name, Config) ->
get_metrics_as_dicts(Config) ->
execute_command(<<"metric">>, <<"getMetricsAsDicts">>, #{}, Config).

get_metrics_as_dicts(Name, Keys, Config) ->
Vars = #{<<"name">> => Name, <<"keys">> => Keys},
execute_command(<<"metric">>, <<"getMetricsAsDicts">>, Vars, Config).

get_metrics_as_dicts_by_name(Name, Config) ->
Vars = #{<<"name">> => Name},
execute_command(<<"metric">>, <<"getMetricsAsDicts">>, Vars, Config).
@@ -288,6 +397,10 @@ get_metrics_as_dicts_with_keys(Keys, Config) ->
get_cluster_metrics_as_dicts(Config) ->
execute_command(<<"metric">>, <<"getClusterMetricsAsDicts">>, #{}, Config).

get_cluster_metrics_as_dicts(Name, Keys, Nodes, Config) ->
Vars = #{<<"name">> => Name, <<"nodes">> => Nodes, <<"keys">> => Keys},
execute_command(<<"metric">>, <<"getClusterMetricsAsDicts">>, Vars, Config).

get_cluster_metrics_as_dicts_by_name(Name, Config) ->
Vars = #{<<"name">> => Name},
execute_command(<<"metric">>, <<"getClusterMetricsAsDicts">>, Vars, Config).
@@ -296,16 +409,20 @@ get_cluster_metrics_as_dicts_for_nodes(Nodes, Config) ->
Vars = #{<<"nodes">> => Nodes},
execute_command(<<"metric">>, <<"getClusterMetricsAsDicts">>, Vars, Config).

get_cluster_metrics_as_dicts_with_keys(Keys, Config) ->
Vars = #{<<"keys">> => Keys},
execute_command(<<"metric">>, <<"getClusterMetricsAsDicts">>, Vars, Config).

%% Helpers

check_spiral_dict(Dict) ->
[#{<<"key">> := <<"count">>, <<"value">> := Count},
#{<<"key">> := <<"one">>, <<"value">> := One}] = Dict,
- true = is_integer(Count),
- true = is_integer(One).
?assert(is_integer(Count)),
?assert(is_integer(One)).

values_are_integers(Map, Keys) ->
- lists:foreach(fun(Key) -> true = is_integer(maps:get(Key, Map)) end, Keys).
lists:foreach(fun(Key) -> ?assert(is_integer(maps:get(Key, Map))) end, Keys).

metric_host_type() ->
binary:replace(domain_helper:host_type(), <<" ">>, <<"_">>, [global]).
2 changes: 0 additions & 2 deletions src/graphql/admin/mongoose_graphql_metric_admin_query.erl
@@ -5,8 +5,6 @@

-ignore_xref([execute/4]).

- -include("mongoose_logger.hrl").

execute(_Ctx, _Obj, <<"getMetrics">>, Args) ->
Name = get_name(Args),
mongoose_metrics_api:get_metrics(Name);
43 changes: 31 additions & 12 deletions src/metrics/mongoose_metrics_api.erl
@@ -4,6 +4,7 @@
get_cluster_metrics_as_dicts/3]).

-include("mongoose_logger.hrl").
-include("mongoose.hrl").

-type name() :: [atom() | integer()].
-type key() :: atom().
@@ -18,21 +19,31 @@

-spec get_metrics(Name :: name()) -> {ok, [metric_result()]}.
get_metrics(Name) ->
- Values = exometer:get_values(Name),
PrepName = prepare_host_types(Name),
Values = mongoose_metrics:get_metric_values(PrepName),
{ok, lists:map(fun make_metric_result/1, Values)}.

-spec get_metrics_as_dicts(Name :: name(), Keys :: [key()]) ->
{ok, [metric_dict_result()]}.
get_metrics_as_dicts(Name, Keys) ->
- Values = exometer:get_values(Name),
PrepName = prepare_host_types(Name),
Values = mongoose_metrics:get_metric_values(PrepName),
{ok, [make_metric_dict_result(V, Keys) || V <- Values]}.

-spec get_cluster_metrics_as_dicts(Name :: name(), Keys :: [key()],
Nodes :: [node()]) ->
{ok, [metric_node_dict_result()]}.
get_cluster_metrics_as_dicts(Name, Keys, Nodes) ->
- Nodes2 = existing_nodes(Nodes),
- F = fun(Node) -> rpc:call(Node, exometer, get_values, [Name]) end,
PrepName = prepare_host_types(Name),
Nodes2 = prepare_nodes_arg(Nodes),
F = fun(Node) ->
case rpc:call(Node, mongoose_metrics, get_metric_values, [PrepName]) of
{badrpc, Reason} ->
[{[error, Reason], []}];
Result ->
Result
end
end,
Results = mongoose_lib:pmap(F, Nodes2),
{ok, [make_node_result(Node, Result, Keys)
|| {Node, Result} <- lists:zip(Nodes2, Results)]}.
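
For an unreachable node, rpc:call/4 returns {badrpc, nodedown}, and the fun above turns that into a pseudo-metric entry named [error, Reason] with an empty dict instead of failing the whole query. A rough sketch of the resulting per-node object, inferred from the get_cluster_metrics_for_nonexistent_nodes test above (the exact rendering is done by make_node_result/3):

%% Illustrative shapes only:
%%   F('nonexistent@host') -> [{[error, nodedown], []}]
%% which ends up in the GraphQL result roughly as
%%   #{<<"node">> => Node,
%%     <<"result">> => [#{<<"name">> => [<<"error">>, <<"nodedown">>], <<"dict">> => []}]}
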
@@ -50,14 +61,10 @@ filter_keys(Dict, []) ->
filter_keys(Dict, Keys) ->
[KV || KV = {Key, _} <- Dict, lists:member(Key, Keys)].

- existing_nodes(Nodes) ->
- AllNodes = [node()|nodes()],
- filter_nodes(AllNodes, Nodes).

- filter_nodes(AllNodes, []) ->
- AllNodes;
- filter_nodes(AllNodes, AllowedNodes) ->
- [Node || Node <- AllNodes, lists:member(Node, AllowedNodes)].
prepare_nodes_arg([]) ->
[node()|nodes()];
prepare_nodes_arg(Nodes) ->
Nodes.
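
Unlike the removed existing_nodes/filter_nodes pair, which silently dropped any requested node that was not currently connected, prepare_nodes_arg/1 keeps the caller's list as given and falls back to the whole cluster only for an empty list, so unknown nodes now surface as explicit errors. A minimal sketch (node names are illustrative):

%% prepare_nodes_arg([]) -> [node() | nodes()], e.g. [mim@localhost, mim2@localhost]
%% prepare_nodes_arg(['foo@nowhere']) -> ['foo@nowhere'], later reported as [error, nodedown]
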

make_metric_result({Name, Dict}) ->
PreparedName = format_name(Name),
@@ -154,3 +161,15 @@ format_vm_system_info(#{port_count := PortCount, port_limit := PortLimit,
format_probe_queues(#{fsm := FSM, regular := Regular, total := Total}) ->
#{<<"type">> => <<"probe_queues">>,
<<"fsm">> => FSM, <<"regular">> => Regular, <<"total">> => Total}.

prepare_host_types(Name) ->
lists:map(
fun(Ele) ->
case lists:member(atom_to_binary(Ele), ?ALL_HOST_TYPES) of
true ->
binary:replace(atom_to_binary(Ele), <<" ">>, <<"_">>);
false ->
Ele
end
end,
Name).
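
prepare_host_types/1 rewrites a name segment that matches a configured host type, replacing the space with an underscore so the segment matches how such metrics are registered; all other segments pass through unchanged. A minimal sketch, assuming a host type <<"test type">> is present in ?ALL_HOST_TYPES:

%% prepare_host_types(['test type', xmppStanzaSent]) -> [<<"test_type">>, xmppStanzaSent]
%% prepare_host_types([global, erlang]) -> [global, erlang]
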
