diff --git a/.gitignore b/.gitignore index a1848d6..f134d60 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.erlang.mk/ .eunit .edts deps diff --git a/Makefile b/Makefile index a9e6647..989167b 100644 --- a/Makefile +++ b/Makefile @@ -1,19 +1,35 @@ PROJECT = lasse -TEST_DEPS = cowboy lager sync gun meck +CONFIG ?= rel/sys.config -dep_cowboy = https://github.com/extend/cowboy.git master -dep_lager = https://github.com/basho/lager.git master -dep_sync = https://github.com/rustyio/sync.git master -dep_gun = https://github.com/extend/gun.git master -dep_meck = https://github.com/inaka/meck master +SHELL_DEPS = sync +TEST_DEPS = cowboy lager shotgun meck katana xref_runner + +dep_meck = git https://github.com/eproxus/meck.git 0.8.2 +dep_cowboy = git https://github.com/extend/cowboy.git 1.0.1 +dep_shotgun = git https://github.com/inaka/shotgun.git 0.1.11 +dep_katana = git https://github.com/inaka/erlang-katana.git 0.2.5 +dep_xref_runner = git https://github.com/inaka/xref_runner.git 0.2.2 + +DIALYZER_DIRS := ebin/ +DIALYZER_OPTS := --verbose --statistics -Werror_handling \ + -Wrace_conditions #-Wunmatched_returns include erlang.mk +ERLC_OPTS := +warn_unused_vars +warn_export_all +warn_shadow_vars +warn_unused_import +warn_unused_function +ERLC_OPTS += +warn_bif_clash +warn_unused_record +warn_deprecated_function +warn_obsolete_guard +strict_validation +ERLC_OPTS += +warn_export_vars +warn_exported_vars +warn_missing_spec +warn_untyped_record +debug_info + TEST_ERLC_OPTS += +'{parse_transform, lager_transform}' +CT_OPTS += -cover test/${PROJECT}.coverspec -vvv -erl_args -config ${CONFIG} + +SHELL_OPTS += -name ${PROJECT}@`hostname` -config ${CONFIG} -s lager -s sync -CT_SUITES = lasse_handler -CT_OPTS = -cover test/lasse_handler.coverspec -cover_stop false -erl_args -config rel/sys.config +quicktests: app + @$(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + @mkdir -p logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) -test-shell: build-tests - erl -pa ebin -pa deps/*/ebin -pa test -s sync -config rel/sys.config +erldocs: + erldocs . -o docs diff --git a/erlang.mk b/erlang.mk index d29632c..466b74a 100644 --- a/erlang.mk +++ b/erlang.mk @@ -1,4 +1,4 @@ -# Copyright (c) 2013-2014, Loïc Hoguin +# Copyright (c) 2013-2015, Loïc Hoguin # # Permission to use, copy, modify, and/or distribute this software for any # purpose with or without fee is hereby granted, provided that the above @@ -12,77 +12,166 @@ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -# Project. +.PHONY: all deps app rel docs install-docs tests check clean distclean help erlang-mk + +ERLANG_MK_VERSION = 1.2.0-560-g947eac7 + +# Core configuration. PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) -# Packages database file. +# Verbosity. -PKG_FILE ?= $(CURDIR)/.erlang.mk.packages.v1 -export PKG_FILE +V ?= 0 -PKG_FILE_URL ?= https://raw.githubusercontent.com/extend/erlang.mk/master/packages.v1.tsv +gen_verbose_0 = @echo " GEN " $@; +gen_verbose = $(gen_verbose_$(V)) -define get_pkg_file - wget --no-check-certificate -O $(PKG_FILE) $(PKG_FILE_URL) || rm $(PKG_FILE) -endef +# Temporary files directory. -# Verbosity and tweaks. +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP -V ?= 0 +# "erl" command. 
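# Illustrative sketch, not part of the diff above; the dependency name, URL
# and version here are hypothetical. The Makefile change switches to
# erlang.mk's three-field "method repository version" dependency syntax, so
# any further test dependency would be pinned the same way:
#
#   TEST_DEPS += mydep
#   dep_mydep = git https://github.com/example/mydep.git 1.0.0
#
# CONFIG is set with ?=, so the sys.config handed to the CT and shell targets
# can be overridden per invocation, for example:
#
#   make tests CONFIG=rel/other.config
#   make quicktests CONFIG=rel/other.config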
-appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; -appsrc_verbose = $(appsrc_verbose_$(V)) +ERL = erl +A0 -noinput -boot start_clean -erlc_verbose_0 = @echo " ERLC " $(filter %.erl %.core,$(?F)); -erlc_verbose = $(erlc_verbose_$(V)) +# Platform detection. +# @todo Add Windows/Cygwin detection eventually. -xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); -xyrl_verbose = $(xyrl_verbose_$(V)) +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) -dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); -dtl_verbose = $(dtl_verbose_$(V)) +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif -gen_verbose_0 = @echo " GEN " $@; -gen_verbose = $(gen_verbose_$(V)) +export PLATFORM +endif -.PHONY: rel clean-rel all clean-all app clean deps clean-deps \ - docs clean-docs build-tests tests build-plt dialyze +# Core targets. -# Release. +ifneq ($(words $(MAKECMDGOALS)),1) +.NOTPARALLEL: +endif -RELX_CONFIG ?= $(CURDIR)/relx.config +all:: deps + @$(MAKE) --no-print-directory app + @$(MAKE) --no-print-directory rel -ifneq ($(wildcard $(RELX_CONFIG)),) +# Noop to avoid a Make warning when there's nothing to do. +rel:: + @echo -n -RELX ?= $(CURDIR)/relx -export RELX +check:: clean app tests -RELX_URL ?= https://github.com/erlware/relx/releases/download/v1.0.2/relx -RELX_OPTS ?= -RELX_OUTPUT_DIR ?= _rel +clean:: clean-crashdump -ifneq ($(firstword $(subst -o,,$(RELX_OPTS))),) - RELX_OUTPUT_DIR = $(firstword $(subst -o,,$(RELX_OPTS))) +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump endif -define get_relx - wget -O $(RELX) $(RELX_URL) || rm $(RELX) - chmod +x $(RELX) -endef +distclean:: clean + +help:: + @printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2014 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [-jNUM] [target]" \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " deps Fetch dependencies (if needed) and compile them" \ + " app Compile the project" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " tests Run the tests for this project" \ + " check Compile and run all tests and analysis for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + "" \ + "The target clean only removes files that are commonly removed." \ + "Dependencies and releases are left untouched." \ + "" \ + "Setting V=1 when calling $(MAKE) enables verbose mode." \ + "Parallel execution is supported through the -j $(MAKE) flag." + +# Core functions. + +define newline -rel: clean-rel all $(RELX) - @$(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) -$(RELX): - @$(call get_relx) +endef -clean-rel: - $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. 
+define erlang +$(ERL) -pa $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). +endef +define core_http_get + $(call erlang,$(call core_http_get.erl,$(1),$(2))) +endef endif -# Deps directory. +# Automated update. + +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone https://github.com/ninenines/erlang.mk $(ERLANG_MK_BUILD_DIR) + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR); fi + cd $(ERLANG_MK_BUILD_DIR) && $(MAKE) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps distclean-pkg pkg-list pkg-search + +# Configuration. + +IGNORE_DEPS ?= DEPS_DIR ?= $(CURDIR)/deps export DEPS_DIR @@ -90,10 +179,7 @@ export DEPS_DIR REBAR_DEPS_DIR = $(DEPS_DIR) export REBAR_DEPS_DIR -ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DEPS)) -ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) - -# Application. +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(filter-out $(IGNORE_DEPS),$(DEPS))) ifeq ($(filter $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) ifeq ($(ERL_LIBS),) @@ -104,26 +190,644 @@ endif endif export ERL_LIBS -ERLC_OPTS ?= -Werror +debug_info +warn_export_all +warn_export_vars \ - +warn_shadow_vars +warn_obsolete_guard # +bin_opt_info +warn_missing_spec +PKG_FILE2 ?= $(CURDIR)/.erlang.mk.packages.v2 +export PKG_FILE2 + +PKG_FILE_URL ?= https://raw.githubusercontent.com/ninenines/erlang.mk/master/packages.v2.tsv + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) + @for dep in $(ALL_DEPS_DIRS) ; do \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ] ; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$? ; \ + else \ + echo "ERROR: No Makefile to build dependency $$dep." ; \ + exit 1 ; \ + fi ; \ + done +endif + +distclean:: distclean-deps distclean-pkg + +# Deps related targets. + +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk | xargs grep -ci rebar` ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! 
-d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + rm -f $(DEPS_DIR)/$(1)/erlang.mk; \ + cd $(DEPS_DIR)/$(1)/ && ln -s ../../erlang.mk +endef +else +define dep_autopatch_erlang_mk + echo -n +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! -d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + make; \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))) +endef + +define dep_autopatch_rebar.erl + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(DEPS_DIR)/$(1)/rebar.config") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(DEPS_DIR)/$(1)/rebar.config.script") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(DEPS_DIR)/$(1)/rebar.config.script", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(DEPS_DIR)/$(1)/rebar.config.script", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(DEPS_DIR)/$(1)/Makefile", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$$$", "\$$$$$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS = edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, 
Source} -> + {Method, Repo, Commit} = case Source of + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." ++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(DEPS_DIR)/$(1)/src/" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(DEPS_DIR)/$(1)/include/" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(DEPS_DIR)/$(1)/src/*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(DEPS_DIR)/$(1)/src/" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ _ -> Escape(Cmd); + "gmake -C" ++ _ -> Escape(Cmd); + "make " ++ Cmd1 -> "make -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "gmake -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + 
PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(DEPS_DIR)/$(1)/c_src") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(DEPS_DIR)/$(1)/c_src/Makefile.erlang.mk", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(DEPS_DIR)/$(1)/" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$$$\(CC) -c -o $$$$\@ $$$$\< $$$$\(CFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$$$\(CXX) -c -o $$$$\@ $$$$\< $$$$\(CXXFLAGS) $$$$\(ERL_CFLAGS) $$$$\(DRV_CFLAGS) $$$$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$$$\(foreach ext,.c .C .cc .cpp,", + "$$$$\(patsubst %$$$$\(ext),%.o,$$$$\(filter %$$$$\(ext),$$$$\(wildcard", Input, "))))\n", + "\t$$$$\(CC) -o $$$$\@ $$$$\? 
$$$$\(LDFLAGS) $$$$\(ERL_LDFLAGS) $$$$\(DRV_LDFLAGS) $$$$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude ../../erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(DEPS_DIR)/$(1)/"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(DEPS_DIR)/" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(DEPS_DIR)/$(1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(DEPS_DIR)/$(1)/" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(App), + Mods = filelib:fold_files("$(DEPS_DIR)/$(1)/src", "\\\\.erl$$$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, $(1), L}])) + end + end, + UpdateModules("$(DEPS_DIR)/$(1)/ebin/$(1).app"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(DEPS_DIR)/$(1)/src/$(1).app.src", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(DEPS_DIR)/$(1)/ebin/$(1).app"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {vsn, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L2}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch + if [ "$$$$VS" = "git" ]; then \ + git clone -q -n -- $$$$REPO $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $$$$COMMIT; \ + elif [ "$$$$VS" = "hg" ]; then \ + hg clone -q -U $$$$REPO $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && hg update -q $$$$COMMIT; \ + elif [ "$$$$VS" = "svn" ]; then \ + svn checkout -q $$$$REPO $(DEPS_DIR)/$(1); \ + elif [ "$$$$VS" = "cp" ]; then \ + cp -R $$$$REPO $(DEPS_DIR)/$(1); \ + else \ + echo "Unknown or invalid dependency: $(1). Please consult the erlang.mk README for instructions." >&2; \ + exit 78; \ + fi +endef + +define dep_target +$(DEPS_DIR)/$(1): + @mkdir -p $(DEPS_DIR) +ifeq (,$(dep_$(1))) + @if [ ! 
-f $(PKG_FILE2) ]; then $(call core_http_get,$(PKG_FILE2),$(PKG_FILE_URL)); fi + $(dep_verbose) DEPPKG=$$$$(awk 'BEGIN { FS = "\t" }; $$$$1 == "$(1)" { print $$$$2 " " $$$$3 " " $$$$4 }' $(PKG_FILE2);); \ + VS=$$$$(echo $$$$DEPPKG | cut -d " " -f1); \ + REPO=$$$$(echo $$$$DEPPKG | cut -d " " -f2); \ + COMMIT=$$$$(echo $$$$DEPPKG | cut -d " " -f3); \ + $(call dep_fetch,$(1)) +else +ifeq (1,$(words $(dep_$(1)))) + $(dep_verbose) VS=git; \ + REPO=$(dep_$(1)); \ + COMMIT=master; \ + $(call dep_fetch,$(1)) +else +ifeq (2,$(words $(dep_$(1)))) + $(dep_verbose) VS=git; \ + REPO=$(word 1,$(dep_$(1))); \ + COMMIT=$(word 2,$(dep_$(1))); \ + $(call dep_fetch,$(1)) +else + $(dep_verbose) VS=$(word 1,$(dep_$(1))); \ + REPO=$(word 2,$(dep_$(1))); \ + COMMIT=$(word 3,$(dep_$(1))); \ + $(call dep_fetch,$(1)) +endif +endif +endif + @if [ -f $(DEPS_DIR)/$(1)/configure.ac -o -f $(DEPS_DIR)/$(1)/configure.in ]; then \ + echo " AUTO " $(1); \ + cd $(DEPS_DIR)/$(1) && autoreconf -Wall -vif -I m4; \ + fi + -@if [ -f $(DEPS_DIR)/$(1)/configure ]; then \ + echo " CONF " $(1); \ + cd $(DEPS_DIR)/$(1) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + @$(call dep_autopatch,$(1)) +endif +endef + +$(foreach dep,$(DEPS),$(eval $(call dep_target,$(dep)))) + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) + +# Packages related targets. + +$(PKG_FILE2): + @$(call core_http_get,$(PKG_FILE2),$(PKG_FILE_URL)) + +pkg-list: $(PKG_FILE2) + @cat $(PKG_FILE2) | awk 'BEGIN { FS = "\t" }; { print \ + "Name:\t\t" $$1 "\n" \ + "Repository:\t" $$3 "\n" \ + "Website:\t" $$5 "\n" \ + "Description:\t" $$6 "\n" }' + +ifdef q +pkg-search: $(PKG_FILE2) + @cat $(PKG_FILE2) | grep -i ${q} | awk 'BEGIN { FS = "\t" }; { print \ + "Name:\t\t" $$1 "\n" \ + "Repository:\t" $$3 "\n" \ + "Website:\t" $$5 "\n" \ + "Description:\t" $$6 "\n" }' +else +pkg-search: + $(error Usage: $(MAKE) pkg-search q=STRING) +endif + +ifeq ($(PKG_FILE2),$(CURDIR)/.erlang.mk.packages.v2) +distclean-pkg: + $(gen_verbose) rm -f $(PKG_FILE2) +endif + +help:: + @printf "%s\n" "" \ + "Package-related targets:" \ + " pkg-list List all known packages" \ + " pkg-search q=STRING Search for STRING in the package index" + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. + +define compile_proto + @mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + @rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(shell find src -type f -name \*.proto 2>/dev/null) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. 
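# Illustrative sketch, not part of the diff; the repositories below are
# hypothetical. dep_target above accepts one-, two- or three-word dep_*
# values and interprets them as follows before calling dep_fetch (git, hg,
# svn or cp):
#
#   dep_a = https://example.com/a.git           # git clone, checkout master
#   dep_b = https://example.com/b.git 1.0.0     # git clone, checkout 1.0.0
#   dep_c = hg https://example.com/c 1.0.0      # method, repository, commit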
+ +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec COMPILE_FIRST ?= COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose = $(appsrc_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose = $(asn1_verbose_$(V)) -all: deps app +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose = $(mib_verbose_$(V)) -clean-all: clean clean-deps clean-docs - $(gen_verbose) rm -rf .$(PROJECT).plt $(DEPS_DIR) logs +# Targets. -app: ebin/$(PROJECT).app +ifeq ($(wildcard ebin/test),) +app:: app-build +else +app:: clean app-build +endif + +app-build: erlc-include ebin/$(PROJECT).app $(eval MODULES := $(shell find ebin -type f -name \*.beam \ | sed "s/ebin\//'/;s/\.beam/',/" | sed '$$s/.$$//')) + @if [ -z "$$(grep -E '^[^%]*{modules,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." >&2; \ + exit 1; \ + fi + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) $(appsrc_verbose) cat src/$(PROJECT).app.src \ | sed "s/{modules,[[:space:]]*\[\]}/{modules, \[$(MODULES)\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ > ebin/$(PROJECT).app +erlc-include: + -@if [ -d ebin/ ]; then \ + find include/ src/ -type f -name \*.hrl -newer ebin -exec touch $(shell find src/ -type f -name "*.erl") \; 2>/dev/null || printf ''; \ + fi + define compile_erl $(erlc_verbose) erlc -v $(ERLC_OPTS) -o ebin/ \ - -pa ebin/ -I include/ $(COMPILE_FIRST_PATHS) $(1) + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),\ + $(COMPILE_FIRST_PATHS) $(1)) endef define compile_xyrl @@ -132,165 +836,1369 @@ define compile_xyrl @rm ebin/*.erl endef -define compile_dtl - $(dtl_verbose) erl -noshell -pa ebin/ $(DEPS_DIR)/erlydtl/ebin/ -eval ' \ - Compile = fun(F) -> \ - Module = list_to_atom( \ - string:to_lower(filename:basename(F, ".dtl")) ++ "_dtl"), \ - erlydtl:compile(F, Module, [{out_dir, "ebin/"}]) \ - end, \ - _ = [Compile(F) || F <- string:tokens("$(1)", " ")], \ - init:stop()' +define compile_asn1 + $(asn1_verbose) erlc -v -I include/ -o ebin/ $(1) + @mv ebin/*.hrl include/ + @mv ebin/*.asn1db include/ + @rm ebin/*.erl +endef + +define compile_mib + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ \ + -I priv/mibs/ $(COMPILE_MIB_FIRST_PATHS) $(1) + $(mib_verbose) erlc -o include/ -- priv/mibs/*.bin endef -ebin/$(PROJECT).app: $(shell find src -type f -name \*.erl) \ - $(shell find src -type f -name \*.core) \ - $(shell find src -type f -name \*.xrl) \ - $(shell find src -type f -name \*.yrl) \ - $(shell find templates -type f -name \*.dtl 2>/dev/null) +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: @mkdir -p ebin/ - $(if $(strip $(filter %.erl %.core,$?)), \ - $(call compile_erl,$(filter %.erl %.core,$?))) - $(if $(strip 
$(filter %.xrl %.yrl,$?)), \ - $(call compile_xyrl,$(filter %.xrl %.yrl,$?))) - $(if $(strip $(filter %.dtl,$?)), \ - $(call compile_dtl,$(filter %.dtl,$?))) -clean: - $(gen_verbose) rm -rf ebin/ test/*.beam erl_crash.dump +ifneq ($(wildcard asn1/),) +ebin/$(PROJECT).app:: $(shell find asn1 -type f -name \*.asn1) + @mkdir -p include + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +ifneq ($(wildcard mibs/),) +ebin/$(PROJECT).app:: $(shell find mibs -type f -name \*.mib) + @mkdir -p priv/mibs/ include + $(if $(strip $?),$(call compile_mib,$?)) +endif -# Dependencies. +ebin/$(PROJECT).app:: $(shell find src -type f -name \*.erl -o -name \*.core) + $(if $(strip $?),$(call compile_erl,$?)) -define get_dep - @mkdir -p $(DEPS_DIR) -ifeq (,$(findstring pkg://,$(word 1,$(dep_$(1))))) - git clone -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1) +ebin/$(PROJECT).app:: $(shell find src -type f -name \*.xrl -o -name \*.yrl) + $(if $(strip $?),$(call compile_xyrl,$?)) +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf ebin/ priv/mibs/ \ + $(addprefix include/,$(addsuffix .hrl,$(notdir $(basename $(wildcard mibs/*.mib))))) + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: else - @if [ ! -f $(PKG_FILE) ]; then $(call get_pkg_file); fi - git clone -n -- `awk 'BEGIN { FS = "\t" }; \ - $$$$1 == "$(subst pkg://,,$(word 1,$(dep_$(1))))" { print $$$$2 }' \ - $(PKG_FILE)` $(DEPS_DIR)/$(1) +doc-deps: $(ALL_DOC_DEPS_DIRS) + @for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done endif - cd $(DEPS_DIR)/$(1) ; git checkout -q $(word 2,$(dep_$(1))) -endef -define dep_target -$(DEPS_DIR)/$(1): - $(call get_dep,$(1)) -endef +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. -$(foreach dep,$(DEPS),$(eval $(call dep_target,$(dep)))) +.PHONY: test-deps test-dir test-build clean-test-dir -deps: $(ALL_DEPS_DIRS) - @for dep in $(ALL_DEPS_DIRS) ; do \ - if [ -f $$dep/Makefile ] ; then \ - $(MAKE) -C $$dep ; \ - else \ - echo "include $(CURDIR)/erlang.mk" | $(MAKE) -f - -C $$dep ; \ - fi ; \ +# Configuration. + +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. 
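# Illustrative sketch, not part of the diff; the package name and URL are
# hypothetical. Documentation-only dependencies declared in DOC_DEPS reuse
# the same dep_* syntax and are fetched and built by the doc-deps target:
#
#   DOC_DEPS = mydocdep
#   dep_mydocdep = git https://example.com/mydocdep.git 1.0.0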
+ +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + @for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +ifneq ($(strip $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(wildcard $(TEST_DIR)/*.erl $(TEST_DIR)/*/*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps + @$(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps + @$(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ done -clean-deps: - @for dep in $(ALL_DEPS_DIRS) ; do \ - if [ -f $$dep/Makefile ] ; then \ - $(MAKE) -C $$dep clean ; \ - else \ - echo "include $(CURDIR)/erlang.mk" | $(MAKE) -f - -C $$dep clean ; \ - fi ; \ +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ done +endif -# Documentation. +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. + +help:: + @printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $(PROJECT), [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$(PROJECT)_app, []}}, + {env, []} +]}. +endef -EDOC_OPTS ?= +define bs_appsrc_lib +{application, $(PROJECT), [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. 
+endef -docs: clean-docs - $(gen_verbose) erl -noshell \ - -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), init:stop().' +define bs_Makefile +PROJECT = $(PROJECT) +include erlang.mk +endef -clean-docs: - $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info +define bs_app +-module($(PROJECT)_app). +-behaviour(application). -# Tests. +-export([start/2]). +-export([stop/1]). -$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) +start(_Type, _Args) -> + $(PROJECT)_sup:start_link(). -TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard -TEST_ERLC_OPTS += -DTEST=1 -DEXTRA=1 +'{parse_transform, eunit_autoexport}' +stop(_State) -> + ok. +endef -build-test-deps: $(ALL_TEST_DEPS_DIRS) - @for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +define bs_relx_config +{release, {$(PROJECT)_release, "1"}, [$(PROJECT)]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef -build-tests: build-test-deps - $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -o test/ \ - $(wildcard test/*.erl test/*/*.erl) -pa ebin/ +define bs_sys_config +[ +]. +endef -CT_OPTS ?= -CT_RUN = ct_run \ - -no_auto_compile \ - -noshell \ - -pa $(realpath ebin) $(DEPS_DIR)/*/ebin \ - -dir test \ - -logdir logs - -CT_SUITES ?= - -define test_target -test_$(1): ERLC_OPTS = $(TEST_ERLC_OPTS) -test_$(1): clean deps app build-tests - @if [ -d "test" ] ; \ - then \ - mkdir -p logs/ ; \ - $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) ; \ - fi - $(gen_verbose) rm -f test/*.beam +define bs_vm_args +-name $(PROJECT)@127.0.0.1 +-setcookie $(PROJECT) +-heart endef -$(foreach test,$(CT_SUITES),$(eval $(call test_target,$(test)))) +# Normal templates. -tests: ERLC_OPTS = $(TEST_ERLC_OPTS) -tests: clean deps app build-tests - @if [ -d "test" ] ; \ - then \ - mkdir -p logs/ ; \ - $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) ; \ - fi - $(gen_verbose) rm -f test/*.beam +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). -# Dialyzer. +-export([start_link/0]). +-export([init/1]). -DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt -export DIALYZER_PLT +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). -PLT_APPS ?= -DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ - -Wunmatched_returns # -Wunderspecs +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef -build-plt: deps app - @dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(ALL_DEPS_DIRS) +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). -dialyze: - @dialyzer --src src --no_native $(DIALYZER_OPTS) +%% API. +-export([start_link/0]). -# Packages. +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). -$(PKG_FILE): - @$(call get_pkg_file) +-record(state, { +}). -pkg-list: $(PKG_FILE) - @cat $(PKG_FILE) | awk 'BEGIN { FS = "\t" }; { print \ - "Name:\t\t" $$1 "\n" \ - "Repository:\t" $$2 "\n" \ - "Website:\t" $$3 "\n" \ - "Description:\t" $$4 "\n" }' +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. 
+endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. + +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). + +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). 
+endef + +# Plugin-specific targets. + +define render_template + @echo "$${$(1)}" > $(2) +endef + +$(foreach template,$(filter bs_%,$(.VARIABLES)),$(eval export $(template))) +$(foreach template,$(filter tpl_%,$(.VARIABLES)),$(eval export $(template))) + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(call render_template,bs_Makefile,Makefile) + @mkdir src/ + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(eval n := $(PROJECT)_sup) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(call render_template,bs_Makefile,Makefile) + @mkdir src/ + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(call render_template,bs_relx_config,relx.config) + @mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new: +ifeq ($(wildcard src/),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME) +endif + $(call render_template,tpl_$(t),src/$(n).erl) + +list-templates: + @echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. + +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +ifeq ($(C_SRC_TYPE),shared) +LDFLAGS += -shared +endif + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
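# Illustrative usage sketch, not part of the diff; the module name below is
# hypothetical:
#
#   make list-templates                 # show the available tpl_* templates
#   make bootstrap-rel                  # write relx.config, rel/sys.config and rel/vm.args
#   make new t=gen_server n=my_server   # render tpl_gen_server into src/my_server.erl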
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(shell find $(C_SRC_DIR) -type f \( -name "*.c" -o -name "*.C" -o -name "*.cc" -o -name "*.cpp" \)) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + @mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) $(LDFLAGS) $(LDLIBS) -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + @$(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(KERL) $(addprefix ci-,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + -$(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +$(CI_INSTALL_DIR)/$(1): + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +define kerl_fetch + $(call core_http_get,$(KERL),$(KERL_URL)) + chmod +x $(KERL) +endef + +$(KERL): + @$(call kerl_fetch) + +help:: + @printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(shell find $(TEST_DIR) -type f -name \*_SUITE.erl -exec basename {} \;))) +else + CT_SUITES ?= +endif + +# Core targets. 
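# Illustrative sketch, not part of the diff: CT_SUITES is now derived from
# the *_SUITE.erl files found under $(TEST_DIR), so this project's
# test/lasse_handler_SUITE.erl yields CT_SUITES = lasse_handler and a
# generated ct-lasse_handler target:
#
#   make ct                    # run every discovered suite
#   make ct-lasse_handler      # run a single suite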
+ +tests:: ct + +distclean:: distclean-ct + +help:: + @printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + @mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + @mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + @printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. + +$(DIALYZER_PLT): deps app + @dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(ALL_DEPS_DIRS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + @dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2014, Juan Facorro +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.3/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.3/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + @printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. + +$(ELVIS): + @$(call core_http_get,$(ELVIS),$(ELVIS_URL)) + @chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + @$(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + @$(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= 0 + +# Verbosity. 
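# Illustrative usage sketch, not part of the diff:
#
#   make plt        # build the .lasse.plt PLT from erts/kernel/stdlib, PLT_APPS and deps
#   make dialyze    # run Dialyzer with DIALYZER_DIRS and DIALYZER_OPTS (ebin/ for this project)
#   make elvis      # style checks; elvis and elvis.config are downloaded if missing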
+ +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. + +define compile_erlydtl + $(dtl_verbose) $(ERL) -pa ebin/ $(DEPS_DIR)/erlydtl/ebin/ -eval ' \ + Compile = fun(F) -> \ + S = fun (1) -> re:replace(filename:rootname(string:sub_string(F, 11), ".dtl"), "/", "_", [{return, list}, global]); \ + (0) -> filename:basename(F, ".dtl") \ + end, \ + Module = list_to_atom(string:to_lower(S($(DTL_FULL_PATH))) ++ "_dtl"), \ + {ok, _} = erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) \ + end, \ + _ = [Compile(F) || F <- string:tokens("$(1)", " ")], \ + halt().' +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(shell find templates -type f -name \*.dtl 2>/dev/null) + $(if $(strip $?),$(call compile_erlydtl,$?)) +endif + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + @printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. + +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +# All modules in TEST_DIR +ifeq ($(strip $(TEST_DIR)),) +TEST_DIR_MODS = +else +TEST_DIR_MODS = $(notdir $(basename $(shell find $(TEST_DIR) -type f -name *.beam))) +endif + +# All modules in 'ebin' +EUNIT_EBIN_MODS = $(notdir $(basename $(shell find ebin -type f -name *.beam))) +# Only those modules in TEST_DIR with no matching module in 'ebin'. +# This is done to avoid some tests being executed twice. 
+EUNIT_MODS = $(filter-out $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(TEST_DIR_MODS)) +TAGGED_EUNIT_TESTS = $(foreach mod,$(EUNIT_EBIN_MODS) $(EUNIT_MODS),{module,$(mod)}) + +EUNIT_OPTS ?= + +# Utility functions + +define str-join + $(shell echo '$(strip $(1))' | sed -e "s/ /,/g") +endef + +# Core targets. + +tests:: eunit + +help:: + @printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. + +EUNIT_RUN_BEFORE ?= +EUNIT_RUN_AFTER ?= +EUNIT_RUN = $(ERL) \ + -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin \ + -pz ebin \ + $(EUNIT_RUN_BEFORE) \ + -eval 'case eunit:test([$(call str-join,$(TAGGED_EUNIT_TESTS))],\ + [$(EUNIT_OPTS)]) of ok -> ok; error -> halt(1) end.' \ + $(EUNIT_RUN_AFTER) \ + -eval 'halt(0).' + +eunit: test-build + $(gen_verbose) $(EUNIT_RUN) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX ?= $(CURDIR)/relx +export RELX + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v2.0.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: distclean-relx-rel relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +define relx_fetch + $(call core_http_get,$(RELX),$(RELX_URL)) + chmod +x $(RELX) +endef + +$(RELX): + @$(call relx_fetch) + +relx-rel: $(RELX) + @$(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + @$(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + @printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_PATH ?= -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + @printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + @for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) erl $(SHELL_PATH) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifneq ($(wildcard $(DEPS_DIR)/triq),) +.PHONY: triq + +# Targets. 
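+# Usage note (illustrative; module and property names are hypothetical):
+# "make triq" checks every module in ebin/, "make triq t=some_module" checks a
+# single module's properties, and "make triq t=some_module:some_prop" checks
+# one property.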
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(MODULES)]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + @$(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + @echo Testing $(t)/0 + @$(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(shell find ebin -type f -name \*.beam \ + | sed "s/ebin\//'/;s/\.beam/',/" | sed '$$s/.$$//')) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined)) +endif +endif + +# Copyright (c) 2015, Euen Lopez +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.0/xrefr + +# Core targets. + +help:: + @printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + @$(call core_http_get,$(XREFR),$(XREFR_URL)) + @chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# utility variables for representing special symbols +empty := +space := $(empty) $(empty) +comma := , + +# Hook in coverage to eunit + +ifdef COVER +ifdef EUNIT_RUN +EUNIT_RUN_BEFORE += -eval \ + 'case cover:compile_beam_directory("ebin") of \ + {error, _} -> halt(1); \ + _ -> ok \ + end.' +EUNIT_RUN_AFTER += -eval 'cover:export("eunit.coverdata").' +endif +endif + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN + +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(shell echo ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + @echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + @$(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + @printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." 
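+
+# For illustration (not part of the plugin itself): running "make tests COVER=1"
+# collects eunit/ct coverage into *.coverdata files and, since COVER_REPORT_DIR
+# defaults to "cover", writes an HTML summary to cover/index.html.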
+ +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

<h1>Coverage</h1>~n", []),
+	io:format(F, "<h2>Total: ~p%</h2>~n", [TotalPerc]),
+	io:format(F, "<table><tr><th>Module</th><th>Coverage</th></tr>~n", []),
+	[io:format(F, "<tr><td><a href=\"~p.COVER.html\">~p</a></td>"
+		"<td>~p%</td></tr>~n",
+		[M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1],
+	How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))",
+	Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")",
+	io:format(F, "</table>~n"
+		"<p>Generated using ~s and erlang.mk on ~s.</p>
~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) -ifdef q -pkg-search: $(PKG_FILE) - @cat $(PKG_FILE) | grep -i ${q} | awk 'BEGIN { FS = "\t" }; { print \ - "Name:\t\t" $$1 "\n" \ - "Repository:\t" $$2 "\n" \ - "Website:\t" $$3 "\n" \ - "Description:\t" $$4 "\n" }' -else -pkg-search: - @echo "Usage: make pkg-search q=STRING" endif +endif # ifneq ($(COVER_REPORT_DIR),) diff --git a/rebar.config b/rebar.config deleted file mode 100644 index 7b30a8f..0000000 --- a/rebar.config +++ /dev/null @@ -1 +0,0 @@ -{deps, []}. diff --git a/src/lasse.app.src b/src/lasse.app.src index d173455..d386ea5 100644 --- a/src/lasse.app.src +++ b/src/lasse.app.src @@ -3,7 +3,8 @@ [ {description, "Lasse: Server-Sent Event handler for Cowboy."}, {vsn, "0.2.0"}, - {modules, [lasse_handler]}, - {applications, [kernel, stdlib]} + {modules, []}, + {applications, [kernel, stdlib]}, + {registered, []} ] }. diff --git a/src/lasse_handler.erl b/src/lasse_handler.erl index dab1e9b..70f13c8 100644 --- a/src/lasse_handler.erl +++ b/src/lasse_handler.erl @@ -15,35 +15,37 @@ %% Records %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --record(state, +-record(state, { module :: module(), state :: any() }). +-type state() :: #state{}. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Behavior definition %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --type event_value() :: - {'id', binary()} | - {'event', binary()} | - {'data', binary()} | - {'retry', binary()}. - --type event() :: [event_value(), ...]. +-type event() :: + #{ id => binary() + , event => binary() + , data => binary() + , retry => binary() + , comment | '' => binary() + }. -type result() :: {'send', Event :: event(), NewState :: any()} | {'nosend', NewState :: any()} | {'stop', NewState :: any()}. --callback init(InitArgs :: any(), LastEvtId :: any(), Req :: cowboy_req:req()) -> +-callback init(InitArgs::any(), LastEvtId::any(), Req::cowboy_req:req()) -> {ok, NewReq :: cowboy_req:req(), State :: any()} | + {ok, NewReq :: cowboy_req:req(), Events :: [event()], State :: any()} | {no_content, NewReq :: cowboy_req:req()} | { - shutdown, - StatusCode :: cowboy:http_status(), + shutdown, + StatusCode :: cowboy:http_status(), Headers :: cowboy:http_headers(), Body :: iodata(), NewReq :: cowboy_req:req() @@ -67,21 +69,32 @@ %% Cowboy callbacks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --type lasse_handler_option() :: {'module', module()} | {'init_args', any()}. --type lasse_handler_options() :: [module()] | [lasse_handler_option(), ...]. - --spec init(any(), any(), lasse_handler_options()) -> {loop, any(), record(state)}. +-type lasse_handler_options() :: + module() | + #{ module => module() + , init_args => any() + }. + +-spec init(any(), cowboy_req:req(), lasse_handler_options()) -> + {loop, any(), state()}. 
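+%% The two clauses below normalize the legacy option forms (an empty list or a
+%% [Module] list) into the new map-based options before the real init runs.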
+init(Transport, Req, []) -> + init(Transport, Req, #{}); +init(Transport, Req, [Module]) when is_atom(Module) -> + init(Transport, Req, #{module => Module}); init(_Transport, Req, Opts) -> - Module = case get_value(module, Opts, Opts) of - Name when is_atom(Name) -> Name; - [Name] when is_atom(Name) -> Name; - _ -> throw(module_option_missing) - end, - InitArgs = get_value(init_args, Opts, []), - {LastEventId, Req} = cowboy_req:header(<<"last-event-id">>, Req), - InitResult = Module:init(InitArgs, LastEventId, Req), - handle_init(InitResult, Module). + try + #{module := Module} = Opts, + InitArgs = maps:get(init_args, Opts, []), + {LastEventId, Req} = cowboy_req:header(<<"last-event-id">>, Req), + InitResult = Module:init(InitArgs, LastEventId, Req), + handle_init(InitResult, Module) + catch + _:{badmatch, #{}} -> + throw(module_option_missing) + end. +-spec info(term(), cowboy_req:req(), state()) -> + {ok|loop, cowboy_req:req(), state()}. info({message, Msg}, Req, State) -> Module = State#state.module, ModuleState = State#state.state, @@ -93,16 +106,17 @@ info(Msg, Req, State) -> Result = Module:handle_info(Msg, ModuleState), process_result(Result, Req, State). -terminate(Reason, Req, State) -> +-spec terminate(term(), cowboy_req:req(), state()) -> ok. +terminate(Reason, Req, State = #state{}) -> Module = State#state.module, ModuleState = State#state.state, Module:terminate(Reason, Req, ModuleState), ok. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% API +%%% API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - +-spec notify(atom() | pid(), term()) -> ok. notify(Pid, Msg) -> Pid ! {message, Msg}, ok. @@ -110,7 +124,6 @@ notify(Pid, Msg) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Helper functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - handle_init({ok, Req, State}, Module) -> handle_init({ok, Req, [], State}, Module); handle_init({ok, Req, InitialEvents, State}, Module) -> @@ -157,46 +170,32 @@ process_result({nosend, NewState}, Req, State) -> process_result({stop, NewState}, Req, State) -> {ok, Req, State#state{state = NewState}}. -get_value(Key, PropList) -> - case lists:keyfind(Key, 1, PropList) of - {Key, Value} -> Value; - _ -> undefined - end. - -get_value(Key, PropList, NotFound) -> - case get_value(Key, PropList) of - undefined -> NotFound; - Value -> Value - end. - send_event(Event, Req) -> EventMsg = build_event(Event), + ct:pal("Sending ~p", [EventMsg]), cowboy_req:chunk(EventMsg, Req). build_event(Event) -> - [build_comments(Event), - build_field(<<"id: ">>, get_value(id, Event)), - build_field(<<"event: ">>, get_value(name, Event)), - build_data(get_value(data, Event)), - build_field(<<"retry: ">>, get_value(retry, Event)), + [build_comment(maps:get(comment, Event, undefined)), + build_comment(maps:get('', Event, undefined)), + build_field(<<"id: ">>, maps:get(id, Event, undefined)), + build_field(<<"event: ">>, maps:get(event, Event, undefined)), + build_data(maps:get(data, Event, undefined)), + build_field(<<"retry: ">>, maps:get(retry, Event, undefined)), <<"\n">>]. -build_comments(Event) -> - Keys = [id, data, name, retry], - Comments = lists:foldl(fun proplists:delete/2, Event, Keys), - [build_comment(Val) || {_, Val} <- Comments]. - +build_comment(undefined) -> + []; build_comment(Comment) -> [[<<": ">>, X, <<"\n">>] || X <- binary:split(Comment, <<"\n">>, [global])]. 
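+%% Example (hypothetical values): build_event(#{id => <<"1">>, data => <<"a\nb">>})
+%% yields the iodata for <<"id: 1\ndata: a\ndata: b\n\n">>; comments and
+%% multi-line data are split so that every line gets its own field prefix.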
build_field(_, undefined) -> []; -build_field(_, "") -> - []; build_field(Name, Value) -> [Name, Value, <<"\n">>]. build_data(undefined) -> throw(data_required); build_data(Data) -> - [[<<"data: ">>, X, <<"\n">>] || X <- binary:split(Data, <<"\n">>, [global])]. + [[<<"data: ">>, X, <<"\n">>] + || X <- binary:split(Data, <<"\n">>, [global])]. diff --git a/test/events_handler.erl b/test/events_handler.erl index cfcd689..546c6cf 100644 --- a/test/events_handler.erl +++ b/test/events_handler.erl @@ -12,8 +12,8 @@ init(_InitArgs, LastEventId, Req) -> % Take process name from the "process-name" header. case cowboy_req:header(<<"process-name">>, Req) of - {ProcNameBin, Req} -> - ProcName = binary_to_term(ProcNameBin), + {ProcNameBin, Req} when ProcNameBin =/= <<"undefined">> -> + ProcName = binary_to_atom(ProcNameBin, utf8), register(ProcName, self()), lager:info("Initiating a ~p in ~p", [ProcName, whereis(ProcName)]); {undefined, Req} -> @@ -24,36 +24,33 @@ init(_InitArgs, LastEventId, Req) -> {ok, Req, LastEventId}. handle_notify(send, State) -> - {send, [{data, <<"notify chunk">>}], State}; + {send, #{data => <<"notify chunk">>}, State}; handle_notify(send_id, State) -> - Event = [ - {id, <<"1">>}, - {data, <<"notify chunk">>}, - {name, ""} - ], + Event = #{id => <<"1">>, + data => <<"notify chunk">>, + event => <<"">> + }, {send, Event, State}; handle_notify(no_data, State) -> - Event = [ - {id, <<"1">>}, - {name, "no_data"} - ], + Event = #{id => <<"1">>, + event => "no_data" + }, {send, Event, State}; handle_notify(nosend, State) -> {nosend, State}; handle_notify(comments, State) -> - Event = [ - {comments, <<"Comment 1\nComment 2">>}, - {data, <<"some data">>} - ], + Event = #{comment => <<"Comment 1\nComment 2">>, + data => <<"some data">> + }, {send, Event, State}; handle_notify(last_event_id, State) -> - Event = [{data, State}], + Event = #{data => State}, {send, Event, State}; handle_notify(stop, State) -> {stop, State}. handle_info(send, State) -> - {send, [{data, <<"info chunk">>}], State}; + {send, #{data => <<"info chunk">>}, State}; handle_info(nosend, State) -> {nosend, State}; handle_info(stop, State) -> diff --git a/test/initial_events_handler.erl b/test/initial_events_handler.erl index 8b61ab0..8f5443f 100644 --- a/test/initial_events_handler.erl +++ b/test/initial_events_handler.erl @@ -10,10 +10,7 @@ ]). init(_InitArgs, _LastEventId, Req) -> - InitialEvents = [ - [{data, <<"initial 1">>}], - [{data, <<"initial 2">>}] - ], + InitialEvents = [#{data => <<"initial 1">>}, #{data => <<"initial 2">>}], {ok, Req, InitialEvents, {}}. handle_info(_, _) -> diff --git a/test/lasse_handler.coverspec b/test/lasse.coverspec similarity index 100% rename from test/lasse_handler.coverspec rename to test/lasse.coverspec diff --git a/test/lasse_client.erl b/test/lasse_client.erl deleted file mode 100644 index adf4b64..0000000 --- a/test/lasse_client.erl +++ /dev/null @@ -1,208 +0,0 @@ -%%% @doc HTTP client for testing using gun. --module(lasse_client). --behavior(gen_fsm). - -%%% Public API --export([ - connect/2, - close/1, - start_get/2, - start_get/3, - start_post/2, - start_post/3, - pop/1 - ]). - -%%% gen_fsm callbacks --export([ - init/1, - handle_event/3, - handle_sync_event/4, - handle_info/3, - terminate/3, - code_change/4 - ]). - -%%% State functions --export([ - open/2, - wait_response/2, - receive_data/2, - receive_chunk/2 - ]). - --type response() :: {response, binary()} | {chunk, binary()}. 
- --record (state, - { - pid :: pid(), - stream :: reference(), - data = <<"">> :: binary(), - responses = queue:new() :: queue:queue(response()) - }). - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% API functions -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - --spec connect(Host :: string(), Port :: integer()) -> {ok, pid()}. -connect(Host, Port) -> - gen_fsm:start(lasse_client, [Host, Port], []). - --spec close(pid()) -> ok. -close(Pid) -> - gen_fsm:send_all_state_event(Pid, 'shutdown'), - ok. - --spec start_get(Pid :: pid(), Url :: string()) -> ok. -start_get(Pid, Url) -> - start_get(Pid, Url, []), - ok. - --type headers() :: [{binary(), binary()}]. - --spec start_get(Pid :: pid(), Url :: string(), Headers :: headers()) -> ok. -start_get(Pid, Url, Headers) -> - gen_fsm:send_event(Pid, {get, Url, Headers}). - --spec start_post(Pid :: pid(), Url :: string()) -> ok. -start_post(Pid, Url) -> - start_post(Pid, Url, []), - ok. - --spec start_post(Pid :: pid(), Url :: string(), Headers :: headers()) -> ok. -start_post(Pid, Url, Headers) -> - gen_fsm:send_event(Pid, {post, Url, Headers}). - --spec pop(Pid :: pid()) -> {response, binary()} | {chunk, binary()}. -pop(Pid) -> - gen_fsm:sync_send_all_state_event(Pid, get_response). - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% gen_fsm callbacks -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -init([Host, Port]) -> - Opts = [ - {type, tcp}, - {retry, 1}, - {retry_timeout, 1} - ], - {ok, Pid} = gun:open(Host, Port, Opts), - lager:info("gun pid: ~p - fsm pid: ~p", [Pid, self()]), - {ok, open, #state{pid = Pid}}. - -handle_event(shutdown, _StateName, StateData) -> - {stop, normal, StateData}. - -handle_sync_event(get_response, _From, StateName, State) -> - {Reply, Responses} = case queue:out(State#state.responses) of - {{value, Response}, NewQueue} -> - {Response, NewQueue}; - {empty, Queue} -> - {no_data, Queue} - end, - NewState = State#state{responses = Responses}, - {reply, Reply, StateName, NewState}. - -handle_info(Event, StateName, StateData) -> - lager:info("Forwarding info event to state ~p", [StateName]), - ?MODULE:StateName(Event, StateData). - -code_change(_OldVsn, StateName, StateData, _Extra) -> - {ok, StateName, StateData}. - -terminate(_Reason, _StateName, #state{pid = Pid}) -> - gun:shutdown(Pid), - ok. - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% State functions -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -open({get, Url, Headers}, State = #state{pid = Pid}) -> - StreamRef = gun:get(Pid, Url, Headers), - lager:info("Getting ~p, ref ~p", [Url, StreamRef]), - - {next_state, wait_response, State#state{stream = StreamRef}}; -open({post, Url, Headers}, State = #state{pid = Pid}) -> - StreamRef = gun:post(Pid, Url, Headers), - lager:info("Posting ~p, ref ~p", [Url, StreamRef]), - - {next_state, wait_response, State#state{stream = StreamRef}}. 
- -wait_response({'DOWN', _, _, _, Reason}, _State) -> - exit(Reason); -wait_response({gun_response, Pid, StreamRef, fin, StatusCode, _}, - State = #state{pid = Pid, stream = StreamRef}) -> - lager:info("~p: ~p", [StatusCode, response_fin]), - - Responses = State#state.responses, - NewResponses = queue:in({no_response, StatusCode}, Responses), - - NewState = State#state{responses = NewResponses}, - - {next_state, done, NewState}; -wait_response({gun_response, Pid, StreamRef, nofin, _, Headers}, - State = #state{pid = Pid, stream = StreamRef}) -> - lager:info("~p", [response_nofin]), - StateName = case lists:keyfind(<<"transfer-encoding">>, 1, Headers) of - {<<"transfer-encoding">>, <<"chunked">>} -> - receive_chunk; - _ -> - receive_data - end, - {next_state, StateName, State}; -wait_response(Event, State) -> - lager:info("~p", Event), - lager:info("~p", State), - {stop, unexpected, State}. - - -%% Regular response -receive_data({'DOWN', _, _, _, _Reason}, _State) -> - error(incomplete); -receive_data({gun_data, Pid, StreamRef, nofin, Data}, - State = #state{pid = Pid, stream = StreamRef, data = DataAcc}) -> - NewData = <>, - NewState = State#state{data = NewData}, - - {next_state, receive_data, NewState}; -receive_data({gun_data, Pid, StreamRef, fin, Data}, - State = #state{pid = Pid, stream = StreamRef, data = DataAcc}) -> - NewData = <>, - - Responses = State#state.responses, - NewResponses = queue:in({response, NewData}, Responses), - - NewState = State#state{responses = NewResponses}, - - {next_state, open, NewState}; -receive_data({gun_error, Pid, StreamRef, Reason}, - State = #state{pid = Pid, stream = StreamRef}) -> - lager:error("gun_error: ~p", [Reason]), - {next_state, open, State}. - -%% Chunked data response -receive_chunk({'DOWN', _, _, _, _Reason}, _State) -> - error(incomplete); -receive_chunk({gun_data, Pid, StreamRef, nofin, Data}, - State = #state{pid = Pid, stream = StreamRef}) -> - Responses = State#state.responses, - NewResponses = queue:in({chunk, Data}, Responses), - - NewState = State#state{responses = NewResponses}, - - {next_state, receive_chunk, NewState}; -receive_chunk({gun_data, Pid, StreamRef, fin, Data}, - State = #state{pid = Pid, stream = StreamRef}) -> - Responses = State#state.responses, - NewResponses = queue:in({chunk, Data}, Responses), - - NewState = State#state{responses = NewResponses}, - - {next_state, open, NewState}; -receive_chunk({gun_error, Pid, StreamRef, Reason}, - State = #state{pid = Pid, stream = StreamRef}) -> - lager:error("gun_error: ~p", [Reason]), - {next_state, open, State}. diff --git a/test/lasse_handler_SUITE.erl b/test/lasse_handler_SUITE.erl index ef5d15a..de635c1 100644 --- a/test/lasse_handler_SUITE.erl +++ b/test/lasse_handler_SUITE.erl @@ -19,49 +19,32 @@ do_not_send_data/1, send_post_and_fail/1, check_no_content/1, - send_last_event_id_and_check_response/1, + send_last_event_id/1, cause_chunk_to_fail/1, - shutdown_check_response/1, + shutdown/1, init_without_module_option/1, init_with_module_option/1 ]). --define(current_function(), - element(2, element(2, process_info(self(), current_function)))). - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% Common test functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% @private -spec all() -> [atom()]. 
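+%% all/0 is now derived from the exported functions of arity 1, minus the
+%% common_test housekeeping callbacks, so new test cases are picked up
+%% automatically.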
all() -> - [ - send_and_receive_two_chunks, - send_and_do_not_receive_anything, - send_and_receive_initial_events, - send_data_and_id, - send_comments_and_data, - do_not_send_data, - send_post_and_fail, - check_no_content, - send_last_event_id_and_check_response, - cause_chunk_to_fail, - shutdown_check_response, - init_without_module_option, - init_with_module_option - ]. + ExcludedFuns = [module_info, init_per_suite, end_per_suite, group], + [F || {F, 1} <- module_info(exports), not lists:member(F, ExcludedFuns)]. -spec init_per_suite(config()) -> config(). init_per_suite(Config) -> - {ok, _Started} = application:ensure_all_started(lasse_server), - + {ok, [_|_]} = application:ensure_all_started(lasse_server), + {ok, [_|_]} = application:ensure_all_started(shotgun), Config. -spec end_per_suite(config()) -> config(). end_per_suite(Config) -> application:stop(lasse_server), - + application:stop(shotgun), Config. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -73,133 +56,107 @@ send_and_receive_two_chunks(_Config) -> % since doing it in init_per_suite and % providing the resulting Pid doesn't work.' Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_and_receive_two_chunks, get(Pid, ProcName, "/events"), % first chunk lasse_handler:notify(ProcName, send), - check_response(Pid, {chunk, <<"data: notify chunk\n\n">>}), + [#{data := [<<"notify chunk">>]}] = get_events(Pid), % second chunk ProcName ! send, - check_response(Pid, {chunk, <<"data: info chunk\n\n">>}), + [#{data := [<<"info chunk">>]}] = get_events(Pid), lasse_handler:notify(ProcName, stop), - lasse_client:close(Pid). + close_conn(Pid). send_and_do_not_receive_anything(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_and_do_not_receive_anything, get(Pid, ProcName, "/events"), % first chunk lasse_handler:notify(ProcName, nosend), - - ok = try - check_response(Pid, {chunk, <<"data: notify chunk\n\n">>}), - fail - catch - error:timeout_while_waiting -> ok - end, + ok = get_no_events(Pid), % second chunk ProcName ! nosend, - ok = try - check_response(Pid, {chunk, <<"data: info chunk\n\n">>}), - fail - catch - error:timeout_while_waiting -> ok - end, + ok = get_no_events(Pid), lasse_handler:notify(ProcName, stop), - lasse_client:close(Pid). + close_conn(Pid). send_and_receive_initial_events(_Config) -> Pid = open_conn(), get(Pid, undefined, "/initial-events"), - Chunk = <<"data: initial 1\n\n", "data: initial 2\n\n">>, - check_response(Pid, {chunk, Chunk}), + [ + #{data := [<<"initial 1">>]}, + #{data := [<<"initial 2">>]} + ] = get_events(Pid), - lasse_client:close(Pid). + close_conn(Pid). send_data_and_id(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_data_and_id, get(Pid, ProcName, "/events"), lasse_handler:notify(ProcName, send_id), - - check_response(Pid, {chunk, <<"id: 1\ndata: notify chunk\n\n">>}), + [#{id := <<"1">>, data := [<<"notify chunk">>]}] = get_events(Pid), lasse_handler:notify(ProcName, stop), - lasse_client:close(Pid). + close_conn(Pid). send_comments_and_data(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_comments_and_data, get(Pid, ProcName, "/events"), lasse_handler:notify(ProcName, comments), Chunk = << ": Comment 1\n", ": Comment 2\n", - "data: some data\n\n" + "data: some data" >>, - check_response(Pid, {chunk, Chunk}), + [{_, _, Chunk}] = get_raw_events(Pid), lasse_handler:notify(ProcName, stop), - lasse_client:close(Pid). + close_conn(Pid). 
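+%% Raw events are asserted above (instead of shotgun:parse_event/1 maps)
+%% because the parsed form only exposes id/event/data, so the ": Comment"
+%% lines would presumably be dropped.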
do_not_send_data(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = do_not_send_data, get(Pid, ProcName, "/events"), lasse_handler:notify(ProcName, no_data), - ok = try - check_response(Pid, {chunk, <<"id: 1\ndata: notify chunk\n\n">>}), - fail - catch - error:timeout_while_waiting -> ok - end, - - lasse_client:close(Pid). + ok = get_no_events(Pid), + close_conn(Pid). send_post_and_fail(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_post_and_fail, - post(Pid, ProcName, "/events"), - check_response(Pid, {no_response, 405}), + #{status_code := 405} = post(Pid, ProcName, "/events"), - lasse_client:close(Pid). + close_conn(Pid). check_no_content(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), - ok = try - get(Pid, ProcName, "/no_content"), - fail - catch - error:timeout_while_waiting -> ok - end, + #{status_code := 204} = raw_get(Pid, "/no_content"), - check_response(Pid, {no_response, 204}), + close_conn(Pid). - lasse_client:close(Pid). - -send_last_event_id_and_check_response(_Config) -> +send_last_event_id(_Config) -> Pid = open_conn(), - ProcName = ?current_function(), + ProcName = send_last_event_id, - LastEventId = <<"42">>, - get(Pid, ProcName, "/events", [{<<"last-event-id">>, LastEventId}]), + get(Pid, ProcName, "/events", #{<<"last-event-id">> => <<"42">>}), lasse_handler:notify(ProcName, last_event_id), - check_response(Pid, {chunk, <<"data: ", LastEventId/binary, "\n\n">>}), + [#{data := [<<"42">>]}] = get_events(Pid), - lasse_client:close(Pid). + close_conn(Pid). cause_chunk_to_fail(_Config) -> try @@ -213,13 +170,18 @@ cause_chunk_to_fail(_Config) -> catch meck:unload(cowboy_req) end. -shutdown_check_response(_Config) -> +shutdown(_Config) -> Pid = open_conn(), - ok = lasse_client:start_get(Pid, "/shutdown"), - check_response(Pid, {response, <<"Sorry, shutdown!">>}), + #{status_code := 404, + body := <<"Sorry, shutdown!">>} = raw_get(Pid, "/shutdown"), + + {ok, _Ref} = + shotgun:get(Pid, "/shutdown", #{}, #{async => true, async_mode => sse}), - lasse_client:close(Pid). + get_no_events(Pid), + + close_conn(Pid). init_without_module_option(_Config) -> ok = try @@ -230,7 +192,7 @@ init_without_module_option(_Config) -> throw:module_option_missing -> ok end, ok = try - Opts2 = [{init_args, []}], + Opts2 = #{init_args => []}, lasse_handler:init({}, {}, Opts2), fail catch @@ -248,7 +210,7 @@ init_with_module_option(_Config) -> meck:expect(cowboy_req, chunked_reply, ChunkedReply), meck:expect(cowboy_req, header, fun(_, Req) -> {undefined, Req} end), - Opts = [{module, dummy_handler}], + Opts = #{module => dummy_handler}, {loop, Request, State} = lasse_handler:init({}, {}, Opts) after catch meck:unload(cowboy_req) @@ -258,55 +220,65 @@ init_with_module_option(_Config) -> %%% Auxiliary functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --spec open_conn() -> pid(). open_conn() -> {ok, Port} = application:get_env(cowboy, http_port), - Host = "localhost", - {ok, Pid} = lasse_client:connect(Host, Port), + {ok, Pid} = shotgun:open("localhost", Port), Pid. +close_conn(Pid) -> shotgun:close(Pid). + get(Pid, Name, Url) -> - get(Pid, Name, Url, []). + get(Pid, Name, Url, #{}). --spec get(Pid :: pid(), Name :: atom(), - Url :: string(), Header :: [{binary(), binary()}]) -> ok. -get(Pid, Name, Url, Headers) -> - NewHeaders = process_name(Headers, Name), - ok = lasse_client:start_get(Pid, Url, NewHeaders), +-spec get(pid(), atom(), string(), map()) -> ok. 
+get(Pid, Name, Uri, Headers) -> + NewHeaders = Headers#{<<"process-name">> => atom_to_binary(Name, utf8)}, + {ok, _Ref} = + shotgun:get(Pid, Uri, NewHeaders, #{async => true, async_mode => sse}), case Name of undefined -> ok; - _ -> - Fun = fun() -> whereis(Name) =/= undefined end, - wait_for(Fun, 100) + Name -> + true = + ktn_task:wait_for( + fun() -> whereis(Name) =/= undefined end, + true) end. -post(Pid, Name, Url) -> - Headers = process_name([], Name), - ok = lasse_client:start_post(Pid, Url, Headers). - -process_name(Headers, undefined) -> - Headers; -process_name(Headers, Name) -> - [{<<"process-name">>, term_to_binary(Name)}] ++ Headers. - -%% @doc Checks if the function Fun evaluates to true every 10ms until -%% it timeouts. -wait_for(Fun, Timeout) -> - SleepTime = 10, - Retries = Timeout div SleepTime, - wait_for(Fun, SleepTime, Retries). - -wait_for(_Fun, _SleepTime, 0) -> - error(timeout_while_waiting); -wait_for(Fun, SleepTime, Retries) -> - case Fun() of - true -> ok; - _ -> - timer:sleep(SleepTime), - wait_for(Fun, SleepTime, Retries - 1) - end. +raw_get(Pid, Uri) -> + {ok, Response} = shotgun:get(Pid, Uri), + Response. -check_response(Pid, Response) -> - Fun = fun() -> Response =:= lasse_client:pop(Pid) end, - wait_for(Fun, 100). +post(Pid, Name, Url) -> + Headers = #{<<"process-name">> => atom_to_binary(Name, utf8)}, + {ok, Response} = shotgun:post(Pid, Url, Headers, [], #{}), + Response. + +get_events(Pid) -> + ktn_task:wait_for_success( + fun() -> + try + ct:pal("waiting for events at ~p", [self()]), + timer:sleep(100), + Events = shotgun:events(Pid), + ct:pal("Events: ~p", [Events]), + [_|_] = [shotgun:parse_event(Bin) || {_, _, Bin} <- Events] + catch + _:Error -> + ct:pal("Failed: ~p", [Error]), + throw(Error) + end + end). + +get_raw_events(Pid) -> + ktn_task:wait_for_success( + fun() -> + ct:pal("waiting for events at ~p", [self()]), + Events = shotgun:events(Pid), + ct:pal("Events: ~p", [Events]), + [_|_] = Events + end). + +get_no_events(Pid) -> + {error, {timeout, {badmatch, []}}} = get_events(Pid), + ok. diff --git a/test/lasse_xref_SUITE.erl b/test/lasse_xref_SUITE.erl new file mode 100644 index 0000000..a9a0662 --- /dev/null +++ b/test/lasse_xref_SUITE.erl @@ -0,0 +1,21 @@ +-module(lasse_xref_SUITE). +-author('elbrujohalcon@inaka.net'). + +-ignore_xref([all/0]). +-ignore_xref([xref/1]). + +-export([all/0]). +-export([xref/1]). + +-spec all() -> [xref,...]. +all() -> [xref]. + +-spec xref(spts_test_utils:config()) -> {comment, []}. +xref(_Config) -> + Dirs = [filename:absname("../../ebin")], + [] = xref_runner:check(undefined_function_calls, #{dirs => Dirs}), + [] = xref_runner:check(undefined_functions, #{dirs => Dirs}), + [] = xref_runner:check(locals_not_used, #{dirs => Dirs}), + [] = xref_runner:check(deprecated_function_calls, #{dirs => Dirs}), + [] = xref_runner:check(deprecated_functions, #{dirs => Dirs}), + {comment, ""}.
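
For reference, a minimal sketch (not part of this patch; module name, event names
and values are hypothetical, and error-handling callbacks are omitted) of a handler
written against the new map-based event API introduced above:

-module(example_sse_handler).

-export([init/3, handle_notify/2, handle_info/2, terminate/3]).

%% No initial events; a counter is kept as the handler state.
init(_InitArgs, _LastEventId, Req) ->
    {ok, Req, 0}.

%% Triggered by lasse_handler:notify(HandlerPid, tick).
handle_notify(tick, Count) ->
    Event = #{ id    => integer_to_binary(Count)
             , event => <<"tick">>
             , data  => <<"tick received">>
             },
    {send, Event, Count + 1};
handle_notify(stop, Count) ->
    {stop, Count}.

%% Any other Erlang message sent to the handler process.
handle_info(_Msg, Count) ->
    {nosend, Count}.

terminate(_Reason, _Req, _Count) ->
    ok.

Events are then pushed from anywhere in the application with
lasse_handler:notify(HandlerPid, tick).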