Explorar el Código

Merge pull request #2236 from ferd/epp-analysis

Switch to EPP analysis covering all apps at once when compiling
pull/2245/head
Fred Hebert hace 5 años
cometido por GitHub
padre
commit
be7ad3c737
No se encontró ninguna clave conocida en la base de datos para esta firma ID de clave GPG: 4AEE18F83AFDEB23
Se han modificado 10 ficheros con 1532 adiciones y 153 borrados
  1. +4
    -0
      bootstrap
  2. +1
    -0
      rebar.config
  3. +127
    -37
      src/rebar_compiler.erl
  4. +278
    -88
      src/rebar_compiler_dag.erl
  5. +176
    -0
      src/rebar_compiler_epp.erl
  6. +46
    -4
      src/rebar_compiler_erl.erl
  7. +55
    -24
      src/rebar_prv_compile.erl
  8. +92
    -0
      test/rebar_compile_SUITE.erl
  9. +453
    -0
      test/rebar_compiler_dag_SUITE.erl
  10. +300
    -0
      test/rebar_compiler_epp_SUITE.erl

+ 4
- 0
bootstrap Ver fichero

@ -19,6 +19,9 @@ main(_) ->
%% manages to discover those in _build/prod from previous builds and
%% cause weird failures when compilers get modified between releases.
rm_rf("_build/prod"),
%% The same pattern happens with default/ as well, particularly when
%% developing new things.
rm_rf("_build/default"),
%% We fetch a few deps from hex for bootstrapping,
%% so we must compile r3_safe_erl_term.xrl which
@ -631,6 +634,7 @@ format_error(AbsSource, Extra, {Mod, Desc}) ->
additional_defines() ->
[{d, D} || {Re, D} <- [{"^[0-9]+", namespaced_types},
{"^18", no_maps_update_with},
{"^R1[4|5]", deprecated_crypto},
{"^2", unicode_str},
{"^(R|1|20)", fun_stacktrace},

+ 1
- 0
rebar.config Ver fichero

@ -30,6 +30,7 @@
{"rebar/priv/templates/*", "_build/default/lib/"}]}.
{erl_opts, [{platform_define, "^(19|2)", rand_only},
{platform_define, "^18", no_maps_update_with},
{platform_define, "^2", unicode_str},
{platform_define, "^(2[1-9])|(20\\\\.3)", filelib_find_source},
{platform_define, "^(R|1|20)", fun_stacktrace},

+ 127
- 37
src/rebar_compiler.erl Ver fichero

@ -1,6 +1,8 @@
-module(rebar_compiler).
-export([compile_all/2,
-export([analyze_all/2,
compile_analyzed/3,
compile_all/2,
clean/2,
needs_compile/3,
@ -15,10 +17,11 @@
-type extension() :: string().
-type out_mappings() :: [{extension(), file:filename()}].
-callback context(rebar_app_info:t()) -> #{src_dirs => [file:dirname()],
include_dirs => [file:dirname()],
src_ext => extension(),
out_mappings => out_mappings()}.
-callback context(rebar_app_info:t()) -> #{src_dirs => [file:dirname()], % mandatory
include_dirs => [file:dirname()], % mandatory
src_ext => extension(), % mandatory
out_mappings => out_mappings(), % mandatory
dependencies_opts => term()}. % optional
-callback needed_files(digraph:graph(), [file:filename()], out_mappings(),
rebar_app_info:t()) ->
{{[file:filename()], term()}, % ErlFirstFiles (erl_opts global priority)
@ -26,69 +29,153 @@
{[file:filename()], [file:filename()]}, % {Sequential, Parallel}
term()}}.
-callback dependencies(file:filename(), file:dirname(), [file:dirname()]) -> [file:filename()].
-callback dependencies(file:filename(), file:dirname(), [file:dirname()], term()) -> [file:filename()].
-callback compile(file:filename(), out_mappings(), rebar_dict(), list()) ->
ok | {ok, [string()]} | {ok, [string()], [string()]}.
-callback clean([file:filename()], rebar_app_info:t()) -> _.
-optional_callbacks([dependencies/4]).
-define(RE_PREFIX, "^(?!\\._)").
-spec compile_all([{module(), digraph:graph()}, ...], rebar_app_info:t()) -> ok
; ([module(), ...], rebar_app_info:t()) -> ok.
compile_all(DAGs, AppInfo) when is_tuple(hd(DAGs)) -> % > 3.13.0
prepare_compiler_env(AppInfo),
lists:foreach(fun({Compiler, G}) ->
run(G, Compiler, AppInfo),
%% TODO: disable default recursivity in extra_src_dirs compiling to
%% prevent compiling sample modules in _SUITE_data/ directories
%% in CT.
ExtraApps = annotate_extras(AppInfo),
[run(G, Compiler, ExtraAppInfo) || ExtraAppInfo <- ExtraApps],
ok
end,
DAGs);
%% @doc Analysis is driven by the caller, in order to let an OTP app
%% find and resolve all its dependencies as part of compile_all's new
%% API, which presumes that a partial analysis is done ahead of time
-spec analyze_all(DAG, [App, ...]) -> ok when
DAG :: {module(), digraph:graph()},
App :: rebar_app_info:t().
analyze_all({Compiler, G}, Apps) ->
prepare_compiler_env(Compiler, Apps),
%% Analyze apps one by one
%% then cover the include files in the digraph to update them
%% then propagate?
Contexts = gather_contexts(Compiler, Apps),
AppRes = [analyze_app({Compiler, G}, Contexts, AppInfo) || AppInfo <- Apps],
{AppOutPaths, AbsSources} = lists:unzip(AppRes),
SrcExt = maps:get(src_ext, Contexts),
OutExt = maps:get(artifact_exts, Contexts),
rebar_compiler_dag:prune(
G, SrcExt, OutExt, lists:append(AbsSources), AppOutPaths
),
rebar_compiler_dag:populate_deps(G, SrcExt, OutExt),
rebar_compiler_dag:propagate_stamps(G),
AppPaths = [{rebar_app_info:name(AppInfo),
rebar_utils:to_list(rebar_app_info:dir(AppInfo))}
|| AppInfo <- Apps],
AppNames = rebar_compiler_dag:compile_order(G, AppPaths),
{Contexts, sort_apps(AppNames, Apps)}.
gather_contexts(Compiler, Apps) ->
Default = default_ctx(),
Contexts = [{rebar_app_info:name(AppInfo),
maps:merge(Default, Compiler:context(AppInfo))}
|| AppInfo <- Apps],
ContextMap = maps:from_list(Contexts),
%% only support one extension type at once for now
[{_, #{src_ext := SrcExt}} | _] = Contexts,
%% gather multi-app stuff once to avoid recomputing it
ArtifactExts = lists:usort(
[Ext || {_, #{out_mappings := Mappings}} <- Contexts,
{Ext, _Dir} <- Mappings]
),
InDirs = gather_in_dirs(lists:zip(Apps, [Context || {_, Context} <- Contexts])),
ContextMap#{src_ext => SrcExt,
artifact_exts => ArtifactExts,
in_dirs => InDirs}.
%% @private Gather the union of absolute include and source directories
%% across all apps, deduplicated and sorted; these are the directories
%% the dependency analysis may search for related files.
gather_in_dirs(AppCtx) ->
    gather_in_dirs(AppCtx, []).

gather_in_dirs([], Paths) ->
    %% usort both deduplicates and gives a stable ordering
    lists:usort(Paths);
gather_in_dirs([{AppInfo, Ctx} | Rest], Acc) ->
    #{include_dirs := InclDirs,
      src_dirs := SrcDirs} = Ctx,
    %% dirs in the context are relative to the app's base directory
    BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
    AbsIncl = [filename:join(BaseDir, InclDir) || InclDir <- InclDirs],
    AbsSrc = [filename:join(BaseDir, SrcDir) || SrcDir <- SrcDirs],
    gather_in_dirs(Rest, AbsSrc ++ AbsIncl ++ Acc).
analyze_app({Compiler, G}, Contexts, AppInfo) ->
AppName = rebar_app_info:name(AppInfo),
BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
OutDir = rebar_utils:to_list(rebar_app_info:out_dir(AppInfo)),
BaseOpts = rebar_app_info:opts(AppInfo),
#{src_dirs := SrcDirs,
src_ext := SrcExt,
out_mappings := [{_OutExt, OutPath}|_], % prune one dir for now (compat mode!)
dependencies_opts := DepOpts} = maps:get(AppName, Contexts),
%% Local resources
ArtifactDir = filename:join([OutDir, OutPath]),
AbsSources = find_source_files(BaseDir, SrcExt, SrcDirs, BaseOpts),
%% Multi-app resources
InDirs = maps:get(in_dirs, Contexts),
%% Run the analysis
rebar_compiler_dag:populate_sources(
G, Compiler, InDirs, AbsSources, DepOpts
),
{{BaseDir, ArtifactDir}, AbsSources}.
%% @private Reorder `Apps' to follow the sequence of app names in
%% `Names' (the compile order derived from the DAG). Names without a
%% matching app are dropped: keyfind returns `false', which fails the
%% `{_, App}' generator pattern and is skipped silently.
sort_apps(Names, Apps) ->
    NamedApps = [{rebar_app_info:name(App), App} || App <- Apps],
    [App || Name <- Names,
            {_, App} <- [lists:keyfind(Name, 1, NamedApps)]].
-spec compile_analyzed({module(), digraph:graph()}, rebar_app_info:t(), map()) -> ok.
compile_analyzed({Compiler, G}, AppInfo, Contexts) -> % > 3.13.0
run(G, Compiler, AppInfo, Contexts),
%% Extras are tricky and get their own mini-analysis
ExtraApps = annotate_extras(AppInfo),
[begin
{ExtraCtx, [SortedExtra]} = analyze_all({Compiler, G}, [ExtraAppInfo]),
run(G, Compiler, SortedExtra, ExtraCtx)
end || ExtraAppInfo <- ExtraApps],
ok.
-spec compile_all([module(), ...], rebar_app_info:t()) -> ok.
compile_all(Compilers, AppInfo) -> % =< 3.13.0 interface; plugins use this!
%% Support the old-style API by re-declaring a local DAG for the
%% compile steps needed.
lists:foreach(fun(Compiler) ->
OutDir = rebar_app_info:out_dir(AppInfo),
G = rebar_compiler_dag:init(OutDir, Compiler, undefined, []),
compile_all([{Compiler, G}], AppInfo),
Ctx = analyze_all({Compiler, G}, [AppInfo]),
compile_analyzed({Compiler, G}, AppInfo, Ctx),
rebar_compiler_dag:maybe_store(G, OutDir, Compiler, undefined, []),
rebar_compiler_dag:terminate(G)
end, Compilers).
prepare_compiler_env(AppInfo) ->
EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
%% Make sure that outdir is on the path
ok = rebar_file_utils:ensure_dir(EbinDir),
true = code:add_patha(filename:absname(EbinDir)),
prepare_compiler_env(Compiler, Apps) ->
lists:foreach(
fun(AppInfo) ->
EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
%% Make sure that outdir is on the path
ok = rebar_file_utils:ensure_dir(EbinDir),
true = code:add_patha(filename:absname(EbinDir))
end,
Apps
),
%% necessary for erlang:function_exported/3 to work as expected
%% called here for clarity as it's required by both opts_changed/2
%% and erl_compiler_opts_set/0 in needed_files
_ = code:ensure_loaded(compile),
_ = code:ensure_loaded(Compiler),
ok.
run(G, CompilerMod, AppInfo) ->
run(G, CompilerMod, AppInfo, Contexts) ->
Name = rebar_app_info:name(AppInfo),
#{src_dirs := SrcDirs,
include_dirs := InclDirs,
src_ext := SrcExt,
out_mappings := Mappings} = CompilerMod:context(AppInfo),
out_mappings := Mappings} = maps:get(Name, Contexts),
BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
BaseOpts = rebar_app_info:opts(AppInfo),
AbsInclDirs = [filename:join(BaseDir, InclDir) || InclDir <- InclDirs],
FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, BaseOpts),
AbsSrcDirs = [filename:join(BaseDir, SrcDir) || SrcDir <- SrcDirs],
InDirs = lists:usort(AbsInclDirs ++ AbsSrcDirs),
rebar_compiler_dag:prune(G, AbsSrcDirs, EbinDir, FoundFiles),
rebar_compiler_dag:update(G, CompilerMod, InDirs, FoundFiles),
{{FirstFiles, FirstFileOpts},
{RestFiles, Opts}} = CompilerMod:needed_files(G, FoundFiles, Mappings, AppInfo),
@ -253,3 +340,6 @@ add_to_includes(AppInfo, Dirs) ->
NewErlOpts = [{i, Dir} || Dir <- Dirs] ++ List,
NewOpts = rebar_opts:set(Opts, erl_opts, NewErlOpts),
rebar_app_info:opts(AppInfo, NewOpts).
default_ctx() ->
#{dependencies_opts => []}.

+ 278
- 88
src/rebar_compiler_dag.erl Ver fichero

@ -1,7 +1,9 @@
%%% Module handling the directed graph required for the analysis
%%% of all top-level applications by the various compiler plugins.
-module(rebar_compiler_dag).
-export([init/4, prune/4, update/4, maybe_store/5, terminate/1]).
-export([init/4, maybe_store/5, terminate/1]).
-export([prune/5, populate_sources/5, populate_deps/3, propagate_stamps/1,
compile_order/2]).
-include("rebar.hrl").
@ -17,7 +19,7 @@
-record(dag, {vsn = ?DAG_VSN :: pos_integer(),
info = {[], [], []} :: dag_rec()}).
%% You should initialize one DAG per compiler module.
%% @doc You should initialize one DAG per compiler module.
%% `CritMeta' is any contextual information that, if it is found to change,
%% must invalidate the DAG loaded from disk.
-spec init(file:filename_all(), atom(), string() | undefined, critical_meta()) -> dag().
@ -35,64 +37,145 @@ init(Dir, Compiler, Label, CritMeta) ->
end,
G.
-spec prune(dag(), file:filename_all(), file:filename_all(), [file:filename_all()]) -> ok.
prune(G, SrcDirs, EbinDir, Erls) ->
%% A source file may have been renamed or deleted. Remove it from the graph
%% and remove any beam file for that source if it exists.
Vertices = digraph:vertices(G),
SrcParts = [filename:split(SrcDir) || SrcDir <- SrcDirs],
[maybe_rm_beam_and_edge(G, EbinDir, File)
|| File <- lists:sort(Vertices) -- lists:sort(Erls),
filename:extension(File) =:= ".erl",
lists:any(fun(Src) -> lists:prefix(Src, filename:split(File)) end,
SrcParts)],
ok.
%% @doc Clear up inactive (deleted) source files from a given project.
%% The file must be in one of the directories that may contain source files
%% for an OTP application; source files found in the DAG `G' that lie outside
%% of these directories may be used in other circumstances (i.e. options affecting
%% visibility).
%% Prune out files that have no corresponding sources
prune(G, SrcExt, ArtifactExt, Sources, AppPaths) ->
%% Collect source files that may have been removed. These files:
%% * are not in Sources
%% * have SrcExt
%% In the process, prune header files - those don't have ArtifactExt
%% extension - using side effect in is_deleted_source/5.
case [Del || Del <- (digraph:vertices(G) -- Sources),
is_deleted_source(G, Del, filename:extension(Del), SrcExt, ArtifactExt)] of
[] ->
ok; %% short circuit without sorting AppPaths
Deleted ->
prune_source_files(G, SrcExt, ArtifactExt,
lists:sort(AppPaths), lists:sort(Deleted))
end.
is_deleted_source(_G, _F, Extension, Extension, _ArtifactExt) ->
%% source file
true;
is_deleted_source(_G, _F, Extension, _SrcExt, Extension) ->
%% artifact file - skip
false;
is_deleted_source(G, F, _Extension, _SrcExt, _ArtifactExt) ->
%% must be header file
digraph:in_edges(G, F) == [] andalso maybe_rm_vertex(G, F),
false.
%% This can be implemented using smarter trie, but since the
%% whole procedure is rare, don't bother with optimisations.
%% AppDirs & Fs are sorted, and to check if File is outside of
%% App, lists:prefix is checked. When the App with File in it
%% exists, verify file is still there on disk.
prune_source_files(_G, _SrcExt, _ArtifactExt, [], _) ->
ok;
prune_source_files(_G, _SrcExt, _ArtifactExt, _, []) ->
ok;
prune_source_files(G, SrcExt, ArtifactExt, [AppDir | AppTail], Fs) when is_atom(AppDir) ->
%% dirty bit shenanigans
prune_source_files(G, SrcExt, ArtifactExt, AppTail, Fs);
prune_source_files(G, SrcExt, ArtifactExt, [{App, Out} | AppTail] = AppPaths, [File | FTail]) ->
case lists:prefix(App, File) of
true ->
maybe_rm_artifact_and_edge(G, Out, SrcExt, ArtifactExt, File),
prune_source_files(G, SrcExt, ArtifactExt, AppPaths, FTail);
false when App < File ->
prune_source_files(G, SrcExt, ArtifactExt, AppTail, [File|FTail]);
false ->
prune_source_files(G, SrcExt, ArtifactExt, AppPaths, FTail)
end.
%% @doc this function scans all the source files found and looks into
%% all the `InDirs' for deps (other erl or .hrl files) that are related
%% to them (by calling `CompileMod:dependencies()' on them).
%%
%% The trick here is that change detection, done with last_modified stamps,
%% takes place at the same time as the graph propagation (finding deps)
%% themselves. As such, this is a confusing mutually recursive depth-first
%% search function that relies on side-effects and precise order-of-traversal
%% to propagate file changes.
%%
%% To be replaced by a more declarative EPP-based flow.
-spec update(dag(), module(), [file:filename_all()], [file:filename_all()]) -> ok.
update(_, _, _, []) ->
%% all the `InDirs' for deps (other source files, or files that aren't source
%% but still returned by the compiler module) that are related
%% to them.
populate_sources(_G, _Compiler, _InDirs, [], _DepOpts) ->
ok;
update(G, Compiler, InDirs, [Source|Erls]) ->
populate_sources(G, Compiler, InDirs, [Source|Erls], DepOpts) ->
case digraph:vertex(G, Source) of
{_, LastUpdated} ->
case filelib:last_modified(Source) of
0 ->
%% The file doesn't exist anymore,
%% erase it from the graph.
%% All the edges will be erased automatically.
%% The File doesn't exist anymore, delete
%% from the graph.
digraph:del_vertex(G, Source),
mark_dirty(G),
update(G, Compiler, InDirs, Erls);
populate_sources(G, Compiler, InDirs, Erls, DepOpts);
LastModified when LastUpdated < LastModified ->
add_to_dag(G, Compiler, InDirs, Source, LastModified, filename:dirname(Source)),
update(G, Compiler, InDirs, Erls);
_ ->
AltErls = digraph:out_neighbours(G, Source),
%% Deps must be explored before the module itself
update(G, Compiler, InDirs, AltErls),
Modified = is_dirty(G),
MaxModified = update_max_modified_deps(G, Source),
case Modified orelse MaxModified > LastUpdated of
true -> mark_dirty(G);
false -> ok
end,
update(G, Compiler, InDirs, Erls)
digraph:add_vertex(G, Source, LastModified),
prepopulate_deps(G, Compiler, InDirs, Source, DepOpts, old),
mark_dirty(G);
_ -> % unchanged
ok
end;
false ->
add_to_dag(G, Compiler, InDirs, Source, filelib:last_modified(Source), filename:dirname(Source)),
update(G, Compiler, InDirs, Erls)
LastModified = filelib:last_modified(Source),
digraph:add_vertex(G, Source, LastModified),
prepopulate_deps(G, Compiler, InDirs, Source, DepOpts, new),
mark_dirty(G)
end,
populate_sources(G, Compiler, InDirs, Erls, DepOpts).
%% @doc Scan all files in the digraph that are seen as dependencies, but are
%% neither source files nor artifacts (i.e. header files that don't produce
%% artifacts of any kind).
populate_deps(G, SourceExt, ArtifactExts) ->
%% deps are files that are part of the digraph, but couldn't be scanned
%% because they are neither source files (`SourceExt') nor mappings
%% towards build artifacts (`ArtifactExts'); they will therefore never
%% be handled otherwise and need to be re-scanned for accuracy, even
%% if they are not being analyzed (we assume `Compiler:deps' did that
%% in depth already, and improvements should be driven at that level)
IgnoredExts = [SourceExt | ArtifactExts],
Vertices = digraph:vertices(G),
[refresh_dep(G, File)
|| File <- Vertices,
Ext <- [filename:extension(File)],
not lists:member(Ext, IgnoredExts)],
ok.
%% @doc Take the timestamps/diff changes and propagate them from a dep to the
%% parent; given:
%% A 0 -> B 1 -> C 3 -> D 2
%% then we expect to get back:
%% A 3 -> B 3 -> C 3 -> D 2
%% This is going to be safe for the current run of regeneration, but also for the
%% next one; unless any file in the chain has changed, the stamp won't move up
%% and there won't be a reason to recompile.
%% The obvious caveat to this one is that a file changing by restoring an old version
%% won't be picked up, but this weakness already existed in terms of timestamps.
propagate_stamps(G) ->
case is_dirty(G) of
false ->
%% no change, no propagation to make
ok;
true ->
%% we can use a topsort, start at the end of it (files with no deps)
%% and update them all in order. By doing this, each file only needs to check
%% for one level of out-neighbours to set itself to the right appropriate time.
DepSort = lists:reverse(digraph_utils:topsort(G)),
propagate_stamps(G, DepSort)
end.
%% @doc Return the reverse sorting order to get dep-free apps first.
%% -- we would usually not need to consider the non-source files for the order to
%% be complete, but using them doesn't hurt.
compile_order(G, AppDefs) ->
Edges = [{V1,V2} || E <- digraph:edges(G),
{_,V1,V2,_} <- [digraph:edge(G, E)]],
AppPaths = prepare_app_paths(AppDefs),
compile_order(Edges, AppPaths, #{}).
%% @doc Store the DAG on disk if it was dirty
maybe_store(G, Dir, Compiler, Label, CritMeta) ->
case is_dirty(G) of
true ->
@ -103,6 +186,7 @@ maybe_store(G, Dir, Compiler, Label, CritMeta) ->
ok
end.
%% Get rid of the live state for the digraph; leave disk stuff in place.
terminate(G) ->
true = digraph:delete(G).
@ -142,62 +226,168 @@ store_dag(G, File, CritMeta) ->
%% Drop a file from the digraph if it doesn't exist, and if so,
%% delete its related build artifact
maybe_rm_beam_and_edge(G, OutDir, Source) ->
maybe_rm_artifact_and_edge(G, OutDir, SrcExt, Ext, Source) ->
%% This is NOT a double check; it is the only check that the source file is actually gone
case filelib:is_regular(Source) of
true ->
%% Actually exists, don't delete
false;
false ->
Target = target_base(OutDir, Source) ++ ".beam",
?DEBUG("Source ~ts is gone, deleting previous beam file if it exists ~ts", [Source, Target]),
Target = target(OutDir, Source, SrcExt, Ext),
?DEBUG("Source ~ts is gone, deleting previous ~ts file if it exists ~ts", [Source, Ext, Target]),
file:delete(Target),
digraph:del_vertex(G, Source),
mark_dirty(G),
true
end.
%% @private Return what should be the base name of an erl file, relocated to the
%% target directory. For example:
%% target_base("ebin/", "src/my_module.erl") -> "ebin/my_module"
target_base(OutDir, Source) ->
filename:join(OutDir, filename:basename(Source, ".erl")).
%% @private a file has been found to change or wasn't part of the DAG before,
%% and must be added, along with all its dependencies.
add_to_dag(G, Compiler, InDirs, Source, LastModified, SourceDir) ->
AbsIncls = Compiler:dependencies(Source, SourceDir, InDirs),
digraph:add_vertex(G, Source, LastModified),
digraph:del_edges(G, digraph:out_edges(G, Source)),
%% Deps must be explored before the module itself
[begin
update(G, Compiler, InDirs, [Incl]),
digraph:add_edge(G, Source, Incl)
end || Incl <- AbsIncls],
mark_dirty(G),
AbsIncls.
%% @private change status propagation: if the dependencies of a file have
%% been updated, mark the last_modified time for that file to be equivalent
%% to its most-recently-changed dependency; that way, nested header file
%% change stamps are propagated to the final module.
%% This is required because at some point the module is compared to its
%% associated .beam file's last-generation stamp to know if it requires
%% rebuilding.
%% The responsibility for this is however diffuse across various modules.
update_max_modified_deps(G, Source) ->
MaxModified = lists:foldl(
fun(File, Acc) ->
maybe_rm_vertex(G, Source) ->
case filelib:is_regular(Source) of
true ->
exists;
false ->
digraph:del_vertex(G, Source),
mark_dirty(G)
end.
%% Add dependencies of a given file to the DAG. If the file is not found yet,
%% mark its timestamp to 0, which means we have no info on it.
%% Source files will be covered at a later point in their own scan, and
%% non-source files are going to be covered by `populate_deps/3'.
prepopulate_deps(G, Compiler, InDirs, Source, DepOpts, Status) ->
SourceDir = filename:dirname(Source),
AbsIncls = case erlang:function_exported(Compiler, dependencies, 4) of
false ->
Compiler:dependencies(Source, SourceDir, InDirs);
true ->
Compiler:dependencies(Source, SourceDir, InDirs, DepOpts)
end,
%% the file hasn't been visited yet; set it to existing, but with
%% a last modified value that's null so it gets updated to something new.
[digraph:add_vertex(G, Src, 0) || Src <- AbsIncls,
digraph:vertex(G, Src) =:= false],
%% drop edges from deps that aren't included!
[digraph:del_edge(G, Edge) || Status == old,
Edge <- digraph:out_edges(G, Source),
{_, _Src, Path, _} <- [digraph:edge(G, Edge)],
not lists:member(Path, AbsIncls)],
%% Add the rest
[digraph:add_edge(G, Source, Incl) || Incl <- AbsIncls],
ok.
%% check that a dep file is up to date
refresh_dep(G, File) ->
{_, LastUpdated} = digraph:vertex(G, File),
case filelib:last_modified(File) of
0 ->
%% Gone! Erase from the graph
digraph:del_vertex(G, File),
mark_dirty(G);
LastModified when LastUpdated < LastModified ->
digraph:add_vertex(G, File, LastModified),
mark_dirty(G);
_ ->
% unchanged
ok
end.
%% Do the actual propagation of all files; the files are expected to be
%% in a topological order such that we don't need to go more than a level
%% deep in what we search.
propagate_stamps(_G, []) ->
ok;
propagate_stamps(G, [File|Files]) ->
Stamps = [element(2, digraph:vertex(G, F))
|| F <- digraph:out_neighbours(G, File)],
case Stamps of
[] ->
ok;
_ ->
Max = lists:max(Stamps),
case digraph:vertex(G, File) of
{_, MaxModified} when MaxModified > Acc -> MaxModified;
_ -> Acc
{_, Smaller} when Smaller < Max ->
digraph:add_vertex(G, File, Max);
_ ->
ok
end
end,
propagate_stamps(G, Files).
%% Do the actual reversal; be aware that only working from the edges
%% may omit files, so we have to add all non-dependent apps manually
%% to make sure we don't drop them. Since they have no deps, they're
%% safer to put first (and compile first)
compile_order([], AppPaths, AppDeps) ->
%% use a digraph so we don't reimplement topsort by hand.
G = digraph:new([acyclic]), % ignore cycles and hope it works
Tups = maps:keys(AppDeps),
{Va,Vb} = lists:unzip(Tups),
[digraph:add_vertex(G, V) || V <- Va],
[digraph:add_vertex(G, V) || V <- Vb],
[digraph:add_edge(G, V1, V2) || {V1, V2} <- Tups],
Sorted = lists:reverse(digraph_utils:topsort(G)),
digraph:delete(G),
Standalone = [Name || {_, Name} <- AppPaths] -- Sorted,
Standalone ++ Sorted;
compile_order([{P1,P2}|T], AppPaths, AppDeps) ->
%% Assume most dependencies are between files of the same app
%% so ask to see if it's the same before doing a deeper check:
case find_app(P1, AppPaths) of
not_found -> % system lib probably! not in the repo
compile_order(T, AppPaths, AppDeps);
{P1App, P1Path} ->
case find_cached_app(P2, {P1App, P1Path}, AppPaths) of
{P2App, _} when P2App =/= P1App ->
compile_order(T, AppPaths, AppDeps#{{P1App,P2App} => true});
_ ->
compile_order(T, AppPaths, AppDeps)
end
end,
0,
[Source | digraph:out_neighbours(G, Source)]
),
digraph:add_vertex(G, Source, MaxModified),
MaxModified.
end.
%% Swap app name with paths in the order, and sort there; this lets us
%% bail out early in a search where a file won't be found.
prepare_app_paths(AppPaths) ->
lists:sort([{filename:split(Path), Name} || {Name, Path} <- AppPaths]).
%% Look for the app to which the path belongs; needed to
%% go from an edge between files in the DAG to building
%% app-related orderings
find_app(Path, AppPaths) ->
find_app_(filename:split(Path), AppPaths).
%% A cached search for the app to which a path belongs;
%% the assumption is that sorted edges and common relationships
%% are going to be between local files within an app most
%% of the time; so we first look for the same path as a
%% prior match to avoid searching _all_ potential candidates.
%% If it doesn't work, go for the normal search.
find_cached_app(Path, {Name, AppPath}, AppPaths) ->
Split = filename:split(Path),
case find_app_(Split, [{AppPath, Name}]) of
not_found -> find_app_(Split, AppPaths);
LastEntry -> LastEntry
end.
%% Do the actual recursive search over the sorted `{SplitPath, Name}'
%% app entries. Paths are pre-split path segment lists (see
%% prepare_app_paths/1), so `lists:prefix/2' checks directory
%% containment. Because the entries are sorted, we can bail out early
%% once an app path sorts after the file path: no later entry can be a
%% prefix of it anymore.
find_app_(_Path, []) ->
    not_found;
find_app_(Path, [{AppPath, AppName}|Rest]) ->
    case lists:prefix(AppPath, Path) of
        true ->
            {AppName, AppPath};
        false when AppPath > Path ->
            %% sorted input: no further entry can match
            not_found;
        false ->
            find_app_(Path, Rest)
    end.
%% @private Compute the artifact path for a source file relocated to
%% the output directory, swapping the source extension for the
%% artifact one. For example:
%%   target("ebin/", "src/my_module.erl", ".erl", ".beam")
%%     -> "ebin/my_module.beam"
target(OutDir, Source, SrcExt, Ext) ->
    Base = filename:basename(Source, SrcExt),
    filename:join(OutDir, Base ++ Ext).
%% Mark the digraph as having been modified, which is required to
%% save its updated form on disk after the compiling run.

+ 176
- 0
src/rebar_compiler_epp.erl Ver fichero

@ -0,0 +1,176 @@
%%% @doc
%%% Analyze erlang-related files and compilation data using EPP, in order to
%%% build complete and accurate DAGs
%%% @end
-module(rebar_compiler_epp).
-export([deps/2, resolve_module/2]).
-include_lib("kernel/include/file.hrl").
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Basic File Handling %%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Find all Erlang code dependencies for a given file: direct and lib
%% includes, parse transforms, and behaviours.
-spec deps(file:filename_all(), Opts) -> Attributes when
      Opts :: [Opt, ...],
      Opt :: {includes, [file:filename_all()]}
           | {macros, [file:filename_all()]},
      %% all of the following keys are in practice present, but OTP-18
      %% does not accept mandatory map keys in specs
      Attributes :: #{include => [file:filename_all()],
                      missing_include_file => [file:filename_all()],
                      missing_include_lib => [file:filename_all()],
                      behaviour => [atom()],
                      parse_transform => [atom()],
                      is_behaviour => boolean()}.
deps(File, Opts) ->
    %% Options epp doesn't understand are stripped out before parsing
    %% and handed to the form analysis instead.
    {EppOpts, AnalysisOpts} = split_opts(Opts),
    {ok, Forms} = epp:parse_file(File, EppOpts),
    Attrs = handle_forms(Forms, default_attrs(), AnalysisOpts),
    normalize(Attrs).
%% Find the first path whose file name (minus ".erl") matches the
%% given Erlang module name.
resolve_module(Mod, Paths) ->
    search_module_path(atom_to_list(Mod), Paths).

%% @private Walk candidate paths in order, returning the first match.
search_module_path(_ModStr, []) ->
    {error, not_found};
search_module_path(ModStr, [Path | Rest]) ->
    case filename:basename(Path, ".erl") of
        ModStr -> {ok, Path};
        _ -> search_module_path(ModStr, Rest)
    end.
%%%%%%%%%%%%%%%
%%% PRIVATE %%%
%%%%%%%%%%%%%%%
%% @private The empty result set every file analysis starts from; all
%% keys are pre-seeded so later `:='-style updates can never fail.
default_attrs() ->
    maps:from_list(
      [{include, []},
       {missing_include_file, []},
       {missing_include_lib, []},
       {behaviour, []},
       {parse_transform, []},
       {is_behaviour, false}]).
%% @private Sort and deduplicate every list-valued attribute so results
%% are stable regardless of the order forms were traversed in.
%% Crashes (badmatch/badkey) if any expected key is absent, same as the
%% explicit map match it replaces.
normalize(Map) ->
    ListKeys = [include, missing_include_file, missing_include_lib,
                behaviour, parse_transform],
    lists:foldl(
      fun(Key, Acc) ->
              #{Key := Vals} = Acc,
              Acc#{Key := lists:usort(Vals)}
      end,
      Map, ListKeys).
%% @private Fold every parsed form into the attribute map, after
%% removing the references epp inserts for the module's own file.
handle_forms([FileForm | Forms], Map, Opts) ->
    Relevant = drop_self_file(FileForm, Forms),
    lists:foldl(fun(Form, Acc) -> handle_form(Form, Acc, Opts) end,
                Map, Relevant).
%% @private The first `file' attribute of a parsed module refers to the
%% module's own source file, and epp may repeat it throughout the form
%% list; remove every occurrence pointing at that same path so only
%% genuine includes remain. A non-file-attribute head leaves the forms
%% untouched.
drop_self_file({attribute, _, file, {Path, _}}, Forms) ->
    lists:filter(
      fun({attribute, _, file, {P, _}}) when P =:= Path -> false;
         (_) -> true
      end,
      Forms);
drop_self_file(_File, Forms) ->
    Forms.
%% @private Fold a single parsed form into the attribute map.
%%
%% Included files (both libs and direct includes);
%% there are also references to the module's own file declaration
%% in there, but this is dropped by `drop_self_file/2' and assumed
%% to be gone here.
handle_form({attribute, _Line, file, {Path, Ln}}, Map, Opts) ->
    %% Some people think they're funny and they go include attributes
    %% like:
    %%   -file("fake/file.hrl", Ln).
    %% Which are expanded to the very clause we have here, which in
    %% turn is impossible to distinguish from actual included files
    %% once checked through epp. The way we work around that here
    %% is to check if the path is absolute, and if so, keep it in since
    %% epp has expanded it; otherwise consider it to be a failed include.
    %% This is not perfect but we can't do much more without touching the
    %% disk and hopefully nobody else in the community has relied on this
    %% thing.
    case filename:absname(Path) of
        Path ->
            update_with(include, fun(L) -> [Path|L] end, [Path], Map);
        _ -> % argh!
            %% Re-dispatch to the missing-include clause below. The error
            %% term must have the standard `{Line, Module, Descriptor}'
            %% three-tuple shape epp itself produces, or the clause
            %% won't match and the failed include would be lost.
            handle_form({error, {Ln, epp, {include, file, Path}}}, Map, Opts)
    end;
%% Include files that EPP couldn't resolve
handle_form({error, {_Line, epp, {include, file, Name}}}, Map, _Opts) ->
    update_with(missing_include_file, fun(L) -> [Name|L] end, [Name], Map);
handle_form({error, {_Line, epp, {include, lib, Path}}}, Map, Opts) ->
    %% This file might still exist in the regular paths not in
    %% code:lib_dir, which depend on options we pass to this module;
    %% recursively seek it, and add it to the paths to expand here.
    case find_include_with_opts(Path, Opts) of
        {ok, File} ->
            %% we can't go and figure out the contents within that include
            %% file because we'd need its own compiler opts and app opts
            %% to do it safely. Tracking that file is still better
            %% than nothing though.
            update_with(include, fun(L) -> [File|L] end, [File], Map);
        {error, not_found} ->
            update_with(missing_include_lib, fun(L) -> [Path|L] end, [Path], Map)
    end;
%% Behaviour implementation declaration; both spellings are accepted
%% by the compiler and both are normalized under `behaviour' here.
handle_form({attribute, _Line, behaviour, Name}, Map, _Opts) ->
    update_with(behaviour, fun(L) -> [Name|L] end, [Name], Map);
handle_form({attribute, _Line, behavior, Name}, Map, _Opts) ->
    update_with(behaviour, fun(L) -> [Name|L] end, [Name], Map);
%% Extract parse transforms from compile attributes; a bare (non-list)
%% attribute is normalized to a one-element list and re-dispatched.
handle_form({attribute, Line, compile, Attr}, Map, Opts) when not is_list(Attr) ->
    handle_form({attribute, Line, compile, [Attr]}, Map, Opts);
handle_form({attribute, _Line, compile, Attrs}, Map, _Opts) ->
    %% parse_transform options may carry extra config: {M, Opts} or M
    Mods = [case T of
                {_, {M,_}} -> M;
                {_, M} -> M
            end || T <- proplists:lookup_all(parse_transform, Attrs)],
    update_with(parse_transform, fun(L) -> Mods++L end, Mods, Map);
%% Current style behaviour specification declaration
handle_form({attribute, _Line, callback, _}, Map, _Opts) ->
    Map#{is_behaviour => true};
%% Old style behaviour specification, both spellings supported.
%% The function needs to be exported, but we skip over that logic
%% for now.
handle_form({function, _Line, behaviour_info, 1, _}, Map, _Opts) ->
    Map#{is_behaviour => true};
handle_form({function, _Line, behavior_info, 1, _}, Map, _Opts) ->
    Map#{is_behaviour => true};
%% Skip everything else (functions, specs, exports, ...)
handle_form(_, Map, _Opts) ->
    Map.
%% @private Partition the options into the ones epp understands and the
%% extra ones this module introduced (to palliate issues with resolving
%% include_libs and parse transforms); the latter must not reach epp.
split_opts(Opts) ->
    LocalOpts = [include_libs, parse_transforms],
    IsEppOpt = fun({OptName, _Val}) ->
                       not lists:member(OptName, LocalOpts)
               end,
    lists:partition(IsEppOpt, Opts).
%% @private Look for an include_lib-style relative path under the
%% explicitly configured `include_libs' roots (defaults to none).
find_include_with_opts(Path, Opts) ->
    Roots = proplists:get_value(include_libs, Opts, []),
    find_include_lib(Roots, Path).
%% @private Scan each candidate root directory for the relative file
%% path and return the first match that exists on disk.
find_include_lib([], _File) ->
    {error, not_found};
find_include_lib([Root | Roots], File) ->
    Candidate = filename:join([Root, File]),
    case filelib:is_regular(Candidate) of
        true -> {ok, Candidate};
        false -> find_include_lib(Roots, File)
    end.
%% Compatibility shim: maps:update_with/4 only exists from OTP 19
%% onwards; the `no_maps_update_with' macro is set by a platform_define
%% in rebar.config on older releases. Both variants insert `Default'
%% when `Key' is absent and apply `Fun' to the current value otherwise.
-ifdef(no_maps_update_with).
update_with(Key, Fun, Default, Map) ->
    case Map of
        #{Key := Value} -> Map#{Key := Fun(Value)};
        _ -> Map#{Key => Default}
    end.
-else.
update_with(Key, Fun, Default, Map) ->
    maps:update_with(Key, Fun, Default, Map).
-endif.

+ 46
- 4
src/rebar_compiler_erl.erl Ver fichero

@ -4,7 +4,7 @@
-export([context/1,
needed_files/4,
dependencies/3,
dependencies/3, dependencies/4,
compile/4,
clean/2,
format_error/1]).
@ -26,11 +26,20 @@ context(AppInfo) ->
ErlOpts = rebar_opts:erl_opts(RebarOpts),
ErlOptIncludes = proplists:get_all_values(i, ErlOpts),
InclDirs = lists:map(fun(Incl) -> filename:absname(Incl) end, ErlOptIncludes),
AbsIncl = [filename:join([OutDir, "include"]) | InclDirs],
PTrans = proplists:get_all_values(parse_transform, ErlOpts),
Macros = [case Tup of
{d,Name} -> Name;
{d,Name,Val} -> {Name,Val}
end || Tup <- ErlOpts,
is_tuple(Tup) andalso element(1,Tup) == d],
#{src_dirs => ExistingSrcDirs,
include_dirs => [filename:join([OutDir, "include"]) | InclDirs],
include_dirs => AbsIncl,
src_ext => ".erl",
out_mappings => Mappings}.
out_mappings => Mappings,
dependencies_opts => [{includes, AbsIncl}, {macros, Macros},
{parse_transforms, PTrans}]}.
needed_files(Graph, FoundFiles, _, AppInfo) ->
@ -86,6 +95,32 @@ dependencies(Source, SourceDir, Dirs) ->
throw(?PRV_ERROR({cannot_read_file, Source, file:format_error(Reason)}))
end.
%% EPP-based dependency resolution for a single source file: returns
%% the absolute paths this file depends on — the .erl files of parse
%% transforms (both those passed in DepOpts and those declared in the
%% module's own -compile attributes), the .erl files of declared
%% behaviours, and every resolved include/include_lib header.
dependencies(Source, _SourceDir, Dirs, DepOpts) ->
    OptPTrans = proplists:get_value(parse_transforms, DepOpts, []),
    try rebar_compiler_epp:deps(Source, DepOpts) of
        #{include := AbsIncls,
          missing_include_file := _MissIncl,
          missing_include_lib := _MissInclLib,
          parse_transform := PTrans,
          behaviour := Behaviours} ->
            %% TODO: check for core transforms?
            %% Unresolved includes are only logged; a later compile pass
            %% surfaces the real error if they actually matter.
            {_MissIncl, _MissInclLib} =/= {[],[]} andalso
                ?DEBUG("Missing: ~p", [{_MissIncl, _MissInclLib}]),
            expand_file_names([module_to_erl(Mod) || Mod <- OptPTrans ++ PTrans], Dirs) ++
            expand_file_names([module_to_erl(Mod) || Mod <- Behaviours], Dirs) ++
            AbsIncls
    catch
        %% presumably a failed file operation inside the EPP analysis;
        %% translate the POSIX reason to a readable string when possible
        error:{badmatch, {error, Reason}} ->
            case file:format_error(Reason) of
                "unknown POSIX error" ->
                    throw(?PRV_ERROR({cannot_read_file, Source, Reason}));
                ReadableReason ->
                    throw(?PRV_ERROR({cannot_read_file, Source, ReadableReason}))
            end;
        error:Reason ->
            throw(?PRV_ERROR({cannot_read_file, Source, Reason}))
    end.
compile(Source, [{_, OutDir}], Config, ErlOpts) ->
case compile:file(Source, [{outdir, OutDir} | ErlOpts]) of
{ok, _Mod} ->
@ -329,7 +364,14 @@ expand_file_names(Files, Dirs) ->
true ->
[Incl];
false ->
rebar_utils:find_files_in_dirs(Dirs, [$^, Incl, $$], true)
Res = rebar_utils:find_files_in_dirs(Dirs, [$^, Incl, $$], true),
case Res of
[] ->
?DEBUG("FILE ~p NOT FOUND", [Incl]),
[];
_ ->
Res
end
end
end, Files).

+ 55
- 24
src/rebar_prv_compile.erl Ver fichero

@ -169,9 +169,8 @@ run_compilers(State, _Providers, Apps, Tag) ->
CritMeta = [], % used to be incldirs per app
DAGs = [{Mod, rebar_compiler_dag:init(Dir, Mod, DAGLabel, CritMeta)}
|| Mod <- rebar_state:compilers(State)],
rebar_paths:set_paths([deps], State),
%% Compile all the apps
[build_app(DAGs, AppInfo, State) || AppInfo <- Apps],
build_apps(DAGs, Apps, State),
%% Potentially store shared compiler DAGs so next runs can easily
%% share the base information for easy re-scans.
lists:foreach(fun({Mod, G}) ->
@ -260,31 +259,63 @@ extra_virtual_apps(State, VApp0, [Dir|Dirs]) ->
%% Internal functions
%% ===================================================================
build_app(DAGs, AppInfo, State) ->
%% Split the apps by project type: rebar3-native apps (type rebar3 or
%% unset) go through the shared-DAG compiler pipeline, everything else
%% is handed to its registered custom project builder first.
build_apps(DAGs, Apps, State) ->
    IsRebar3 = fun(AppInfo) ->
        case rebar_app_info:project_type(AppInfo) of
            rebar3 -> true;
            undefined -> true;
            _ -> false
        end
    end,
    {Rebar3Apps, CustomApps} = lists:partition(IsRebar3, Apps),
    lists:foreach(fun(AppInfo) -> build_custom_builder_app(AppInfo, State) end,
                  CustomApps),
    build_rebar3_apps(DAGs, Rebar3Apps, State).
build_custom_builder_app(AppInfo, State) ->
?INFO("Compiling ~ts", [rebar_app_info:name(AppInfo)]),
case rebar_app_info:project_type(AppInfo) of
Type when Type =:= rebar3 ; Type =:= undefined ->
%% assume the deps paths are already set by the caller (run_compilers/3)
%% and shared for multiple apps to save work.
rebar_compiler:compile_all(DAGs, AppInfo);
Type ->
ProjectBuilders = rebar_state:project_builders(State),
case lists:keyfind(Type, 1, ProjectBuilders) of
{_, Module} ->
%% load plugins since thats where project builders would be,
%% prevents parallelism at this level.
rebar_paths:set_paths([deps, plugins], State),
Res = Module:build(AppInfo),
rebar_paths:set_paths([deps], State),
case Res of
ok -> ok;
{error, Reason} -> throw({error, {Module, Reason}})
end;
_ ->
throw(?PRV_ERROR({unknown_project_type, rebar_app_info:name(AppInfo), Type}))
end
Type = rebar_app_info:project_type(AppInfo),
ProjectBuilders = rebar_state:project_builders(State),
case lists:keyfind(Type, 1, ProjectBuilders) of
{_, Module} ->
%% load plugins since thats where project builders would be,
%% prevents parallelism at this level.
rebar_paths:set_paths([deps, plugins], State),
Res = Module:build(AppInfo),
rebar_paths:set_paths([deps], State),
case Res of
ok -> ok;
{error, Reason} -> throw({error, {Module, Reason}})
end;
_ ->
throw(?PRV_ERROR({unknown_project_type, rebar_app_info:name(AppInfo), Type}))
end.
%% Compile all rebar3-native apps, one compiler DAG at a time, with
%% each DAG analyzed across every app at once (cross-app deps).
build_rebar3_apps(DAGs, Apps, _State) when DAGs =:= []; Apps =:= [] ->
    %% No apps to actually build, probably just other compile phases
    %% to run for non-rebar3 apps, someone wanting .app files built,
    %% or just needing the hooks to run maybe.
    ok;
build_rebar3_apps(DAGs, Apps, State) ->
    rebar_paths:set_paths([deps], State),
    %% To maintain output order, we need to mention each app being compiled
    %% in order, even if the order isn't really there anymore due to each
    %% compiler being run in broken sequence. The last compiler tends to be
    %% the big ERLC one so we use the last compiler for the output.
    LastDAG = lists:last(DAGs),
    %% we actually need to compile each DAG one after the other to prevent
    %% issues where a .yrl file that generates a .erl file gets to be seen.
    [begin
         %% analyze_all reorders the apps into dependency order for this DAG
         {Ctx, ReorderedApps} = rebar_compiler:analyze_all(DAG, Apps),
         lists:foreach(
             fun(AppInfo) ->
                 %% only announce once per app, on the final compiler
                 DAG =:= LastDAG andalso
                     ?INFO("Compiling ~ts", [rebar_app_info:name(AppInfo)]),
                 rebar_compiler:compile_analyzed(DAG, AppInfo, Ctx)
             end,
             ReorderedApps
         )
     end || DAG <- DAGs],
    ok.
update_code_paths(State, ProjectApps) ->
ProjAppsPaths = paths_for_apps(ProjectApps),
ExtrasPaths = paths_for_extras(State, ProjectApps),

+ 92
- 0
test/rebar_compile_SUITE.erl Ver fichero

@ -18,6 +18,8 @@ all() ->
recompile_when_hrl_changes, recompile_when_included_hrl_changes,
recompile_extra_when_hrl_in_src_changes,
recompile_when_opts_included_hrl_changes,
recompile_when_foreign_included_hrl_changes,
recompile_when_foreign_behaviour_changes,
recompile_when_opts_change,
dont_recompile_when_opts_dont_change, dont_recompile_yrl_or_xrl,
delete_beam_if_source_deleted,
@ -801,6 +803,96 @@ recompile_when_opts_included_hrl_changes(Config) ->
?assert(ModTime =/= NewModTime).
%% Regression test: a header owned by ANOTHER app, pulled in through
%% -include_lib, must trigger recompilation of the including app when
%% it changes (cross-app EPP analysis).
recompile_when_foreign_included_hrl_changes(Config) ->
    AppDir = ?config(apps, Config),
    AppsDir = filename:join([AppDir, "apps"]),
    Name1 = rebar_test_utils:create_random_name("app1_"),
    Name2 = rebar_test_utils:create_random_name("app2_"),
    Vsn = rebar_test_utils:create_random_vsn(),
    rebar_test_utils:create_app(filename:join(AppsDir, Name1),
                                Name1, Vsn, [kernel, stdlib]),
    rebar_test_utils:create_app(filename:join(AppsDir, Name2),
                                Name2, Vsn, [kernel, stdlib]),
    %% app1 source include_lib's a header that lives in app2
    ExtraSrc = [<<"-module(test_header_include).\n"
                  "-export([main/0]).\n"
                  "-include_lib(\"">>, Name2, <<"/include/test_header_include.hrl\").\n"
                  "main() -> ?SOME_DEFINE.\n">>],
    ExtraHeader = <<"-define(SOME_DEFINE, true).\n">>,
    ok = filelib:ensure_dir(filename:join([AppsDir, Name1, "src", "dummy"])),
    ok = filelib:ensure_dir(filename:join([AppsDir, Name2, "include", "dummy"])),
    HeaderFile = filename:join([AppsDir, Name2, "include", "test_header_include.hrl"]),
    ok = file:write_file(filename:join([AppsDir, Name1, "src", "test_header_include.erl"]), ExtraSrc),
    ok = file:write_file(HeaderFile, ExtraHeader),
    rebar_test_utils:run_and_check(Config, [], ["compile"], {ok, [{app, Name1}]}),
    EbinDir = filename:join([AppDir, "_build", "default", "lib", Name1, "ebin"]),
    {ok, Files} = rebar_utils:list_dir(EbinDir),
    ModTime = [filelib:last_modified(filename:join([EbinDir, F]))
               || F <- Files, filename:extension(F) == ".beam"],
    %% file mtime granularity is one second, so wait out the clock tick
    timer:sleep(1000),
    NewExtraHeader = <<"-define(SOME_DEFINE, false).\n">>,
    ok = file:write_file(HeaderFile, NewExtraHeader),
    rebar_test_utils:run_and_check(Config, [], ["compile"], {ok, [{app, Name1}]}),
    {ok, NewFiles} = rebar_utils:list_dir(EbinDir),
    NewModTime = [filelib:last_modified(filename:join([EbinDir, F]))
                  || F <- NewFiles, filename:extension(F) == ".beam"],
    %% a changed beam mtime proves the dependent module was rebuilt
    ?assert(ModTime =/= NewModTime).
%% Regression test: a behaviour module defined in ANOTHER app must
%% trigger recompilation of its implementors when it changes.
recompile_when_foreign_behaviour_changes(Config) ->
    AppDir = ?config(apps, Config),
    AppsDir = filename:join([AppDir, "apps"]),
    Name1 = rebar_test_utils:create_random_name("app1_"),
    Name2 = rebar_test_utils:create_random_name("app2_"),
    Vsn = rebar_test_utils:create_random_vsn(),
    rebar_test_utils:create_app(filename:join(AppsDir, Name1),
                                Name1, Vsn, [kernel, stdlib]),
    rebar_test_utils:create_app(filename:join(AppsDir, Name2),
                                Name2, Vsn, [kernel, stdlib]),
    %% app1 implements a behaviour that lives in app2's sources
    ExtraSrc = <<"-module(test_behaviour_include).\n"
                 "-export([main/0]).\n"
                 "-behaviour(app2_behaviour).\n"
                 "main() -> 1.\n">>,
    Behaviour = <<"-module(app2_behaviour).\n"
                  "-callback main() -> term().\n">>,
    ok = filelib:ensure_dir(filename:join([AppsDir, Name1, "src", "dummy"])),
    ok = filelib:ensure_dir(filename:join([AppsDir, Name2, "src", "dummy"])),
    BehaviourFile = filename:join([AppsDir, Name2, "src", "app2_behaviour.erl"]),
    ok = file:write_file(filename:join([AppsDir, Name1, "src", "test_behaviour_include.erl"]), ExtraSrc),
    ok = file:write_file(BehaviourFile, Behaviour),
    rebar_test_utils:run_and_check(Config, [], ["compile"], {ok, [{app, Name1}]}),
    EbinDir = filename:join([AppDir, "_build", "default", "lib", Name1, "ebin"]),
    {ok, Files} = rebar_utils:list_dir(EbinDir),
    ModTime = [filelib:last_modified(filename:join([EbinDir, F]))
               || F <- Files, filename:extension(F) == ".beam"],
    %% file mtime granularity is one second, so wait out the clock tick
    timer:sleep(1000),
    NewBehaviour = <<"-module(app2_behaviour).\n"
                     "-callback main(_) -> term().\n">>,
    ok = file:write_file(BehaviourFile, NewBehaviour),
    rebar_test_utils:run_and_check(Config, [], ["compile"], {ok, [{app, Name1}]}),
    {ok, NewFiles} = rebar_utils:list_dir(EbinDir),
    NewModTime = [filelib:last_modified(filename:join([EbinDir, F]))
                  || F <- NewFiles, filename:extension(F) == ".beam"],
    %% a changed beam mtime proves the implementor was rebuilt
    ?assert(ModTime =/= NewModTime).
recompile_when_opts_change(Config) ->
AppDir = ?config(apps, Config),

+ 453
- 0
test/rebar_compiler_dag_SUITE.erl Ver fichero

@ -0,0 +1,453 @@
-module(rebar_compiler_dag_SUITE).
-compile([export_all, nowarn_export_all]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("kernel/include/file.hrl").
%% Single test group; see groups/0 for why it must run in sequence.
all() ->
    [{group, with_project}].
groups() ->
    %% The tests in this group are dirty, the order is specific
    %% and required across runs for tests to work: each case bumps a
    %% file's mtime and the later cases assert on the accumulated stamps.
    [{with_project, [sequence], [
        find_structure, app_sort,
        propagate_include_app1a, propagate_include_app1b,
        propagate_include_app2, propagate_behaviour,
        propagate_app1_ptrans, propagate_app2_ptrans,
        propagate_app2_ptrans_hrl
    ]}
    ].
init_per_suite(Config) ->
    %% force-load the compiler module the DAG callbacks dispatch to
    rebar_compiler_erl:module_info(), % ensure it is loaded
    Config.

end_per_suite(Config) ->
    Config.
%% Build a scratch umbrella project with three interdependent apps
%% (layout in project/0); the sequenced cases then mutate its files.
init_per_group(with_project, Config) ->
    NewConfig = rebar_test_utils:init_rebar_state(Config, "apps"),
    AppDir = ?config(apps, NewConfig),
    Name1 = rebar_test_utils:create_random_name("app1_"),
    Vsn1 = rebar_test_utils:create_random_vsn(),
    rebar_test_utils:create_app(filename:join([AppDir,"apps",Name1]), Name1, Vsn1, [kernel, stdlib]),
    Name2 = rebar_test_utils:create_random_name("app2_"),
    Vsn2 = rebar_test_utils:create_random_vsn(),
    rebar_test_utils:create_app(filename:join([AppDir,"apps",Name2]), Name2, Vsn2, [kernel, stdlib]),
    Name3 = rebar_test_utils:create_random_name("app3_"),
    Vsn3 = rebar_test_utils:create_random_vsn(),
    rebar_test_utils:create_app(filename:join([AppDir,"apps",Name3]), Name3, Vsn3, [kernel, stdlib]),
    %% overlay the templated sources on top of the generated skeletons
    apply_project(AppDir, [{app1, Name1}, {app2, Name2}, {app3, Name3}],
                  project()),
    [{app_names, [Name1, Name2, Name3]},
     {vsns, [Vsn1, Vsn2, Vsn3]}
     | NewConfig];
init_per_group(_, Config) ->
    Config.

end_per_group(_, Config) ->
    Config.
%% Templated file layout for the three test apps; `{{appN}}' markers
%% are substituted with the generated app names by apply_template/2.
%% Dependency shape: app1 includes two local hrls and a foreign hrl
%% from app2, uses a local parse transform (app1_trans) and a foreign
%% one (app3), and implements the app2 behaviour; app3 pulls app2's
%% hrl transitively through include_lib.
project() ->
    [{app1, [
        {"src/app1.erl",
         "-module(app1).\n"
         "-include(\"app1_a.hrl\").\n"
         "-include(\"app1_b.hrl\").\n"
         "-include_lib(\"{{app2}}/include/app2.hrl\").\n"
         "-compile({parse_transform, app1_trans}).\n"
         "-compile({parse_transform, {app3, []}}).\n"
         "-behaviour(app2).\n"
         "-export([cb/0]).\n"
         "cb() -> {?APP1A, ?APP1B, ?APP2}.\n"},
        %% NOTE(review): ".n" below looks like a typo for ".\n"; the
        %% generated file is only EPP-analyzed, never compiled, so the
        %% tests pass either way — confirm intended.
        {"src/app1_trans.erl",
         "-module(app1_trans).n"
         "-export([parse_transform/2]).\n"
         "parse_transform(Forms, _Opts) -> Forms.\n"},
        {"src/app1_a.hrl",
         "-define(APP1A, 1).\n"},
        {"include/app1_b.hrl",
         "-define(APP1B, 1).\n"}
    ]},
    {app2, [
        {"src/app2.erl",
         "-module(app2).\n"
         "-callback cb() -> term().\n"},
        {"include/app2.hrl",
         "-include(\"app2_resolve.hrl\").\n"
         "-define(APP2, 1).\n"},
        %% lives in src/, so a foreign app including app2.hrl cannot
        %% resolve it — exercised by find_structure/1
        {"src/app2_resolve.hrl",
         "this file should be found but never is"},
        %% never referenced by any source file at all
        {"include/never_found.hrl",
         "%% just comments"}
    ]},
    {app3, [
        {"src/app3.erl",
         "-module(app3).\n"
         "-include_lib(\"{{app2}}/include/app2.hrl\").\n"
         "-include(\"app3_resolve.hrl\").\n"
         "-export([parse_transform/2]).\n"
         "parse_transform(Forms, _Opts) -> Forms.\n"},
        {"src/app3_resolve.hrl",
         "%% this file should be found"}
    ]}
    ].
find_structure() ->
    [{doc, "ensure a proper digraph is built with all files"}].

%% Builds the DAG from scratch and checks both the vertex set (every
%% resolvable file, with its stamp) and the edge set (every dependency).
%% Fix: the expected-matches list contained "/include/app2.hrl" twice;
%% the duplicate entry was redundant and has been removed.
find_structure(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    Edges = [{V1,V2} || E <- digraph:edges(G),
                        {_,V1,V2,_} <- [digraph:edge(G, E)]],
    %% All timestamps are the same since we just created the thing
    {_, Stamp} = hd(FileStamps),
    Matches = [
        {"/src/app1.erl", Stamp},
        {"/src/app1_trans.erl", Stamp},
        {"/src/app1_a.hrl", Stamp},
        {"/include/app1_b.hrl", Stamp},
        {"/src/app2.erl", Stamp},
        {"/include/app2.hrl", Stamp},
        {"/src/app3.erl", Stamp},
        {"/src/app3_resolve.hrl", Stamp}
    ],
    matches(Matches, FileStamps),
    %% unresolvable headers must not end up as vertices at all
    ?assertEqual(undefined, find_match(".*/never_found.hrl", FileStamps)),
    ?assertEqual(undefined, find_match(".*/app2_resolve.hrl", FileStamps)),
    ct:pal("Edges: ~p", [Edges]),
    edges([
        {"/src/app1.erl", "/src/app1_a.hrl"},
        {"/src/app1.erl", "/include/app1_b.hrl"},
        {"/src/app1.erl", "/src/app2.erl"},
        {"/src/app1.erl", "/include/app2.hrl"},
        {"/src/app1.erl", "/src/app1_trans.erl"},
        {"/src/app1.erl", "/src/app3.erl"},
        {"/src/app3.erl", "/include/app2.hrl"},
        {"/src/app3.erl", "/src/app3_resolve.hrl"}
    ], Edges, FileStamps),
    ok.
app_sort() ->
    [{doc, "once the digraph is complete, we can sort apps by dependency order"}].

app_sort(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    analyze_apps(G, AppNames, AppDir),
    AppPaths = [
        {AppName, filename:join([AppDir, "apps", AppName])} || AppName <- AppNames
    ],
    %% app1 depends on app2 (behaviour + hrl) and app3 (parse transform),
    %% and app3 depends on app2's hrl, so the order must be app2, app3, app1
    ?assertEqual([lists:nth(2, AppNames),
                  lists:nth(3, AppNames),
                  lists:nth(1, AppNames)],
                 rebar_compiler_dag:compile_order(G, AppPaths)),
    ok.
propagate_include_app1a() ->
    [{doc, "changing the app1a header file propagates to its dependents"}].

propagate_include_app1a(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(1, AppNames), "src/app1_a.hrl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% two distinct stamps now: the bumped header and its dependent
    %% (app1.erl) carry the new stamp, everything else keeps the original
    [Stamp1, Stamp2] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", Stamp2},
        {"/src/app1_trans.erl", Stamp1},
        {"/src/app1_a.hrl", Stamp2},
        {"/include/app1_b.hrl", Stamp1},
        {"/src/app2.erl", Stamp1},
        {"/include/app2.hrl", Stamp1},
        {"/src/app3.erl", Stamp1},
        {"/src/app3_resolve.hrl", Stamp1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_include_app1b() ->
    [{doc, "changing the app1b header file propagates to its dependents"}].

propagate_include_app1b(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(1, AppNames), "include/app1_b.hrl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% three stamp generations: the original files (Stamp1), app1_a.hrl
    %% bumped by the previous sequenced case (Stamp2), and this bump
    %% plus its dependent app1.erl (Stamp3)
    [Stamp1, Stamp2, Stamp3] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", Stamp3},
        {"/src/app1_trans.erl", Stamp1},
        {"/src/app1_a.hrl", Stamp2},
        {"/include/app1_b.hrl", Stamp3},
        {"/src/app2.erl", Stamp1},
        {"/include/app2.hrl", Stamp1},
        {"/src/app3.erl", Stamp1},
        {"/src/app3_resolve.hrl", Stamp1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_include_app2() ->
    [{doc, "changing the app2 header file propagates to its dependents"}].

propagate_include_app2(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(2, AppNames), "include/app2.hrl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% four stamp generations by now (sequenced group); the bumped
    %% foreign header propagates to BOTH includers, app1.erl and app3.erl
    [S1, S2, S3, S4] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", S4},
        {"/src/app1_trans.erl", S1},
        {"/src/app1_a.hrl", S2},
        {"/include/app1_b.hrl", S3},
        {"/src/app2.erl", S1},
        {"/include/app2.hrl", S4},
        {"/src/app3.erl", S4},
        {"/src/app3_resolve.hrl", S1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_behaviour() ->
    [{doc, "changing the behaviour file propagates to its dependents"}].

propagate_behaviour(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(2, AppNames), "src/app2.erl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% five stamp generations; the bumped behaviour module propagates
    %% to its implementor app1.erl only
    [S1, S2, S3, S4, S5] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", S5},
        {"/src/app1_trans.erl", S1},
        {"/src/app1_a.hrl", S2},
        {"/include/app1_b.hrl", S3},
        {"/src/app2.erl", S5},
        {"/include/app2.hrl", S4},
        {"/src/app3.erl", S4},
        {"/src/app3_resolve.hrl", S1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_app1_ptrans() ->
    [{doc, "changing an app-local parse transform propagates to its dependents"}].

propagate_app1_ptrans(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(1, AppNames), "src/app1_trans.erl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% six stamp generations; the bumped local parse transform
    %% propagates to its user app1.erl
    [S1, S2, S3, S4, S5, S6] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", S6},
        {"/src/app1_trans.erl", S6},
        {"/src/app1_a.hrl", S2},
        {"/include/app1_b.hrl", S3},
        {"/src/app2.erl", S5},
        {"/include/app2.hrl", S4},
        {"/src/app3.erl", S4},
        {"/src/app3_resolve.hrl", S1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_app2_ptrans() ->
    [{doc, "changing an app-foreign parse transform propagates to its dependents"}].

propagate_app2_ptrans(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(3, AppNames), "src/app3.erl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% seven stamp generations; the bumped foreign parse transform
    %% (app3) propagates to its user app1.erl
    [S1, S2, S3, S4, S5, S6, S7] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", S7},
        {"/src/app1_trans.erl", S6},
        {"/src/app1_a.hrl", S2},
        {"/include/app1_b.hrl", S3},
        {"/src/app2.erl", S5},
        {"/include/app2.hrl", S4},
        {"/src/app3.erl", S7},
        {"/src/app3_resolve.hrl", S1}
    ],
    matches(Matches, FileStamps),
    ok.
propagate_app2_ptrans_hrl() ->
    %% the app-foreign ptrans' foreign hrl dep is tested by propagate_include_app2 as well
    [{doc, "changing an app-foreign parse transform's local hrl propagates to its dependents"}].

propagate_app2_ptrans_hrl(Config) ->
    AppDir = ?config(apps, Config),
    AppNames = ?config(app_names, Config),
    %% assume an empty graph
    G = digraph:new([acyclic]),
    next_second(),
    F = filename:join([AppDir, "apps", lists:nth(3, AppNames), "src/app3_resolve.hrl"]),
    bump_file(F),
    analyze_apps(G, AppNames, AppDir),
    FileStamps = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    %% the hrl bump overwrites app3.erl's and app1.erl's previous stamps,
    %% so only six distinct generations remain
    %% S1 and S7 are gone from the propagation now
    [S2, S3, S4, S5, S6, S8] = lists:usort([S || {_, S} <- FileStamps]),
    Matches = [
        {"/src/app1.erl", S8},
        {"/src/app1_trans.erl", S6},
        {"/src/app1_a.hrl", S2},
        {"/include/app1_b.hrl", S3},
        {"/src/app2.erl", S5},
        {"/include/app2.hrl", S4},
        {"/src/app3.erl", S8},
        {"/src/app3_resolve.hrl", S8}
    ],
    matches(Matches, FileStamps),
    ok.
%%%%%%%%%%%%%%%
%%% HELPERS %%%
%%%%%%%%%%%%%%%
%% Materialize every templated file of every app under BaseDir,
%% preserving the order in which files are listed.
apply_project(BaseDir, Names, Apps) ->
    lists:foreach(
        fun({AppName, Files}) ->
            [apply_file(BaseDir, Names, AppName, File) || File <- Files]
        end,
        Apps
    ).
%% Render one templated file into the named app's directory.
%% Fix: the file:write_file/2 result was previously discarded, letting
%% a failed fixture write surface only as a confusing later test
%% failure; assert `ok' so setup fails loudly instead.
apply_file(BaseDir, Names, App, {FileName, Contents}) ->
    AppName = proplists:get_value(App, Names),
    FilePath = filename:join([BaseDir, "apps", AppName, FileName]),
    ok = filelib:ensure_dir(FilePath),
    ok = file:write_file(FilePath, apply_template(Contents, Names)).
%% Substitute every "{{name}}" marker in the template with the
%% generated app name bound to `name' in the Names proplist.
apply_template([], _Names) ->
    [];
apply_template("{{" ++ Tail, Names) ->
    {VarName, Remainder} = parse_to_var(Tail),
    Substitution = proplists:get_value(list_to_atom(VarName), Names),
    Substitution ++ apply_template(Remainder, Names);
apply_template([Char | Rest], Names) ->
    [Char | apply_template(Rest, Names)].
%% Scan forward to the closing "}}" marker, returning the accumulated
%% variable name and the rest of the template string.
parse_to_var(Str) ->
    parse_to_var(Str, "").

parse_to_var("}}" ++ Remainder, Acc) ->
    {lists:reverse(Acc), Remainder};
parse_to_var([Char | Rest], Acc) ->
    parse_to_var(Rest, [Char | Acc]).
%% Run the whole analysis pipeline over the three apps and leave the
%% resulting dependency DAG in G, mirroring what rebar_compiler does.
analyze_apps(G, AppNames, AppDir) ->
    populate_app(G, lists:nth(1, AppNames), AppNames, AppDir, ["app1.erl", "app1_trans.erl"]),
    populate_app(G, lists:nth(2, AppNames), AppNames, AppDir, ["app2.erl"]),
    populate_app(G, lists:nth(3, AppNames), AppNames, AppDir, ["app3.erl"]),
    rebar_compiler_dag:populate_deps(G, ".erl", [{".beam", "ebin/"}]),
    rebar_compiler_dag:propagate_stamps(G),
    %% manually clear the dirty bit for ease of validation
    digraph:del_vertex(G, '$r3_dirty_bit').
%% Register one app's source files in the DAG. The search dirs span
%% every app's src/ and include/ so cross-app deps can be expanded,
%% while `includes' only lists the app's own dirs and `include_libs'
%% points at the umbrella apps/ root for -include_lib resolution.
populate_app(G, Name, AppNames, AppDir, Sources) ->
    InDirs = [filename:join([AppDir, "apps", AppName, "src"])
              || AppName <- AppNames]
             ++ [filename:join([AppDir, "apps", AppName, "include"])
                 || AppName <- AppNames],
    AbsSources = [filename:join([AppDir, "apps", Name, "src", Src])
                  || Src <- Sources],
    DepOpts = [{includes,
                [filename:join([AppDir, "apps", Name, "src"]),
                 filename:join([AppDir, "apps", Name, "include"])
                ]},
               {include_libs, [filename:join([AppDir, "apps"])]}
              ],
    rebar_compiler_dag:populate_sources(
        G, rebar_compiler_erl,
        InDirs, AbsSources, DepOpts
    ).
%% Return {ok, File} for the first file whose name matches Regex,
%% or undefined when no vertex name matches.
find_match(Regex, FileStamps) ->
    find_first_match(Regex, FileStamps).

find_first_match(_Regex, []) ->
    undefined;
find_first_match(Regex, [{File, _Stamp} | Rest]) ->
    case re:run(File, Regex) of
        nomatch -> find_first_match(Regex, Rest);
        _ -> {ok, File}
    end.
%% Assert that every {Regex, Stamp} expectation resolves to a vertex
%% whose stamp is exactly Stamp.
matches([], _) ->
    ok;
matches([{R, Stamp} | T], FileStamps) ->
    case find_match(R, FileStamps) of
        {ok, F} ->
            ?assertEqual(Stamp, proplists:get_value(F, FileStamps)),
            matches(T, FileStamps);
        undefined ->
            %% deliberate mismatch so CT prints the failed expectation
            %% alongside the full stamp list
            ?assertEqual({R, Stamp}, FileStamps)
    end.
%% Assert that each {FromRegex, ToRegex} pair resolves (via the vertex
%% list) to an actual edge of the graph.
edges([], _, _) ->
    ok;
edges([{A,B}|T], Edges, Files) ->
    {ok, AbsA} = find_match(A, Files),
    {ok, AbsB} = find_match(B, Files),
    ?assert(lists:member({AbsA, AbsB}, Edges)),
    edges(T, Edges, Files).
%% Append a newline to F so its contents (and mtime) change, forcing
%% the analysis to consider it modified.
%% Fix: the file:write_file/2 result was discarded; assert `ok' so a
%% failed bump fails the test immediately instead of silently leaving
%% the file untouched.
bump_file(F) ->
    {ok, Bin} = file:read_file(F),
    ok = file:write_file(F, [Bin, "\n"]).
next_second() ->
    %% Sleep until the next second. Rather than just doing a
    %% sleep(1000) call, sleep for the amount of time required
    %% to reach the next second as seen by the OS; this can save us
    %% a few hundred milliseconds per test by triggering shorter delays.
    {Mega, Sec, Micro} = os:timestamp(),
    %% current time in milliseconds
    Now = (Mega*1000000 + Sec)*1000 + round(Micro/1000),
    %% milliseconds remaining until the next whole second
    Ms = (trunc(Now / 1000)*1000 + 1000) - Now,
    %% add a 50ms for jitter since the exact amount sometimes causes failures
    %% NOTE(review): max/2 forces a sleep of at least 1000ms, which
    %% defeats the shorter-delay optimization described above; min/2
    %% may have been intended — confirm before changing.
    timer:sleep(max(Ms+50, 1000)).

+ 300
- 0
test/rebar_compiler_epp_SUITE.erl Ver fichero

@ -0,0 +1,300 @@
%%% @doc
%%% Unit tests for epp-related compiler utils.
%%% Make it easier to validate internal behaviour of compiler data and
%%% handling of module parsing without having to actually set up
%%% entire projects.
%%% @end
-module(rebar_compiler_epp_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-compile([export_all, nowarn_export_all]).
%% Single group: unit tests over rebar_compiler_epp module analysis.
all() ->
    [{group, module}].
%% No ordering constraints; each case works on its own fixture file.
groups() ->
    [{module, [], [
        analyze, analyze_old_behaviour, analyze_old_behavior,
        analyze_empty, analyze_bad_mod,
        resolve_module
    ]}
    ].
%% Create the header fake_mod() includes directly, so its
%% -include("direct.hrl") dependency is resolvable from priv_dir.
init_per_group(module, Config) ->
    to_file(Config, {"direct.hrl", "-direct(val). "}),
    Config;
init_per_group(_, Config) ->
    Config.

end_per_group(_, Config) ->
    Config.
%% No per-testcase setup or teardown needed; cases only read fixtures.
init_per_testcase(_, Config) ->
    Config.

end_per_testcase(_, Config) ->
    Config.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% module analysis group %%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
analyze() ->
    %% NOTE(review): CT's info tag is `doc', not `docs'; this metadata
    %% is likely ignored by Common Test — confirm.
    [{docs, "Analyzing a module returns all the "
            "parseable dependencies for it in a map."}].

analyze(Config) ->
    ?assert(check_analyze(
        %% expected values are regexes (strings) or literals (atoms)
        #{include => [
            "eunit-[0-9.]+/include/eunit.hrl$",
            "stdlib-[0-9.]+/include/assert.hrl$",
            %% fake_mod() includes direct.hrl twice; deps are deduplicated
            "/direct.hrl$"
          ],
          %% missing includes
          missing_include_file => [
            %% the -else branch of the unset OPT macro
            "^false.hrl$"
          ],
          missing_include_lib => [
            "^some_app/include/lib.hrl$"
          ],
          parse_transform => [
            erl_id_trans,
            eunit_autoexport, % added by include file!
            missing_parse_trans1,
            missing_parse_trans2
          ],
          behaviour => [gen_server, gen_statem],
          %% -callback attributes mark the module as a behaviour itself
          is_behaviour => true
        },
        rebar_compiler_epp:deps(
            to_file(Config, fake_mod()),
            [{includes, []}, {macros, []}]
        )
    )),
    ok.
analyze_old_behaviour() ->
    [{docs, "Analyzing old-style behaviour annotation"}].

%% A bare behaviour_info/1 function (pre -callback style) must still
%% flag the module as a behaviour, with no other deps.
analyze_old_behaviour(Config) ->
    ?assert(check_analyze(
        #{include => [],
          missing_include_file => [],
          missing_include_lib => [],
          parse_transform => [],
          behaviour => [],
          is_behaviour => true
        },
        rebar_compiler_epp:deps(
            to_file(Config, old_behaviour_mod()),
            [{includes, []}, {macros, []}]
        )
    )),
    ok.
analyze_old_behavior() ->
    [{docs, "Analyzing old-style behavior annotation"}].

%% Same as analyze_old_behaviour, but with the US spelling
%% behavior_info/1, which must be recognized too.
analyze_old_behavior(Config) ->
    ?assert(check_analyze(
        #{include => [],
          missing_include_file => [],
          missing_include_lib => [],
          parse_transform => [],
          behaviour => [],
          is_behaviour => true
        },
        rebar_compiler_epp:deps(
            to_file(Config, old_behavior_mod()),
            [{includes, []}, {macros, []}]
        )
    )),
    ok.
analyze_empty() ->
    [{docs, "Making sure empty files are properly handled as valid but null "
            "and let some other compiler phase handle this. We follow "
            "what EPP handles."}].

analyze_empty(Config) ->
    ?assert(check_analyze(
        #{include => [],
          missing_include_file => [],
          missing_include_lib => [],
          parse_transform => [],
          behaviour => [],
          is_behaviour => false
        },
        rebar_compiler_epp:deps(
            to_file(Config, empty_mod()),
            [{includes, []}, {macros, []}]
        )
    )),
    ok.
analyze_bad_mod() ->
    [{docs, "Errors for bad modules that don't compile are skipped "
            "by EPP and so we defer that to a later phase of the "
            "compilation process"}].

analyze_bad_mod(Config) ->
    ?assert(check_analyze(
        #{include => [],
          missing_include_file => [],
          missing_include_lib => [],
          parse_transform => [],
          behaviour => [],
          is_behaviour => false
        },
        rebar_compiler_epp:deps(
            to_file(Config, bad_mod()),
            [{includes, []}, {macros, []}]
        )
    )),
    ok.
resolve_module() ->
    [{doc, "given a module name and a bunch of paths, find "
           "the first path that matches the module"}].

%% resolution is by file basename, not by file contents
resolve_module(Config) ->
    Path1 = to_file(Config, fake_mod()),
    Path2 = to_file(Config, old_behaviour_mod()),
    Path3 = to_file(Config, empty_mod()),
    ?assertEqual(
        {ok, Path2},
        rebar_compiler_epp:resolve_module(
            old_behaviour,
            [Path1, Path2, Path3]
        )
    ),
    ok.
%%%%%%%%%%%%%%%
%%% HELPERS %%%
%%%%%%%%%%%%%%%
%% check each field of `Map' and validate them against `CheckMap'.
%% This allows to check each value in the map has a matching assertion.
%% Then check each field of `CheckMap' against `Map' to find if
%% any missing value exists.
check_analyze(CheckMap, Map) ->
    ct:pal("check_analyze:~n~p~n~p", [CheckMap, Map]),
    %% pass 1: every key in the actual result has a matching assertion
    maps:fold(fun(K,V,Acc) -> check(CheckMap, K, V) and Acc end,
              true, Map)
    andalso
    %% pass 2: every asserted key exists in the result; the make_ref()
    %% default can never satisfy compare_val/2, so an absent key fails.
    %% Fix: this pass previously folded over `Map' again instead of
    %% `CheckMap', so keys missing from the result were never detected.
    maps:fold(
        fun(K,_,Acc) ->
            check(CheckMap, K, maps:get(K, Map, make_ref())) and Acc
        end,
        true,
        CheckMap
    ).
%% A key must be present in the expectation map and its asserted value
%% must match; a key with no assertion is an automatic failure.
check(Map, K, V) ->
    case maps:find(K, Map) of
        {ok, Expected} -> compare_val(Expected, V);
        error -> false
    end.
%% two identical values always works
compare_val(V, V) ->
    true;
%% compare lists of strings; each string must be checked individually
%% because they are assumed to be regexes.
%% (hd/1 raising in a guard just skips the clause, so [] or a non-list
%% V1 falls through to the clauses below)
compare_val(V1, V2) when is_list(hd(V1)) ->
    match_regexes(V1, V2);
compare_val(V1, _V2) when not is_integer(hd(V1)) ->
    %% failing list of some sort, but not a string
    false;
%% strings as regexes
compare_val(V1, V2) when is_list(V1) ->
    match_regex(V1, [V2]) =/= nomatch;
%% anything else is not literally the same and is bad
compare_val(_, _) ->
    false.
%% Every pattern must consume exactly one candidate, and every
%% candidate must be consumed by some pattern.
match_regexes([], Remaining) ->
    Remaining == []; % leftovers mean unasserted entries, that would be weird
match_regexes([Pattern | Patterns], Candidates) ->
    case match_regex(Pattern, Candidates) of
        {ok, Consumed} ->
            match_regexes(Patterns, Candidates -- [Consumed]);
        nomatch ->
            false
    end.
%% Return {ok, Candidate} for the first list entry matching Pattern,
%% or nomatch when none does.
match_regex(_Pattern, []) ->
    nomatch;
match_regex(Pattern, [Candidate | Rest]) ->
    case re:run(Candidate, Pattern) of
        nomatch -> match_regex(Pattern, Rest);
        _Match -> {ok, Candidate}
    end.
%% custom zip function that causes value failures (by using make_ref()
%% that will never match in compare_val/2) rather than crashing because
%% of lists of different lengths.
zip([X | Xs], [Y | Ys]) -> [{X, Y} | zip(Xs, Ys)];
zip([X | Xs], []) -> [{X, make_ref()} | zip(Xs, [])];
zip([], [Y | Ys]) -> [{make_ref(), Y} | zip([], Ys)];
zip([], []) -> [].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Module specifications %%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% turn a module string to a file that will live in CT's scratch dir,
%% returning the absolute path written.
%% Fix: the file:write_file/3 result was discarded, so a failed
%% fixture write would only show up as a confusing downstream test
%% failure; assert `ok' to fail fast instead.
to_file(Config, {Name,Contents}) ->
    Path = filename:join([?config(priv_dir, Config), Name]),
    ok = file:write_file(Path, Contents, [sync]),
    Path.
%% base module with all the interesting includes and attributes
%% we want to track: a duplicated direct include, a missing and a
%% present include_lib, parse transforms in all three -compile forms,
%% both behaviour spellings, a -callback (is_behaviour), and a
%% conditional include whose OPT macro is unset (so false.hrl is the
%% missing branch).
fake_mod() ->
    {"somemod.erl", "
-module(somemod).
-export([f/1]).
-include(\"direct.hrl\").
-include(\"direct.hrl\").
-include_lib(\"some_app/include/lib.hrl\").
-include_lib(\"eunit/include/eunit.hrl\").
-compile({parse_transform, {erl_id_trans, []}}).
-compile({parse_transform, missing_parse_trans1}).
-compile([{parse_transform, {missing_parse_trans2, []}}]).
-behaviour(gen_server).
-behavior(gen_statem).
-callback f() -> ok.
-ifdef(OPT).
-include(\"true.hrl\").
-else.
-include(\"false.hrl\").
-endif.
f(X) -> X.
"}.
%% variations for attributes that can't be checked in the
%% same base module
%% old-style behaviour: exports behaviour_info/1 instead of -callback
old_behaviour_mod() ->
    {"old_behaviour.erl", "
-module(old_behaviour).
-export([f/1, behaviour_info/1]).
f(X) -> X.
behaviour_info(callbacks) -> [{f,1}].
"}.
%% US-spelled variant: defines behavior_info/1.
%% NOTE(review): the file name is the same as old_behaviour_mod()'s
%% ("old_behaviour.erl"), so writing both overwrites one fixture with
%% the other — harmless as each case re-writes it, but confirm intended.
old_behavior_mod() ->
    {"old_behaviour.erl", "
-module(old_behaviour).
-export([f/1, behaviour_info/1]).
f(X) -> X.
behavior_info(callbacks) -> [{f,1}].
"}.
%% a completely empty source file; EPP treats it as valid-but-null
empty_mod() ->
    {"empty.erl", ""}.
%% a syntactically broken module; EPP skips the bad forms, so analysis
%% must still succeed with empty results
bad_mod() ->
    {"badmod.erl", "
-module(bad_mod). % wrong name!
f(x) -> X+1. % bad vars
f((x)cv) -> bad syntax.
"}.

Cargando…
Cancelar
Guardar