@ -1,3 +1,4 @@ | |||
{{=@@ @@=}} | |||
[ | |||
{ {{name}}, []} | |||
{@@name@@, []} | |||
]. |
@ -1,24 +1,28 @@ | |||
{"1.1.0", | |||
[{<<"bbmustache">>,{pkg,<<"bbmustache">>,<<"1.5.0">>},0}, | |||
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.0.0">>},0}, | |||
[{<<"bbmustache">>,{pkg,<<"bbmustache">>,<<"1.6.0">>},0}, | |||
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.3.1">>},0}, | |||
{<<"cf">>,{pkg,<<"cf">>,<<"0.2.2">>},0}, | |||
{<<"cth_readable">>,{pkg,<<"cth_readable">>,<<"1.4.2">>},0}, | |||
{<<"erlware_commons">>,{pkg,<<"erlware_commons">>,<<"1.2.0">>},0}, | |||
{<<"erlware_commons">>,{pkg,<<"erlware_commons">>,<<"1.3.0">>},0}, | |||
{<<"eunit_formatters">>,{pkg,<<"eunit_formatters">>,<<"0.5.0">>},0}, | |||
{<<"getopt">>,{pkg,<<"getopt">>,<<"1.0.1">>},0}, | |||
{<<"hex_core">>,{pkg,<<"hex_core">>,<<"0.2.0">>},0}, | |||
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.0">>},0}, | |||
{<<"providers">>,{pkg,<<"providers">>,<<"1.7.0">>},0}, | |||
{<<"relx">>,{pkg,<<"relx">>,<<"3.26.0">>},0}, | |||
{<<"relx">>,{pkg,<<"relx">>,<<"3.27.0">>},0}, | |||
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.3">>},0}]}. | |||
[ | |||
{pkg_hash,[ | |||
{<<"bbmustache">>, <<"8CFDE0602E90A4057E161BF5288ADE854B4E511E2E8924966A8438730E958381">>}, | |||
{<<"certifi">>, <<"A0C0E475107135F76B8C1D5BC7EFB33CD3815CB3CF3DEA7AEFDD174DABEAD064">>}, | |||
{<<"bbmustache">>, <<"7AC372AEC621A69C369DF237FBD9986CAABCDD6341089FE5F42E5A7A4AC706B8">>}, | |||
{<<"certifi">>, <<"D0F424232390BF47D82DA8478022301C561CF6445B5B5FB6A84D49A9E76D2639">>}, | |||
{<<"cf">>, <<"7F2913FFF90ABCABD0F489896CFEB0B0674F6C8DF6C10B17A83175448029896C">>}, | |||
{<<"cth_readable">>, <<"0F57B4EB7DA7F5438F422312245F9143A1B3118C11B6BAE5C3D1391C9EE88322">>}, | |||
{<<"erlware_commons">>, <<"2BAB99CF88941145767A502F1209886F1F0D31695EEF21978A30F15E645721E0">>}, | |||
{<<"erlware_commons">>, <<"1705CF2AB4212EF235C21971A55E22E2A39055C05B9C65C8848126865F42A07A">>}, | |||
{<<"eunit_formatters">>, <<"6A9133943D36A465D804C1C5B6E6839030434B8879C5600D7DDB5B3BAD4CCB59">>}, | |||
{<<"getopt">>, <<"C73A9FA687B217F2FF79F68A3B637711BB1936E712B521D8CE466B29CBF7808A">>}, | |||
{<<"hex_core">>, <<"3A7EACCFB8ADD3FF05D950C10ED5BDB5D0C48C988EBBC5D7AE2A55498F0EFF1B">>}, | |||
{<<"parse_trans">>, <<"09765507A3C7590A784615CFD421D101AEC25098D50B89D7AA1D66646BC571C1">>}, | |||
{<<"providers">>, <<"BBF730563914328EC2511D205E6477A94831DB7297DE313B3872A2B26C562EAB">>}, | |||
{<<"relx">>, <<"DD645ECAA1AB1647DB80D3E9BCAE0B39ED0A536EF37245F6A74B114C6D0F4E87">>}, | |||
{<<"relx">>, <<"96CC7663EDCC02A8117AB0C64FE6D15BE79760C08726ABEAD1DAACE11BFBF75D">>}, | |||
{<<"ssl_verify_fun">>, <<"6C49665D4326E26CD4A5B7BD54AA442B33DADFB7C5D59A0D0CD0BF5534BBFBD7">>}]} | |||
]. |
@ -0,0 +1,303 @@ | |||
%% @doc Compiler behaviour and driver: runs pluggable compiler modules
%% (erl, mib, xrl, yrl, ...) over an application's sources, persisting a
%% dependency digraph ("DAG") so only out-of-date files are rebuilt.
-module(rebar_compiler).

-export([compile_all/2,
         clean/2,
         ok_tuple/2,
         error_tuple/4,
         maybe_report/1,
         format_error_source/2,
         report/1]).

-include("rebar.hrl").

%% A source-file extension such as ".erl" or ".mib".
-type extension() :: string().
%% Maps an artifact extension to the directory that artifact is written to.
-type out_mappings() :: [{extension(), file:filename()}].

-callback context(rebar_app_info:t()) -> #{src_dirs => [file:dirname()],
                                           include_dirs => [file:dirname()],
                                           src_ext => extension(),
                                           out_mappings => out_mappings()}.
%% FIX: the previous spec declared a plain file list, but every
%% implementation (and the caller in run/2) uses the two-stage
%% {{FirstFiles, FirstOpts}, {RestFiles, RestOpts}} shape.
-callback needed_files(digraph:graph(), [file:filename()], rebar_app_info:t()) ->
    {{[file:filename()], list()}, {[file:filename()], list()}}.
-callback dependencies(file:filename(), file:dirname(), [file:dirname()]) -> [file:filename()].
-callback compile(file:filename(), out_mappings(), rebar_dict(), list()) ->
    ok | {ok, [string()]} | {ok, [string()], [string()]}.

%% Version tag of the on-disk DAG format; bump to invalidate stale files.
-define(DAG_VSN, 2).
-define(DAG_FILE, "source.dag").
-type dag_v() :: {digraph:vertex(), term()} | 'false'.
-type dag_e() :: {digraph:vertex(), digraph:vertex()}.
%% {Vertices, Edges, IncludeDirs} as persisted to disk.
-type dag() :: {list(dag_v()), list(dag_e()), list(string())}.
-record(dag, {vsn = ?DAG_VSN :: pos_integer(),
              info = {[], [], []} :: dag()}).
%% Regex prefix excluding "._"-prefixed files (macOS metadata artifacts).
-define(RE_PREFIX, "^(?!\\._)").
%% @doc Run every compiler module over the app, including its configured
%% extra_src_dirs. The ebin dir is created and put on the code path first.
compile_all(Compilers, AppInfo) ->
    OutEbin = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
    ok = rebar_file_utils:ensure_dir(OutEbin),
    %% Freshly built modules must be loadable by later compile steps.
    true = code:add_patha(filename:absname(OutEbin)),
    %% necessary for erlang:function_exported/3 to work as expected
    %% called here for clarity as it's required by both opts_changed/2
    %% and erl_compiler_opts_set/0 in needed_files
    _ = code:ensure_loaded(compile),
    RunOne = fun(CompilerMod) ->
                     run(CompilerMod, AppInfo),
                     run_on_extra_src_dirs(CompilerMod, AppInfo, fun run/2)
             end,
    lists:foreach(RunOne, Compilers),
    ok.
%% Drive one compiler module over the app: query its context, find the
%% sources, build/restore the dependency DAG, then compile whatever the
%% module reports as stale ("first files" before the rest).
run(CompilerMod, AppInfo) ->
    #{src_dirs := SrcDirs,
      include_dirs := InclDirs,
      src_ext := SrcExt,
      out_mappings := Mappings} = CompilerMod:context(AppInfo),
    BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)),
    EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)),
    BaseOpts = rebar_app_info:opts(AppInfo),
    AbsInclDirs = [filename:join(BaseDir, InclDir) || InclDir <- InclDirs],
    FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, BaseOpts),
    OutDir = rebar_app_info:out_dir(AppInfo),
    AbsSrcDirs = [filename:join(BaseDir, SrcDir) || SrcDir <- SrcDirs],
    %% The DAG is only needed to compute needed_files; delete it right after.
    G = init_dag(CompilerMod, AbsInclDirs, AbsSrcDirs, FoundFiles, OutDir, EbinDir),
    {{FirstFiles, FirstFileOpts}, {RestFiles, Opts}} = CompilerMod:needed_files(G, FoundFiles, AppInfo),
    true = digraph:delete(G),
    %% First files (e.g. parse transforms) must be built before the rest.
    compile_each(FirstFiles, FirstFileOpts, BaseOpts, Mappings, CompilerMod),
    compile_each(RestFiles, Opts, BaseOpts, Mappings, CompilerMod).
%% Compile each file in turn via CompilerMod:compile/4, reporting
%% warnings and aborting the whole run on the first hard error.
compile_each([], _Opts, _Config, _Outs, _CompilerMod) ->
    ok;
compile_each([Source | Rest], Opts, Config, Outs, CompilerMod) ->
    case CompilerMod:compile(Source, Outs, Config, Opts) of
        ok ->
            ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
        {ok, Warnings} ->
            report(Warnings),
            ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
        {ok, Warnings, _Other} ->
            %% FIX: third success shape allowed by the compile/4 callback
            %% spec; previously it fell through to the error clause and
            %% aborted the build even though compilation succeeded.
            report(Warnings),
            ?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
        skipped ->
            ?DEBUG("~tsSkipped ~ts", [rebar_utils:indent(1), filename:basename(Source)]);
        Error ->
            NewSource = format_error_source(Source, Config),
            ?ERROR("Compiling ~ts failed", [NewSource]),
            maybe_report(Error),
            ?DEBUG("Compilation failed: ~p", [Error]),
            ?FAIL
    end,
    compile_each(Rest, Opts, Config, Outs, CompilerMod).
%% @doc remove compiled artifacts from an AppDir.
-spec clean([module()], rebar_app_info:t()) -> 'ok'.
clean(Compilers, AppInfo) ->
    CleanOne = fun(CompilerMod) ->
                       clean_(CompilerMod, AppInfo),
                       run_on_extra_src_dirs(CompilerMod, AppInfo, fun clean_/2)
               end,
    lists:foreach(CleanOne, Compilers).
%% Clean the artifacts of a single compiler module and drop its DAG file.
clean_(CompilerMod, AppInfo) ->
    #{src_dirs := SrcDirs, src_ext := SrcExt} = CompilerMod:context(AppInfo),
    AppDir = rebar_app_info:dir(AppInfo),
    AppOpts = rebar_app_info:opts(AppInfo),
    BeamDir = rebar_app_info:ebin_dir(AppInfo),
    Sources = find_source_files(AppDir, SrcExt, SrcDirs, AppOpts),
    CompilerMod:clean(Sources, AppInfo),
    rebar_file_utils:rm_rf(dag_file(CompilerMod, BeamDir)).
%% Apply Fun (run/2 or clean_/2) once per configured extra_src_dir that
%% exists on disk, with the app info rewritten so the extra dir acts as
%% the sole src dir and its artifacts land in a matching out subdir.
run_on_extra_src_dirs(CompilerMod, AppInfo, Fun) ->
    ExtraDirs = rebar_dir:extra_src_dirs(rebar_app_info:opts(AppInfo), []),
    run_on_extra_src_dirs(ExtraDirs, CompilerMod, AppInfo, Fun).

run_on_extra_src_dirs([], _CompilerMod, _AppInfo, _Fun) ->
    ok;
run_on_extra_src_dirs([Dir | Dirs], CompilerMod, AppInfo, Fun) ->
    _ = case filelib:is_dir(filename:join(rebar_app_info:dir(AppInfo), Dir)) of
            true ->
                OutDir = filename:join(rebar_app_info:out_dir(AppInfo), Dir),
                WithEbin = rebar_app_info:ebin_dir(AppInfo, OutDir),
                WithSrc = rebar_app_info:set(WithEbin, src_dirs, [Dir]),
                Rewritten = rebar_app_info:set(WithSrc, extra_src_dirs, ["src"]),
                Fun(CompilerMod, Rewritten);
            false ->
                ok
        end,
    run_on_extra_src_dirs(Dirs, CompilerMod, AppInfo, Fun).
%% These functions are here for the ultimate goal of getting rid of
%% rebar_base_compiler. This can't be done because of existing plugins.

%% Wrap a source and its warnings into the standard {ok, ...} shape.
ok_tuple(Source, Ws) ->
    rebar_base_compiler:ok_tuple(Source, Ws).

%% Wrap errors/warnings into the standard {error, ...} shape.
error_tuple(Source, Es, Ws, Opts) ->
    rebar_base_compiler:error_tuple(Source, Es, Ws, Opts).

%% Print the term if it is a reportable error; otherwise do nothing.
maybe_report(Reportable) ->
    rebar_base_compiler:maybe_report(Reportable).

%% Rewrite a source path for user-facing error messages.
format_error_source(Path, Opts) ->
    rebar_base_compiler:format_error_source(Path, Opts).

%% Print compiler warnings/errors to the user.
report(Messages) ->
    rebar_base_compiler:report(Messages).
%% private functions | |||
%% Find all files under BaseDir's SrcDirs matching SrcExt, skipping
%% "._"-prefixed metadata files; per-dir recursion is opt-controlled.
find_source_files(BaseDir, SrcExt, SrcDirs, Opts) ->
    %% FIX: use the shared ?RE_PREFIX macro (defined above, previously
    %% unused) instead of duplicating the hidden-file pattern literally.
    SourceExtRe = ?RE_PREFIX ++ ".*\\" ++ SrcExt ++ [$$],
    lists:flatmap(fun(SrcDir) ->
                      Recursive = rebar_dir:recursive(Opts, SrcDir),
                      rebar_utils:find_files_in_dirs([filename:join(BaseDir, SrcDir)], SourceExtRe, Recursive)
                  end, SrcDirs).
%% Path of the persisted dependency graph for CompilerMod under Dir's
%% local cache dir (CompilerMod is an atom; filename:join accepts atoms).
dag_file(CompilerMod, Dir) ->
    filename:join([rebar_dir:local_cache_dir(Dir), CompilerMod, ?DAG_FILE]).
%% private graph functions

%% Get dependency graph of given Erls files and their dependencies (header files,
%% parse transforms, behaviours etc.) located in their directories or given
%% InclDirs. Note that last modification times stored in vertices already respect
%% dependencies induced by given graph G.
init_dag(Compiler, InclDirs, SrcDirs, Erls, Dir, EbinDir) ->
    G = digraph:new([acyclic]),
    %% A corrupt or incompatible DAG file is discarded and rebuilt.
    try restore_dag(Compiler, G, InclDirs, Dir)
    catch
        _:_ ->
            ?WARN("Failed to restore ~ts file. Discarding it.~n", [dag_file(Compiler, Dir)]),
            file:delete(dag_file(Compiler, Dir))
    end,
    Dirs = lists:usort(InclDirs ++ SrcDirs),
    %% A source file may have been renamed or deleted. Remove it from the graph
    %% and remove any beam file for that source if it exists.
    Modified = maybe_rm_beams_and_edges(G, EbinDir, Erls),
    Modified1 = lists:foldl(update_dag_fun(G, Compiler, Dirs), Modified, Erls),
    %% Only rewrite the DAG file when something actually changed.
    if Modified1 -> store_dag(Compiler, G, InclDirs, Dir); not Modified1 -> ok end,
    G.
%% Drop graph vertices whose .erl source is no longer among Files
%% (deleting the stale beam as well); return whether anything was removed.
maybe_rm_beams_and_edges(G, Dir, Files) ->
    Stale = lists:sort(digraph:vertices(G)) -- lists:sort(Files),
    Removed = [File || File <- Stale,
                       filename:extension(File) =:= ".erl",
                       maybe_rm_beam_and_edge(G, Dir, File)],
    Removed =/= [].
%% Remove a vanished source's vertex and beam; true when removed.
maybe_rm_beam_and_edge(G, OutDir, Source) ->
    %% This is NOT a double check it is the only check that the source file is actually gone
    case filelib:is_regular(Source) of
        true ->
            %% Still on disk: keep the vertex and its artifact.
            false;
        false ->
            Beam = target_base(OutDir, Source) ++ ".beam",
            ?DEBUG("Source ~ts is gone, deleting previous beam file if it exists ~ts", [Source, Beam]),
            file:delete(Beam),
            digraph:del_vertex(G, Source),
            true
    end.
%% Output path (sans extension) for a .erl source under OutDir.
target_base(OutDir, Source) ->
    Module = filename:basename(Source, ".erl"),
    filename:join(OutDir, Module).
%% Load the persisted DAG into G. The record match below doubles as a
%% validity check: a vsn mismatch or changed InclDirs fails the match,
%% raising so init_dag discards the file and rebuilds from scratch.
restore_dag(Compiler, G, InclDirs, Dir) ->
    case file:read_file(dag_file(Compiler, Dir)) of
        {ok, Data} ->
            % Since externally passed InclDirs can influence dependency graph (see
            % modify_dag), we have to check here that they didn't change.
            #dag{vsn=?DAG_VSN, info={Vs, Es, InclDirs}} =
                binary_to_term(Data),
            lists:foreach(
              fun({V, LastUpdated}) ->
                      digraph:add_vertex(G, V, LastUpdated)
              end, Vs),
            lists:foreach(
              fun({_, V1, V2, _}) ->
                      digraph:add_edge(G, V1, V2)
              end, Es);
        {error, _} ->
            %% No DAG on disk yet: start with an empty graph.
            ok
    end.
%% Persist the graph (vertices with timestamps, edges, include dirs).
store_dag(Compiler, G, InclDirs, Dir) ->
    Vs = [digraph:vertex(G, V) || V <- digraph:vertices(G)],
    Es = [digraph:edge(G, E) || E <- digraph:edges(G)],
    Path = dag_file(Compiler, Dir),
    ok = filelib:ensure_dir(Path),
    Payload = term_to_binary(#dag{info = {Vs, Es, InclDirs}}, [{compressed, 2}]),
    file:write_file(Path, Payload).
%% Refresh one source's vertex, recursing through dependencies. Returns
%% 'modified' when the vertex (or anything it depends on) changed,
%% 'unmodified' otherwise.
update_dag(G, Compiler, Dirs, Source) ->
    case digraph:vertex(G, Source) of
        {_, LastUpdated} ->
            case filelib:last_modified(Source) of
                0 ->
                    %% The file doesn't exist anymore,
                    %% erase it from the graph.
                    %% All the edges will be erased automatically.
                    digraph:del_vertex(G, Source),
                    modified;
                LastModified when LastUpdated < LastModified ->
                    %% Source is newer than what we recorded: re-scan deps.
                    modify_dag(G, Compiler, Source, LastModified, filename:dirname(Source), Dirs);
                _ ->
                    %% Source itself unchanged: recurse into dependencies
                    %% and propagate their newest timestamp up to us.
                    Modified = lists:foldl(
                        update_dag_fun(G, Compiler, Dirs),
                        false, digraph:out_neighbours(G, Source)),
                    MaxModified = update_max_modified_deps(G, Source),
                    case Modified orelse MaxModified > LastUpdated of
                        true -> modified;
                        false -> unmodified
                    end
            end;
        false ->
            %% First time we see this file: record it and its deps.
            modify_dag(G, Compiler, Source, filelib:last_modified(Source), filename:dirname(Source), Dirs)
    end.
%% Re-scan Source's dependencies via the compiler callback, replacing its
%% vertex timestamp and outgoing edges; recurses into each dependency.
modify_dag(G, Compiler, Source, LastModified, SourceDir, Dirs) ->
    AbsIncls = Compiler:dependencies(Source, SourceDir, Dirs),
    digraph:add_vertex(G, Source, LastModified),
    %% Old edges are dropped first so removed includes disappear.
    digraph:del_edges(G, digraph:out_edges(G, Source)),
    lists:foreach(
      fun(Incl) ->
              update_dag(G, Compiler, Dirs, Incl),
              digraph:add_edge(G, Source, Incl)
      end, AbsIncls),
    modified.
%% Fold helper: accumulate whether any visited file was modified.
update_dag_fun(G, Compiler, Dirs) ->
    fun(File, AnySoFar) ->
            case update_dag(G, Compiler, Dirs, File) of
                modified -> true;
                unmodified -> AnySoFar
            end
    end.
%% Store on Source's vertex the newest timestamp among itself and its
%% direct dependencies, and return that timestamp.
update_max_modified_deps(G, Source) ->
    Candidates = [Source | digraph:out_neighbours(G, Source)],
    Newest = lists:foldl(
               fun(File, Acc) ->
                       case digraph:vertex(G, File) of
                           {_, Stamp} when Stamp > Acc -> Stamp;
                           _ -> Acc
                       end
               end, 0, Candidates),
    digraph:add_vertex(G, Source, Newest),
    Newest.
@ -0,0 +1,368 @@ | |||
%% @doc rebar_compiler callback module for plain Erlang (.erl) sources.
-module(rebar_compiler_erl).

-behaviour(rebar_compiler).

-export([context/1,
         needed_files/3,
         dependencies/3,
         compile/4,
         clean/2]).

-include("rebar.hrl").
%% Describe where .erl sources live, where beams go, and which include
%% dirs apply (app include/ plus absolute paths from {i, Dir} erl_opts).
context(AppInfo) ->
    BeamDir = rebar_app_info:ebin_dir(AppInfo),
    AppDir = rebar_app_info:dir(AppInfo),
    ConfiguredSrcDirs = rebar_dir:src_dirs(rebar_app_info:opts(AppInfo), ["src"]),
    %% Only keep src dirs that actually exist on disk.
    SrcDirs = lists:filter(fun(D) ->
                                   ec_file:is_dir(filename:join(AppDir, D))
                           end, ConfiguredSrcDirs),
    RebarOpts = rebar_app_info:opts(AppInfo),
    ErlOpts = rebar_opts:erl_opts(RebarOpts),
    ExtraIncludes = [filename:absname(I) || I <- proplists:get_all_values(i, ErlOpts)],
    #{src_dirs => SrcDirs,
      include_dirs => [filename:join([AppDir, "include"]) | ExtraIncludes],
      src_ext => ".erl",
      out_mappings => [{".beam", BeamDir}]}.
%% Decide which .erl files must be (re)built and in what order. Parse
%% transforms named in erl_opts are checked first: if any of them is
%% stale, everything is recompiled. Returns the two-stage shape
%% {{FirstFiles, FirstOpts}, {RestFiles, RestOpts}} with RestFiles in
%% dependency (reverse topological) order.
needed_files(Graph, FoundFiles, AppInfo) ->
    OutDir = rebar_app_info:out_dir(AppInfo),
    Dir = rebar_app_info:dir(AppInfo),
    EbinDir = rebar_app_info:ebin_dir(AppInfo),
    RebarOpts = rebar_app_info:opts(AppInfo),
    ErlOpts = rebar_opts:erl_opts(RebarOpts),
    ?DEBUG("erlopts ~p", [ErlOpts]),
    ?DEBUG("files to compile ~p", [FoundFiles]),
    %% Make sure that the ebin dir is on the path
    ok = rebar_file_utils:ensure_dir(EbinDir),
    true = code:add_patha(filename:absname(EbinDir)),
    {ParseTransforms, Rest} = split_source_files(FoundFiles, ErlOpts),
    NeededErlFiles = case needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, ParseTransforms) of
                         [] ->
                             needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, Rest);
                         _ ->
                             %% at least one parse transform in the opts needs updating, so recompile all
                             FoundFiles
                     end,
    {ErlFirstFiles, ErlOptsFirst} = erl_first_files(RebarOpts, ErlOpts, Dir, NeededErlFiles),
    SubGraph = digraph_utils:subgraph(Graph, NeededErlFiles),
    DepErlsOrdered = digraph_utils:topsort(SubGraph),
    %% Reverse topological order: dependencies before their dependents.
    OtherErls = lists:reverse(DepErlsOrdered),
    PrivIncludes = [{i, filename:join(OutDir, Src)}
                    || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
    AdditionalOpts = PrivIncludes ++ [{i, filename:join(OutDir, "include")}, {i, OutDir}, return],
    true = digraph:delete(SubGraph),
    {{ErlFirstFiles, ErlOptsFirst ++ AdditionalOpts},
     {[Erl || Erl <- OtherErls,
              not lists:member(Erl, ErlFirstFiles)], ErlOpts ++ AdditionalOpts}}.
%% Scan a source's forms for includes/behaviours/transforms and resolve
%% them to existing files within Dirs.
dependencies(Source, SourceDir, Dirs) ->
    {ok, Fd} = file:open(Source, [read]),
    Found = parse_attrs(Fd, [], SourceDir),
    Resolved = expand_file_names(Found, Dirs),
    ok = file:close(Fd),
    Resolved.
%% Compile one module to a beam in OutDir, normalizing the result into
%% the shared ok/error tuple shapes.
compile(Source, [{_, OutDir}], Config, ErlOpts) ->
    case compile:file(Source, [{outdir, OutDir} | ErlOpts]) of
        {ok, _Mod} ->
            ok;
        {ok, _Mod, []} ->
            %% Warnings were requested ('return') but none were produced.
            ok;
        {ok, _Mod, Ws} ->
            rebar_compiler:ok_tuple(Source, format_error_sources(Ws, Config));
        {error, Es, Ws} ->
            error_tuple(Source, Es, Ws, Config, ErlOpts);
        error ->
            error
    end.
%% Delete the beam produced for each of the given source files.
clean(Files, AppInfo) ->
    BeamDir = rebar_app_info:ebin_dir(AppInfo),
    [file:delete(target_base(BeamDir, filename:basename(File, ".erl")) ++ ".beam")
     || File <- Files].
%%

%% Build the shared error tuple after rewriting source paths for display.
error_tuple(Module, Es, Ws, AllOpts, Opts) ->
    rebar_compiler:error_tuple(Module,
                               format_error_sources(Es, AllOpts),
                               format_error_sources(Ws, AllOpts),
                               Opts).

%% Rewrite each {Source, Description} pair's path for user display.
format_error_sources(Es, Opts) ->
    [{rebar_compiler:format_error_source(Src, Opts), Desc} || {Src, Desc} <- Es].
%% Get files which need to be compiled first, i.e. those specified in erl_first_files
%% and parse_transform options. Also produce specific erl_opts for these first
%% files, so that yet to be compiled parse transformations are excluded from it.
erl_first_files(Opts, ErlOpts, Dir, NeededErlFiles) ->
    ErlFirstFilesConf = rebar_opts:get(Opts, erl_first_files, []),
    valid_erl_first_conf(ErlFirstFilesConf),
    NeededSrcDirs = lists:usort(lists:map(fun filename:dirname/1, NeededErlFiles)),
    %% NOTE: order of files here is important!
    ErlFirstFiles =
        [filename:join(Dir, File) || File <- ErlFirstFilesConf,
                                     lists:member(filename:join(Dir, File), NeededErlFiles)],
    %% Map each configured parse transform to its source file, but only
    %% when that source is among the files needing a rebuild.
    {ParseTransforms, ParseTransformsErls} =
        lists:unzip(lists:flatmap(
                      fun(PT) ->
                              PTerls = [filename:join(D, module_to_erl(PT)) || D <- NeededSrcDirs],
                              [{PT, PTerl} || PTerl <- PTerls, lists:member(PTerl, NeededErlFiles)]
                      end, proplists:get_all_values(parse_transform, ErlOpts))),
    %% Drop parse_transform opts whose transform module is itself being
    %% compiled in this pass (it cannot be applied before it exists).
    ErlOptsFirst = lists:filter(fun({parse_transform, PT}) ->
                                        not lists:member(PT, ParseTransforms);
                                   (_) ->
                                        true
                                end, ErlOpts),
    {ErlFirstFiles ++ ParseTransformsErls, ErlOptsFirst}.
%% Partition sources into {ParseTransformSources, Others} based on the
%% parse_transform entries present in erl_opts.
split_source_files(SourceFiles, ErlOpts) ->
    Transforms = proplists:get_all_values(parse_transform, ErlOpts),
    IsTransform = fun(Source) ->
                          lists:member(filename_to_atom(Source), Transforms)
                  end,
    lists:partition(IsTransform, SourceFiles).
%% "src/foo.erl" -> 'foo'. NOTE(review): list_to_atom on file names can
%% grow the atom table; assumed safe as sources are project-local.
filename_to_atom(F) ->
    Base = filename:basename(F),
    list_to_atom(filename:rootname(Base)).
%% Get subset of SourceFiles which need to be recompiled, respecting
%% dependencies induced by given graph G.
needed_files(Graph, ErlOpts, RebarOpts, Dir, OutDir, SourceFiles) ->
    lists:filter(fun(Source) ->
                     TargetBase = target_base(OutDir, Source),
                     Target = TargetBase ++ ".beam",
                     PrivIncludes = [{i, filename:join(Dir, Src)}
                                     || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])],
                     AllOpts = [{outdir, filename:dirname(Target)}
                               ,{i, filename:join(Dir, "include")}
                               ,{i, Dir}] ++ PrivIncludes ++ ErlOpts,
                     %% The vertex label is the source's dependency-aware
                     %% mtime; the tuple comparison triggers a rebuild when
                     %% source (or any include) is newer than the beam.
                     digraph:vertex(Graph, Source) > {Source, filelib:last_modified(Target)}
                         orelse opts_changed(AllOpts, TargetBase)
                         orelse erl_compiler_opts_set()
                 end, SourceFiles).
%% Beam path (without extension) for Source under OutDir.
target_base(OutDir, Source) ->
    ModName = filename:basename(Source, ".erl"),
    filename:join([OutDir, ModName]).
%% True when compiling with NewOpts could produce different code than
%% the options recorded in the existing beam (or when we cannot tell).
opts_changed(NewOpts, Target) ->
    Effective =
        case erlang:function_exported(compile, env_compiler_options, 0) of
            true -> NewOpts ++ compile:env_compiler_options();
            false -> NewOpts
        end,
    case compile_info(Target) of
        {ok, OldOpts} ->
            Added = lists:usort(Effective) -- lists:usort(OldOpts),
            lists:any(fun effects_code_generation/1, Added);
        _ ->
            %% Unreadable beam: assume a rebuild is needed.
            true
    end.
%% Whether a compiler option can change the generated code, as opposed to
%% only affecting reporting or output location; drives recompile checks.
effects_code_generation(beam) -> false;
effects_code_generation(report_warnings) -> false;
effects_code_generation(report_errors) -> false;
effects_code_generation(return_errors) -> false;
effects_code_generation(return_warnings) -> false;
effects_code_generation(report) -> false;
effects_code_generation(warnings_as_errors) -> false;
effects_code_generation(binary) -> false;
effects_code_generation(verbose) -> false;
effects_code_generation({cwd, _}) -> false;
effects_code_generation({outdir, _}) -> false;
effects_code_generation(_) -> true.
%% Read the options a beam was compiled with; {error, Reason} when the
%% beam (given as an extension-less base path) cannot be read.
compile_info(Target) ->
    case beam_lib:chunks(Target, [compile_info]) of
        {ok, {_mod, Chunks}} ->
            Info = proplists:get_value(compile_info, Chunks, []),
            {ok, proplists:get_value(options, Info, [])};
        {error, beam_lib, Reason} ->
            ?WARN("Couldn't read debug info from ~p for reason: ~p", [Target, Reason]),
            {error, Reason}
    end.
%% True when ERL_COMPILER_OPTIONS is set on an OTP too old to expose
%% compile:env_compiler_options/0 — i.e. when opts_changed/2 cannot see
%% the env options and we must conservatively force a rebuild.
erl_compiler_opts_set() ->
    case os:getenv("ERL_COMPILER_OPTIONS") of
        false ->
            false;
        _ ->
            %% return false if changed env opts would have been caught in opts_changed/2
            not erlang:function_exported(compile, env_compiler_options, 0)
    end.
%% Abort the build when erl_first_files contains anything other than a
%% list of filename strings (atom entries are filtered with a warning).
valid_erl_first_conf(FileList) ->
    case rebar_utils:is_list_of_strings(filter_file_list(FileList)) of
        true ->
            true;
        false ->
            ?ABORT("An invalid file list (~p) was provided as part of your erl_first_files directive",
                   [FileList])
    end.
%% Split out atom entries (a common misconfiguration), warn about them,
%% and keep only the remaining (string) entries.
filter_file_list(FileList) ->
    case lists:partition(fun erlang:is_atom/1, FileList) of
        {[], _} ->
            FileList;
        {Atoms, Strings} ->
            atoms_in_erl_first_files_warning(Atoms),
            Strings
    end.
%% Warn that erl_first_files entries must be filename strings, not atoms.
atoms_in_erl_first_files_warning(Atoms) ->
    %% FIX: corrected the "entires" typo in this user-facing warning.
    W = "You have provided atoms as file entries in erl_first_files; "
        "erl_first_files only expects lists of filenames as strings. "
        "The following modules (~p) may not work as expected and it is advised "
        "that you change these entries to string format "
        "(e.g., \"src/module.erl\") ",
    ?WARN(W, [Atoms]).
%% 'foo' -> "foo.erl".
module_to_erl(Mod) ->
    lists:concat([Mod, ".erl"]).
%% Read forms from Fd one at a time, collecting dependency file names
%% from each attribute form. Unparsable forms are skipped (the read
%% position still advances), so a partially invalid file yields the
%% dependencies of its valid attributes.
parse_attrs(Fd, Includes, Dir) ->
    case io:parse_erl_form(Fd, "") of
        {ok, Form, _Line} ->
            case erl_syntax:type(Form) of
                attribute ->
                    NewIncludes = process_attr(Form, Includes, Dir),
                    parse_attrs(Fd, NewIncludes, Dir);
                _ ->
                    parse_attrs(Fd, Includes, Dir)
            end;
        {eof, _} ->
            Includes;
        _Err ->
            %% Skip the bad form rather than failing dependency scanning.
            parse_attrs(Fd, Includes, Dir)
    end.
%% Dispatch on the attribute name to collect dependency file names.
process_attr(Form, Includes, Dir) ->
    AttrName = erl_syntax:atom_value(erl_syntax:attribute_name(Form)),
    process_attr(AttrName, Form, Includes, Dir).

%% -import(Mod, ...) or -import(Mod): depend on the module's source.
process_attr(import, Form, Includes, _Dir) ->
    case erl_syntax_lib:analyze_import_attribute(Form) of
        {Mod, _Funs} ->
            [module_to_erl(Mod)|Includes];
        Mod ->
            [module_to_erl(Mod)|Includes]
    end;
%% -file(File, Line): depend on the named file.
process_attr(file, Form, Includes, _Dir) ->
    {File, _} = erl_syntax_lib:analyze_file_attribute(Form),
    [File|Includes];
%% -include("..."): depend on the header by its given name.
process_attr(include, Form, Includes, _Dir) ->
    [FileNode] = erl_syntax:attribute_arguments(Form),
    File = erl_syntax:string_value(FileNode),
    [File|Includes];
%% -include_lib("app/include/x.hrl"): resolve via code:lib_dir.
process_attr(include_lib, Form, Includes, Dir) ->
    [FileNode] = erl_syntax:attribute_arguments(Form),
    RawFile = erl_syntax:string_value(FileNode),
    maybe_expand_include_lib_path(RawFile, Dir) ++ Includes;
%% -behavior is the US-spelling alias of -behaviour.
process_attr(behavior, Form, Includes, _Dir) ->
    process_attr(behaviour, Form, Includes, _Dir);
%% -behaviour(Mod): depend on the behaviour module's source.
process_attr(behaviour, Form, Includes, _Dir) ->
    [FileNode] = erl_syntax:attribute_arguments(Form),
    File = module_to_erl(erl_syntax:atom_value(FileNode)),
    [File|Includes];
%% -compile(...): pick up parse_transform / core_transform modules,
%% whether given as a single option or inside a list of options.
process_attr(compile, Form, Includes, _Dir) ->
    [Arg] = erl_syntax:attribute_arguments(Form),
    case erl_syntax:concrete(Arg) of
        {parse_transform, Mod} ->
            [module_to_erl(Mod)|Includes];
        {core_transform, Mod} ->
            [module_to_erl(Mod)|Includes];
        L when is_list(L) ->
            lists:foldl(
              fun({parse_transform, Mod}, Acc) ->
                      [module_to_erl(Mod)|Acc];
                 ({core_transform, Mod}, Acc) ->
                      [module_to_erl(Mod)|Acc];
                 (_, Acc) ->
                      Acc
              end, Includes, L);
        _ ->
            Includes
    end;
%% Any other attribute contributes no dependencies.
process_attr(_, _Form, Includes, _Dir) ->
    Includes.
%% NOTE: If, for example, one of the entries in Files, refers to
%% gen_server.erl, that entry will be dropped. It is dropped because
%% such an entry usually refers to the beam file, and we don't pass a
%% list of OTP src dirs for finding gen_server.erl's full path. Also,
%% if gen_server.erl was modified, it's not rebar's task to compile a
%% new version of the beam file. Therefore, it's reasonable to drop
%% such entries. Also see process_attr(behaviour, Form, Includes).
-spec expand_file_names([file:filename()],
                        [file:filename()]) -> [file:filename()].
expand_file_names(Files, Dirs) ->
    %% A file that exists as given is kept as-is; otherwise each Dir is
    %% probed and every existing Dir/File combination is kept.
    lists:flatmap(
      fun(Incl) ->
              case filelib:is_regular(Incl) of
                  true ->
                      [Incl];
                  false ->
                      [Candidate || Dir <- Dirs,
                                    Candidate <- [filename:join(Dir, Incl)],
                                    filelib:is_regular(Candidate)]
              end
      end, Files).
%% Given a path like "stdlib/include/erl_compile.hrl", return
%% "OTP_INSTALL_DIR/lib/erlang/lib/stdlib-x.y.z/include/erl_compile.hrl".
%% Usually a simple [Lib, SubDir, File1] = filename:split(File) should
%% work, but to not crash when an unusual include_lib path is used,
%% utilize more elaborate logic.
maybe_expand_include_lib_path(File, Dir) ->
    File1 = filename:basename(File),
    case filename:split(filename:dirname(File)) of
        [_] ->
            %% No "app/subdir/..." structure: fall back to a local search.
            warn_and_find_path(File, Dir);
        [Lib | SubDir] ->
            %% NOTE(review): list_to_atom on the lib name can grow the atom
            %% table; assumed acceptable for build-time configuration input.
            case code:lib_dir(list_to_atom(Lib), list_to_atom(filename:join(SubDir))) of
                {error, bad_name} ->
                    warn_and_find_path(File, Dir);
                AppDir ->
                    [filename:join(AppDir, File1)]
            end
    end.
%% The use of -include_lib was probably incorrect by the user but lets try to make it work.
%% We search in the outdir and outdir/../include to see if the header exists.
warn_and_find_path(File, Dir) ->
    InSrc = filename:join(Dir, File),
    case filelib:is_regular(InSrc) of
        true ->
            [InSrc];
        false ->
            IncludeDir = filename:join(rebar_utils:droplast(filename:split(Dir)) ++ ["include"]),
            InInclude = filename:join(IncludeDir, File),
            case filelib:is_regular(InInclude) of
                true -> [InInclude];
                false -> []
            end
    end.
@ -0,0 +1,70 @@ | |||
%% @doc rebar_compiler callback module for SNMP MIB (.mib) sources.
-module(rebar_compiler_mib).

-behaviour(rebar_compiler).

-export([context/1,
         needed_files/3,
         dependencies/3,
         compile/4,
         clean/2]).

-include("rebar.hrl").
%% Provides #options{} passed to snmpc:mib_to_hrl/3.
-include_lib("stdlib/include/erl_compile.hrl").
%% Compiled .bin files go to priv/mibs, generated .hrl files to include/.
context(AppInfo) ->
    AppDir = rebar_app_info:dir(AppInfo),
    #{src_dirs => ["mibs"],
      include_dirs => [],
      src_ext => ".mib",
      out_mappings => [{".bin", filename:join([AppDir, "priv", "mibs"])},
                       {".hrl", filename:join(AppDir, "include")}]}.

%% No ordering constraints between MIBs: no "first" files, and every
%% found file gets the configured mib_opts.
needed_files(_, FoundFiles, AppInfo) ->
    FirstFiles = [],
    %% Remove first files from found files
    Rest = [S || S <- FoundFiles, not lists:member(S, FirstFiles)],
    MibOpts = rebar_opts:get(rebar_app_info:opts(AppInfo), mib_opts, []),
    {{FirstFiles, MibOpts}, {Rest, MibOpts}}.

%% MIB dependency tracking is not implemented.
dependencies(_, _, _) ->
    [].
%% Compile one MIB: snmpc writes the .bin into BinOut, then
%% snmpc:mib_to_hrl/3 generates the header, which is moved to HrlOut.
compile(Source, OutDirs, _, Opts) ->
    {_, BinOut} = lists:keyfind(".bin", 1, OutDirs),
    {_, HrlOut} = lists:keyfind(".hrl", 1, OutDirs),
    ok = rebar_file_utils:ensure_dir(BinOut),
    ok = rebar_file_utils:ensure_dir(HrlOut),
    Mib = filename:join(BinOut, filename:basename(Source, ".mib")),
    HrlFilename = Mib ++ ".hrl",
    %% BinOut doubles as an include dir so MIBs can IMPORT each other.
    AllOpts = [{outdir, BinOut}, {i, [BinOut]}] ++ Opts,
    case snmpc:compile(Source, AllOpts) of
        {ok, _} ->
            %% Forward any configured verbosity to the hrl-generation step.
            MibToHrlOpts =
                case proplists:get_value(verbosity, AllOpts, undefined) of
                    undefined ->
                        #options{specific = [],
                                 cwd = rebar_dir:get_cwd()};
                    Verbosity ->
                        #options{specific = [{verbosity, Verbosity}],
                                 cwd = rebar_dir:get_cwd()}
                end,
            ok = snmpc:mib_to_hrl(Mib, Mib, MibToHrlOpts),
            %% The header is generated next to the .bin; move it to include/.
            rebar_file_utils:mv(HrlFilename, HrlOut),
            ok;
        {error, compilation_failed} ->
            ?FAIL
    end.
%% Remove generated include/*.hrl headers and all compiled priv/mibs bins.
clean(MibFiles, AppInfo) ->
    AppDir = rebar_app_info:dir(AppInfo),
    Names = [filename:rootname(filename:basename(F)) || F <- MibFiles],
    Hrls = [filename:join([AppDir, "include", Name ++ ".hrl"]) || Name <- Names],
    rebar_file_utils:delete_each(Hrls),
    ok = rebar_file_utils:rm_rf(filename:join([AppDir, "priv/mibs/*.bin"])).
@ -0,0 +1,50 @@ | |||
%% @doc rebar_compiler callback module for leex (.xrl) scanner sources.
-module(rebar_compiler_xrl).

-behaviour(rebar_compiler).

-export([context/1,
         needed_files/3,
         dependencies/3,
         compile/4,
         clean/2]).
%% Generated .erl files are written next to the sources in src/.
context(AppInfo) ->
    AppDir = rebar_app_info:dir(AppInfo),
    #{src_dirs => ["src"],
      include_dirs => [],
      src_ext => ".xrl",
      out_mappings => [{".erl", filename:join([AppDir, "src"])}]}.

%% No ordering constraints: no "first" files, all files get xrl_opts.
needed_files(_, FoundFiles, AppInfo) ->
    FirstFiles = [],
    %% Remove first files from found files
    Rest = [S || S <- FoundFiles, not lists:member(S, FirstFiles)],
    XrlOpts = rebar_opts:get(rebar_app_info:opts(AppInfo), xrl_opts, []),
    {{FirstFiles, XrlOpts}, {Rest, XrlOpts}}.

%% .xrl dependency scanning is not implemented.
dependencies(_, _, _) ->
    [].
%% Run leex on Source, emitting the scanner as OutDir/<base>.erl.
%% Relative includefile paths are rebased onto OutDir; all other options
%% are passed through unchanged.
compile(Source, [{_, OutDir}], _, Opts) ->
    BaseName = filename:basename(Source, ".xrl"),
    Target = filename:join([OutDir, BaseName ++ ".erl"]),
    %% FIX: leex's output option is 'scannerfile' ('parserfile' belongs
    %% to yecc), and the old comprehension kept ONLY includefile tuples,
    %% silently discarding the output path and every user-provided
    %% xrl_opts entry.
    AllOpts = [{scannerfile, Target} | Opts],
    AllOpts1 = lists:map(fun({includefile, I}) ->
                                 case filename:pathtype(I) of
                                     relative -> {includefile, filename:join(OutDir, I)};
                                     _ -> {includefile, I}
                                 end;
                            (Opt) -> Opt
                         end, AllOpts),
    case leex:file(Source, AllOpts1 ++ [{return, true}]) of
        {ok, _} ->
            ok;
        {ok, _Mod, Ws} ->
            rebar_compiler:ok_tuple(Source, Ws);
        {error, Es, Ws} ->
            rebar_compiler:error_tuple(Source, Es, Ws, AllOpts1)
    end.
%% Delete the .erl files generated from each .xrl source.
clean(XrlFiles, _AppInfo) ->
    Generated = [rebar_utils:to_list(re:replace(F, "\\.xrl$", ".erl", [unicode]))
                 || F <- XrlFiles],
    rebar_file_utils:delete_each(Generated).
@ -0,0 +1,49 @@ | |||
-module(rebar_compiler_yrl). | |||
-behaviour(rebar_compiler). | |||
-export([context/1, | |||
needed_files/3, | |||
dependencies/3, | |||
compile/4, | |||
clean/2]). | |||
%% Compiler context: .yrl sources live under src/ and the generated
%% .erl parsers are written back to the same src/ directory.
context(AppInfo) ->
    SrcDir = filename:join([rebar_app_info:dir(AppInfo), "src"]),
    #{src_dirs => ["src"],
      include_dirs => [],
      src_ext => ".yrl",
      out_mappings => [{".erl", SrcDir}]}.
%% Every found .yrl file needs building; there is no ordering
%% constraint, so the "first files" set is empty.
needed_files(_, FoundFiles, AppInfo) ->
    YeccOpts = rebar_opts:get(rebar_app_info:opts(AppInfo), yrl_opts, []),
    FirstFiles = [],
    %% Remove first files from found files (a no-op while FirstFiles is [])
    Rest = lists:filter(fun(File) -> not lists:member(File, FirstFiles) end,
                        FoundFiles),
    {{FirstFiles, YeccOpts}, {Rest, YeccOpts}}.
%% .yrl files carry no inter-file dependencies.
dependencies(_Source, _Dir, _Dirs) ->
    [].
%% Compile one .yrl file with yecc, writing the generated .erl parser
%% into the mapped output directory.
compile(Source, [{_, OutDir}], _, Opts) ->
    %% Swap the .yrl extension for .erl; yecc writes the parser file
    %% exactly as named.
    BaseName = filename:basename(Source, ".yrl"),
    Target = filename:join([OutDir, BaseName ++ ".erl"]),
    AllOpts = [{parserfile, Target} | Opts],
    %% Resolve relative includefile paths against the output directory,
    %% keeping every other option (including the parserfile target)
    %% intact — the previous comprehension silently dropped them all.
    AllOpts1 = lists:map(fun({includefile, I}) ->
                                 case filename:pathtype(I) of
                                     relative ->
                                         {includefile, filename:join(OutDir, I)};
                                     _ ->
                                         {includefile, I}
                                 end;
                            (Opt) ->
                                 Opt
                         end, AllOpts),
    case yecc:file(Source, AllOpts1 ++ [{return, true}]) of
        {ok, _} ->
            ok;
        {ok, _Mod, Ws} ->
            rebar_compiler:ok_tuple(Source, Ws);
        {error, Es, Ws} ->
            rebar_compiler:error_tuple(Source, Es, Ws, AllOpts1)
    end.
%% Delete the .erl parsers generated from each .yrl source.
clean(YrlFiles, _AppInfo) ->
    Generated = [rebar_utils:to_list(re:replace(File, "\\.yrl$", ".erl", [unicode]))
                 || File <- YrlFiles],
    rebar_file_utils:delete_each(Generated).
@ -0,0 +1,142 @@ | |||
-module(rebar_hex_repos). | |||
-export([from_state/2, | |||
get_repo_config/2, | |||
auth_config/1, | |||
update_auth_config/2, | |||
format_error/1]). | |||
-ifdef(TEST). | |||
%% exported for test purposes | |||
-export([repos/1, merge_repos/1]). | |||
-endif. | |||
-include("rebar.hrl"). | |||
-include_lib("providers/include/providers.hrl"). | |||
-export_type([repo/0]). | |||
-type repo() :: #{name => unicode:unicode_binary(), | |||
api_url => binary(), | |||
api_key => binary(), | |||
repo_url => binary(), | |||
repo_public_key => binary(), | |||
repo_verify => binary()}. | |||
%% Build the effective repo list for a run: read the `hex' config from
%% state, layer in base and auth config, and expand organization repos.
from_state(BaseConfig, State) ->
    HexConfig = rebar_state:get(State, hex, []),
    Repos = repos(HexConfig),
    %% auth is stored in a separate config file since the plugin generates and modifies it
    Auth = ?MODULE:auth_config(State),
    %% add base config entries that are specific to use by rebar3 and not overridable
    Repos1 = merge_with_base_and_auth(Repos, BaseConfig, Auth),
    %% merge each organization's parent repo options into the organization repo
    update_organizations(Repos1).
-spec get_repo_config(unicode:unicode_binary(), rebar_state:t() | [repo()])
                     -> {ok, repo()} | error.
%% Look a repo up by name, either in an explicit repo list or in the
%% pkg resource state; an unknown name is a user-facing provider error.
get_repo_config(RepoName, Repos) when is_list(Repos) ->
    Matches = fun(#{name := Name}) -> Name =:= RepoName end,
    case ec_lists:find(Matches, Repos) of
        {ok, _RepoConfig} = Found ->
            Found;
        error ->
            throw(?PRV_ERROR({repo_not_found, RepoName}))
    end;
get_repo_config(RepoName, State) ->
    #{repos := Repos} =
        rebar_resource_v2:find_resource_state(pkg, rebar_state:resources(State)),
    get_repo_config(RepoName, Repos).
%% Layer the configs for each repo: auth entries (keyed by repo name)
%% are overridden by the repo's own settings, which are in turn
%% overridden by the non-configurable base config.
merge_with_base_and_auth(Repos, BaseConfig, Auth) ->
    lists:map(fun(Repo) ->
                      RepoAuth = maps:get(maps:get(name, Repo), Auth, #{}),
                      maps:merge(RepoAuth, maps:merge(Repo, BaseConfig))
              end, Repos).
%% A user's list of repos are merged by name while keeping the order
%% intact. The order is based on the first use of a repo by name in the
%% list. The default repo is appended to the user's list.
repos(HexConfig) ->
    Default = default_repo(),
    case [Entry || Entry <- HexConfig, element(1, Entry) =:= repos] of
        [] ->
            [Default];
        %% we only care if the first element is a replace entry
        [{repos, replace, Repos} | _] ->
            merge_repos(Repos);
        RepoEntries ->
            merge_repos(repo_list(RepoEntries) ++ [Default])
    end.
-spec merge_repos([repo()]) -> [repo()].
%% Fold the repo list into a deduplicated list (first mention of a name
%% keeps its position), expanding organization names on the way.
merge_repos(Repos) ->
    lists:foldl(fun(Repo = #{name := Name}, Acc) ->
                        %% private organizations include the parent repo
                        %% before a `:' in the name
                        case rebar_string:split(Name, <<":">>) of
                            [Parent, Org] ->
                                Expanded = Repo#{name => Name,
                                                 organization => Org,
                                                 parent => Parent},
                                update_repo_list(Expanded, Acc);
                            _ ->
                                update_repo_list(Repo, Acc)
                        end
                end, [], Repos).
%% Expand each organization repo by inheriting its parent repo's
%% options and deriving its repo_url from the parent's; repos without
%% an organization pass through untouched.
update_organizations(Repos) ->
    lists:map(fun(Repo=#{organization := Organization,
                         parent := ParentName}) ->
                      {ok, Parent} = get_repo_config(ParentName, Repos),
                      ParentRepoUrl = rebar_utils:to_list(maps:get(repo_url, Parent)),
                      %% organization repos live under <parent>/repos/<org>
                      {ok, RepoUrl} =
                          rebar_utils:url_append_path(ParentRepoUrl,
                                                      filename:join("repos", rebar_utils:to_list(Organization))),
                      %% still let the organization config override this constructed repo url
                      maps:merge(Parent#{repo_url => rebar_utils:to_binary(RepoUrl)}, Repo);
                 (Repo) ->
                      Repo
              end, Repos).
%% Insert repo R into the list, merging it into the first entry with the
%% same name (the existing entry's values win); append when unseen.
update_repo_list(R = #{name := Name}, [H = #{name := Name} | Rest]) ->
    [maps:merge(R, H) | Rest];
update_repo_list(R, [H | Rest]) ->
    [H | update_repo_list(R, Rest)];
update_repo_list(R, []) ->
    [R].
%% The default repo is the public hex.pm, taken straight from
%% hex_core's default configuration.
default_repo() ->
    maps:put(name, ?PUBLIC_HEX_REPO, hex_core:default_config()).
%% Flatten {repos, ...} config entries into a single repo list; at this
%% level replace entries contribute their repos just like plain ones.
repo_list(Entries) ->
    lists:flatmap(fun({repos, Repos}) -> Repos;
                     ({repos, replace, Repos}) -> Repos
                  end, Entries).
%% Render a user-facing message for provider errors raised here.
format_error({repo_not_found, RepoName}) ->
    Msg = "The repo ~ts was not found in the configuration.",
    io_lib:format(Msg, [RepoName]).
%% auth functions
%% authentication is in a separate config file because the hex plugin updates it
-spec auth_config_file(rebar_state:t()) -> file:filename_all().
auth_config_file(State) ->
    GlobalConfigDir = rebar_dir:global_config_dir(State),
    filename:join(GlobalConfigDir, ?HEX_AUTH_FILE).
-spec auth_config(rebar_state:t()) -> map().
%% Read the auth config map from disk; any failure (missing file,
%% unparsable contents) yields an empty config.
auth_config(State) ->
    case file:consult(auth_config_file(State)) of
        {ok, [Config]} -> Config;
        _Other -> #{}
    end.
-spec update_auth_config(map(), rebar_state:t()) -> ok.
%% Merge Updates into the on-disk auth config and write it back as a
%% single consultable term.
update_auth_config(Updates, State) ->
    Merged = maps:merge(auth_config(State), Updates),
    AuthConfigFile = auth_config_file(State),
    ok = filelib:ensure_dir(AuthConfigFile),
    Serialized = iolist_to_binary([io_lib:print(Merged) | ".\n"]),
    ok = file:write_file(AuthConfigFile, Serialized).
@ -0,0 +1,208 @@ | |||
-module(rebar_paths). | |||
-include("rebar.hrl"). | |||
-type target() :: deps | plugins. | |||
-type targets() :: [target(), ...]. | |||
-export_type([target/0, targets/0]). | |||
-export([set_paths/2, unset_paths/2]). | |||
-export([clashing_apps/2]). | |||
-ifdef(TEST). | |||
-export([misloaded_modules/2]). | |||
-endif. | |||
-spec set_paths(targets(), rebar_state:t()) -> ok.
%% Prepend the code paths for the given target groups (highest priority
%% first) and reload/purge modules so the right versions are active.
set_paths(UserTargets, State) ->
    Targets = normalize_targets(UserTargets),
    GroupPaths = path_groups(Targets, State),
    %% reversed so the first (highest-priority) group ends up frontmost
    %% after add_pathsa
    Paths = lists:append(lists:reverse([P || {_, P} <- GroupPaths])),
    code:add_pathsa(Paths),
    purge_and_load(app_groups(Targets, State), sets:new()),
    ok.
-spec unset_paths(targets(), rebar_state:t()) -> ok.
%% Remove the code paths of the given target groups and purge any
%% module currently loaded from one of them.
unset_paths(UserTargets, State) ->
    Targets = normalize_targets(UserTargets),
    Paths = lists:append([P || {_, P} <- path_groups(Targets, State)]),
    lists:foreach(fun code:del_path/1, Paths),
    purge(Paths, code:all_loaded()),
    ok.
-spec clashing_apps(targets(), rebar_state:t()) -> [{target(), [binary()]}].
%% For each target group, report the app names that also appear in a
%% later (lower-priority) group.
clashing_apps(Targets, State) ->
    NameSets = [{Group, sets:from_list([rebar_app_info:name(App) || App <- Apps])}
                || {Group, Apps} <- app_groups(Targets, State)],
    clashing_app_names(sets:new(), NameSets, []).
%%%%%%%%%%%%%%% | |||
%%% PRIVATE %%% | |||
%%%%%%%%%%%%%%% | |||
%% The paths are to be set in the reverse order; i.e. the default
%% path is always last when possible (minimize cases where a build
%% tool version clashes with an app's), and put the highest priorities
%% first.
-spec normalize_targets(targets()) -> targets().
%% Deduplicate the target list, letting the LAST mention of a target
%% decide its position. Unknown values (possibly piped in from future
%% rebar3 versions or plugins) are dropped rather than failing
%% violently; only deps and plugins are supported.
normalize_targets(List) ->
    Deduped = lists:foldl(
                fun(T, Acc) when T =:= deps; T =:= plugins ->
                        case Acc of
                            [T | _] -> Acc;                 % adjacent repeat
                            _ -> [T | lists:delete(T, Acc)] % move to front
                        end;
                   (_, Acc) ->
                        Acc                                 % unsupported value
                end, [], List),
    lists:reverse(Deduped).
%% Walk the app groups in priority order, reloading app specs and
%% purging modules loaded from the wrong paths so each group's version
%% wins; Seen accumulates app names already claimed by earlier groups.
purge_and_load([], _) ->
    ok;
purge_and_load([{_Group, Apps}|Rest], Seen) ->
    %% We have: a list of all applications in the current priority group,
    %% a list of all loaded modules with their active path, and a list of
    %% seen applications.
    %%
    %% We do the following:
    %% 1. identify the apps that have not been solved yet
    %% 2. find the paths for all apps in the current group
    %% 3. unload and reload apps that may have changed paths in order
    %%    to get updated module lists and specs
    %%    (we ignore started apps and apps that have not run for this)
    %%    This part turns out to be the bottleneck of this module, so
    %%    to speed it up, using clash detection proves useful:
    %%    only reload apps that clashed since others are unlikely to
    %%    conflict in significant ways
    %% 4. create a list of modules to check from that app list—only loaded
    %%    modules make sense to check.
    %% 5. check the modules to match their currently loaded paths with
    %%    the path set from the apps in the current group; modules
    %%    that differ must be purged; others can stay
    %% 1)
    AppNames = [AppName || App <- Apps,
                           AppName <- [rebar_app_info:name(App)],
                           not sets:is_element(AppName, Seen)],
    GoodApps = [App || AppName <- AppNames,
                       App <- Apps,
                       rebar_app_info:name(App) =:= AppName],
    %% 2)
    %% (no need for extra_src_dirs since those get put into ebin;
    %% also no need for OTP libs; we want to allow overtaking them)
    GoodAppPaths = [rebar_app_info:ebin_dir(App) || App <- GoodApps],
    %% 3)
    [begin
         AtomApp = binary_to_atom(AppName, utf8),
         %% blind load/unload won't interrupt an already-running app,
         %% preventing odd errors, maybe!
         case application:unload(AtomApp) of
             ok -> application:load(AtomApp);
             _ -> ok
         end
     end || AppName <- AppNames,
            %% Shouldn't unload ourselves; rebar runs without ever
            %% being started and unloading breaks logging!
            AppName =/= <<"rebar">>],
    %% 4)
    CandidateMods = lists:append(
        %% Start by asking the currently loaded app (if loaded)
        %% since it would be the primary source of conflicting modules
        [case application:get_key(AppName, modules) of
             {ok, Mods} ->
                 Mods;
             undefined ->
                 %% if not found, parse the app file on disk, in case
                 %% the app's modules are used without it being loaded
                 case rebar_app_info:app_details(App) of
                     [] -> [];
                     Details -> proplists:get_value(modules, Details, [])
                 end
         end || App <- GoodApps,
                AppName <- [binary_to_atom(rebar_app_info:name(App), utf8)]]
    ),
    %% keep only modules that are actually loaded, with their beam path
    ModPaths = [{Mod,Path} || Mod <- CandidateMods,
                              erlang:function_exported(Mod, module_info, 0),
                              {file, Path} <- [code:is_loaded(Mod)]],
    %% 5)
    Mods = misloaded_modules(GoodAppPaths, ModPaths),
    [purge_mod(Mod) || Mod <- Mods],
    purge_and_load(Rest, sets:union(Seen, sets:from_list(AppNames))).
%% Purge every loaded module whose beam file lives under one of Paths;
%% used after those paths have been removed from the code path.
purge(Paths, ModPaths) ->
    Sorted = lists:sort(Paths),
    Doomed = [Mod || {Mod, Path} <- ModPaths,
                     is_list(Path), % not 'preloaded' or mocked
                     any_prefix(Path, Sorted)],
    lists:map(fun purge_mod/1, Doomed).
%% Return (sorted, deduplicated) modules whose loaded beam path is not
%% under any of the app paths we want active for the current group.
misloaded_modules(GoodAppPaths, ModPaths) ->
    Bad = [Mod || {Mod, Path} <- ModPaths,
                  is_list(Path), % not 'preloaded' or mocked
                  not any_prefix(Path, GoodAppPaths)],
    lists:usort(Bad).
%% true iff any entry in Paths is a string prefix of Path.
any_prefix(_Path, []) ->
    false;
any_prefix(Path, [Prefix | Rest]) ->
    lists:prefix(Prefix, Path) orelse any_prefix(Path, Rest).
%% assume paths currently set are good; only unload a module so next call
%% uses the correctly set paths
purge_mod(Module) ->
    %% soft_purge returns false when old code is still in use; in that
    %% case we do not delete the module either.
    case code:soft_purge(Module) of
        true -> code:delete(Module);
        false -> false
    end.
%% This is a tricky O(n²) check since we want to
%% know whether an app clashes with any of the top priority groups.
%%
%% For example, let's say we have `[deps, plugins]', then we want
%% to find the plugins that clash with deps:
%%
%% `[{deps, [ClashingPlugins]}, {plugins, []}]'
%%
%% In case we'd ever have alternative or additional types, we can
%% find all clashes from other 'groups'.
clashing_app_names(_, [], Acc) ->
    lists:reverse(Acc);
clashing_app_names(Seen, [{Group, Names} | Rest], Acc) ->
    %% names this group introduces, ignoring ones already claimed
    Fresh = sets:subtract(Names, Seen),
    %% names any later (lower-priority) group would introduce
    Later = sets:subtract(sets:union([S || {_, S} <- Rest]), Seen),
    Clashing = sets:to_list(sets:intersection(Fresh, Later)),
    clashing_app_names(sets:union(Seen, Fresh), Rest,
                       [{Group, Clashing} | Acc]).
%% Pair each target with its code paths / its app list, respectively.
path_groups(Targets, State) ->
    [{Target, get_paths(Target, State)} || Target <- Targets].

app_groups(Targets, State) ->
    [{Target, get_apps(Target, State)} || Target <- Targets].
%% Code paths recorded in the build state for each target kind.
get_paths(deps, State) ->
    rebar_state:code_paths(State, all_deps);
get_paths(plugins, State) ->
    rebar_state:code_paths(State, all_plugin_deps).
get_apps(deps, State) ->
    %% The code paths for deps also include the top level apps
    %% and the extras, which we don't have here; we have to
    %% add the apps by hand
    ProjectApps = case rebar_state:project_apps(State) of
                      undefined -> [];
                      Apps -> Apps
                  end,
    ProjectApps ++ rebar_state:all_deps(State);
get_apps(plugins, State) ->
    rebar_state:all_plugin_deps(State).
@ -0,0 +1,47 @@ | |||
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- | |||
%% ex: ts=4 sw=4 et | |||
-module(rebar_prv_repos). | |||
-behaviour(provider). | |||
-export([init/1, | |||
do/1, | |||
format_error/1]). | |||
-include("rebar.hrl"). | |||
-define(PROVIDER, repos). | |||
-define(DEPS, []). | |||
%% ===================================================================
%% Public API
%% ===================================================================
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
%% Register the `repos' provider with rebar3.
init(State) ->
    ProviderOpts = [{name, ?PROVIDER},
                    {module, ?MODULE},
                    {bare, false},
                    {deps, ?DEPS},
                    {example, "rebar3 repos"},
                    {short_desc, "Print current package repository configuration"},
                    {desc, "Display repository configuration for debugging purpose"},
                    {opts, []}],
    {ok, rebar_state:add_provider(State, providers:create(ProviderOpts))}.
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}.
%% Print the repo configuration held in the pkg resource state.
do(State) ->
    Resources = rebar_state:resources(State),
    #{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources),
    ?CONSOLE("Repos:", []),
    %%TODO: do some formatting
    ?CONSOLE("~p", [Repos]),
    {ok, State}.
-spec format_error(any()) -> iolist().
%% Fallback error rendering: pretty-print the raw reason term.
format_error(Reason) ->
    io_lib:format("~p", [Reason]).
@ -0,0 +1,147 @@ | |||
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- | |||
%% ex: ts=4 sw=4 et | |||
-module(rebar_resource_v2). | |||
-export([new/3, | |||
find_resource_state/2, | |||
format_source/1, | |||
lock/2, | |||
download/3, | |||
needs_update/2, | |||
make_vsn/3, | |||
format_error/1]). | |||
-export_type([resource/0, | |||
source/0, | |||
type/0, | |||
location/0, | |||
ref/0, | |||
resource_state/0]). | |||
-include("rebar.hrl"). | |||
-include_lib("providers/include/providers.hrl"). | |||
-type resource() :: #resource{}. | |||
-type source() :: {type(), location(), ref()} | {type(), location(), ref(), binary()}. | |||
-type type() :: atom(). | |||
-type location() :: string(). | |||
-type ref() :: any(). | |||
-type resource_state() :: term(). | |||
-callback init(type(), rebar_state:t()) -> {ok, resource()}. | |||
-callback lock(rebar_app_info:t(), resource_state()) -> source(). | |||
-callback download(file:filename_all(), rebar_app_info:t(), resource_state(), rebar_state:t()) -> | |||
ok | {error, any()}. | |||
-callback needs_update(rebar_app_info:t(), resource_state()) -> boolean(). | |||
-callback make_vsn(rebar_app_info:t(), resource_state()) -> | |||
{plain, string()} | {error, string()}. | |||
-spec new(type(), module(), term()) -> resource().
%% Wrap a v2 resource callback module and its state in a #resource{}.
new(Type, Module, State) ->
    #resource{type=Type,
              implementation=?MODULE,
              module=Module,
              state=State}.
-spec find_resource(type(), [resource()]) -> {ok, resource()} | {error, not_found}.
%% Find a registered resource by type; if none is registered but a
%% module with that name is loadable, treat it as an old-style (v1)
%% custom resource module.
find_resource(Type, Resources) ->
    case ec_lists:find(fun(#resource{type=T}) -> T =:= Type end, Resources) of
        {ok, _Resource} = Found ->
            Found;
        error when is_atom(Type) ->
            case code:which(Type) of
                non_existing -> {error, not_found};
                _Loadable -> {ok, rebar_resource:new(Type, Type, #{})}
            end;
        error ->
            {error, not_found}
    end.
%% Fetch the stored resource state for the given type, or an error
%% tuple when no such resource is registered.
find_resource_state(Type, Resources) ->
    case lists:keyfind(Type, #resource.type, Resources) of
        #resource{state=State} -> State;
        false -> {error, not_found}
    end.
%% Strip the checksum and repo details from pkg sources for display;
%% other sources are shown as-is.
format_source(Source) ->
    case Source of
        {pkg, Name, Vsn, _Hash, _} -> {pkg, Name, Vsn};
        _ -> Source
    end.
%% Produce the lockable form of an app's source via its resource
%% implementation.
lock(AppInfo, State) ->
    resource_run(lock, rebar_app_info:source(AppInfo), [AppInfo], State).
%% Dispatch Function to the resource implementation matching the
%% source's type: v2 modules get their stored state appended to Args,
%% while v1 modules go through the rebar_resource shim with the module
%% prepended. Unknown source types throw from get_resource_type/2.
resource_run(Function, Source, Args, State) ->
    Resources = rebar_state:resources(State),
    case get_resource_type(Source, Resources) of
        {ok, #resource{type=_,
                       module=Module,
                       state=ResourceState,
                       implementation=?MODULE}} ->
            erlang:apply(Module, Function, Args++[ResourceState]);
        {ok, #resource{type=_,
                       module=Module,
                       state=_,
                       implementation=rebar_resource}} ->
            erlang:apply(rebar_resource, Function, [Module | Args])
    end.
%% Download an app's source into TmpDir via its resource implementation.
download(TmpDir, AppInfo, State) ->
    resource_run(download, rebar_app_info:source(AppInfo), [TmpDir, AppInfo, State], State).

%% Check whether a dep needs re-fetching via its resource implementation.
needs_update(AppInfo, State) ->
    resource_run(needs_update, rebar_app_info:source(AppInfo), [AppInfo], State).
%% this is a special case since it is used for project apps as well, not just deps
%% Derive a version for an app via the VcsType resource; returns
%% `unknown' when no such resource type is registered instead of
%% raising, since project apps may use arbitrary vsn markers.
make_vsn(AppInfo, VcsType, State) ->
    Resources = rebar_state:resources(State),
    case is_resource_type(VcsType, Resources) of
        true ->
            case find_resource(VcsType, Resources) of
                {ok, #resource{type=_,
                               module=Module,
                               state=ResourceState,
                               implementation=?MODULE}} ->
                    Module:make_vsn(AppInfo, ResourceState);
                {ok, #resource{type=_,
                               module=Module,
                               state=_,
                               implementation=rebar_resource}} ->
                    %% v1 resources go through the compatibility shim
                    rebar_resource:make_vsn(Module, AppInfo)
            end;
        false ->
            unknown
    end.
%% Render user-facing errors for unknown or unhandled resource types.
format_error({no_resource, Location, Type}) ->
    Fmt = "Cannot handle dependency ~ts.~n"
          " No module found for resource type ~p.",
    io_lib:format(Fmt, [Location, Type]);
format_error({no_resource, Source}) ->
    Fmt = "Cannot handle dependency ~ts.~n"
          " No module found for unknown resource type.",
    io_lib:format(Fmt, [Source]).
%% true iff a resource of the given type is registered.
is_resource_type(Type, Resources) ->
    lists:any(fun(#resource{type=T}) -> Type =:= T end, Resources).
-spec get_resource_type(term(), [resource()]) -> {ok, resource()}.
%% Extract the resource type from a source tuple and resolve it. The
%% second value handed to get_resource/3 only ends up in the not-found
%% error message; NOTE(review): 4-element sources pass their 4th
%% element while 5-element ones pass the whole tuple — looks
%% intentional for pkg sources, but worth confirming.
get_resource_type({Type, Location}, Resources) ->
    get_resource(Type, Location, Resources);
get_resource_type({Type, Location, _}, Resources) ->
    get_resource(Type, Location, Resources);
get_resource_type({Type, _, _, Location}, Resources) ->
    get_resource(Type, Location, Resources);
get_resource_type(Location={Type, _, _, _, _}, Resources) ->
    get_resource(Type, Location, Resources);
get_resource_type(Source, _) ->
    %% anything else is an unrecognized source shape
    throw(?PRV_ERROR({no_resource, Source})).
-spec get_resource(type(), term(), [resource()]) -> {ok, resource()}.
%% Resolve Type to a resource record or raise a provider error that
%% names the dependency's location.
get_resource(Type, Location, Resources) ->
    case find_resource(Type, Resources) of
        {ok, _Resource} = Ok ->
            Ok;
        {error, not_found} ->
            throw(?PRV_ERROR({no_resource, Location, Type}))
    end.
@ -0,0 +1,50 @@ | |||
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- | |||
%% ex: ts=4 sw=4 et | |||
%% | |||
%% @doc A localfs custom resource (for testing purposes only) | |||
%% | |||
%% ``` | |||
%% {deps, [ | |||
%% %% Application files are copied from "/path/to/app_name" | |||
%% {app_name, {localfs, "/path/to/app_name", undefined}} | |||
%% ]}. | |||
%% ''' | |||
-module(rebar_localfs_resource_v2). | |||
-behaviour(rebar_resource_v2). | |||
-export([init/2 | |||
,lock/2 | |||
,download/4 | |||
,needs_update/2 | |||
,make_vsn/2]). | |||
-include_lib("eunit/include/eunit.hrl"). | |||
-spec init(atom(), rebar_state:t()) -> {ok, term()}.
%% Register this module as the v2 resource implementation for Type.
init(Type, _State) ->
    {ok, rebar_resource_v2:new(Type, ?MODULE, #{})}.
%% Normalize the source to the 3-tuple lock format; the ref is always
%% `undefined' for localfs deps.
lock(AppInfo, _) ->
    case rebar_app_info:source(AppInfo) of
        {localfs, Path, _Ref} -> {localfs, Path, undefined};
        {localfs, Path} -> {localfs, Path, undefined}
    end.
%% Local copies are always considered up to date.
needs_update(_AppInfo, _ResourceState) ->
    false.
%% Copy the application files from the configured local path into
%% TmpDir.
download(TmpDir, AppInfo, State, _) ->
    download_(TmpDir, rebar_app_info:source(AppInfo), State).

download_(TmpDir, {localfs, Path, _Ref}, State) ->
    download_(TmpDir, {localfs, Path}, State);
download_(TmpDir, {localfs, Path}, _State) ->
    Contents = filelib:wildcard(Path ++ "/*"),
    ok = rebar_file_utils:cp_r(Contents, TmpDir),
    {ok, undefined}.
%% localfs sources carry no version information.
make_vsn(_AppInfo, _ResourceState) ->
    {plain, "undefined"}.
@ -0,0 +1,240 @@ | |||
-module(rebar_paths_SUITE). | |||
-include_lib("eunit/include/eunit.hrl"). | |||
-include_lib("common_test/include/ct.hrl"). | |||
-compile(export_all). | |||
%% All cases share the fake-app fixture built in init_per_testcase/2.
all() ->
    [clashing_apps,
     check_modules,
     set_paths,
     misloaded_mods].
%%%%%%%%%%%%%%%%%% | |||
%%% TEST SETUP %%% | |||
%%%%%%%%%%%%%%%%%% | |||
%% Build a per-case fixture of fake apps (deps and plugins, with rp_a
%% and relx present in both so they clash) and a rebar state whose
%% code-path groups point at their ebin dirs.
init_per_testcase(Case, Config) ->
    BasePaths = code:get_path(),
    %% This test checks that the right module sets get loaded; however, we must
    %% ensure that we do not have clashes with other test suites' loaded modules,
    %% which we cannot track. As such, we have to ensure all module names here are
    %% unique.
    %%
    %% This is done by hand; if you see this test suite failing on its own, you
    %% probably wrote a test suite that clashes!
    Dir = filename:join([?config(priv_dir, Config), atom_to_list(?MODULE),
                         atom_to_list(Case)]),
    InDir = fun(Path) -> filename:join([Dir, Path]) end,
    %% deps: rp_a..rp_d plus relx
    ADep = fake_app(<<"rp_a">>, <<"1.0.0">>, InDir("_build/default/lib/rp_a/")),
    BDep = fake_app(<<"rp_b">>, <<"1.0.0">>, InDir("_build/default/lib/rp_b/")),
    CDep = fake_app(<<"rp_c">>, <<"1.0.0">>, InDir("_build/default/lib/rp_c/")),
    DDep = fake_app(<<"rp_d">>, <<"1.0.0">>, InDir("_build/default/lib/rp_d/")),
    RelxDep = fake_app(<<"relx">>, <<"1.0.0">>, InDir("_build/default/lib/relx/")),
    %% plugins: rp_a and relx deliberately clash with the deps above
    APlug = fake_app(<<"rp_a">>, <<"1.0.0">>,
                     InDir("_build/default/plugins/lib/rp_a/")),
    RelxPlug = fake_app(<<"relx">>, <<"1.1.1">>,
                        InDir("_build/default/plugins/lib/relx")),
    EPlug = fake_app(<<"rp_e">>, <<"1.0.0">>,
                     InDir("_build/default/plugins/lib/rp_e/")),
    S0 = rebar_state:new(),
    S1 = rebar_state:all_deps(S0, [ADep, BDep, CDep, DDep, RelxDep]),
    S2 = rebar_state:all_plugin_deps(S1, [APlug, RelxPlug]),
    S3 = rebar_state:code_paths(S2, default, code:get_path()),
    S4 = rebar_state:code_paths(
           S3,
           all_deps,
           [rebar_app_info:ebin_dir(A) || A <- [ADep, BDep, CDep, DDep, RelxDep]]
    ),
    S5 = rebar_state:code_paths(
           S4,
           all_plugin_deps,
           [rebar_app_info:ebin_dir(A) || A <- [APlug, RelxPlug, EPlug]]
    ),
    [{base_paths, BasePaths}, {root_dir, Dir}, {state, S5} | Config].
%% this is deeply annoying because we interfere with rebar3's own
%% path handling!
end_per_testcase(_, Config) ->
    State = ?config(state, Config),
    rebar_paths:unset_paths([plugins, deps], State),
    Config.
%% Create an app-info record for a fake app and compile its module and
%% .app file into OutDir.
fake_app(Name, Vsn, OutDir) ->
    {ok, AppInfo} = rebar_app_info:new(Name, Vsn, OutDir),
    compile_fake_appmod(AppInfo),
    AppInfo.
%% Write a .app file and a compiled one-module beam into the app's ebin
%% dir. The module exports f/0 returning the ebin dir it was compiled
%% into, so tests can tell which copy of a clashing module is loaded.
compile_fake_appmod(App) ->
    OutDir = rebar_app_info:ebin_dir(App),
    Vsn = rebar_app_info:original_vsn(App),
    Name = rebar_app_info:name(App),
    ok = filelib:ensure_dir(filename:join([OutDir, ".touch"])),
    AppFile = [
      "{application,", Name, ", "
      " [{description, \"some app\"}, "
      "  {vsn, \"", Vsn, "\"}, "
      "  {modules, [",Name,"]}, "
      "  {registered, []}, "
      "  {applications, [stdlib, kernel]} "
      " ]}. "],
    ok = file:write_file(filename:join([OutDir, <<Name/binary, ".app">>]), AppFile),
    %% abstract forms for: -module(Name). -export([f/0]). f() -> OutDir.
    Mod = [{attribute, 1, module, binary_to_atom(Name, utf8)},
           {attribute, 2, export, [{f,0}]},
           {function,3,f,0,
            [{clause,3, [], [],
             [{string,3,OutDir}]
            }]}
          ],
    {ok, _, Bin} = compile:forms(Mod),
    ok = file:write_file(filename:join([OutDir, <<Name/binary, ".beam">>]), Bin).
%%%%%%%%%%%%% | |||
%%% TESTS %%% | |||
%%%%%%%%%%%%% | |||
%% rp_a and relx exist both as deps and as plugins, so they must be
%% reported as clashing on the deps side and nowhere else.
clashing_apps(Config) ->
    State = ?config(state, Config),
    Clashes = rebar_paths:clashing_apps([deps, plugins], State),
    ct:pal("Clashes: ~p", [Clashes]),
    ?assertEqual([<<"relx">>, <<"rp_a">>], lists:sort(proplists:get_value(deps, Clashes))),
    ?assertEqual([], proplists:get_value(plugins, Clashes)),
    ok.
%% Whichever target group is set first must win the code-path race for
%% clashing apps (rp_a, relx); non-clashing apps resolve the same way
%% regardless of order.
set_paths(Config) ->
    State = ?config(state, Config),
    RootDir = filename:split(?config(root_dir, Config)),
    rebar_paths:set_paths([plugins, deps], State),
    PluginPaths = code:get_path(),
    rebar_paths:set_paths([deps, plugins], State),
    DepPaths = code:get_path(),
    %% plugins first: the plugin copies of rp_a and relx are frontmost
    PluginsFirst = [{"rp_a", ["_build", "default", "plugins", "lib", "rp_a", "ebin"]},
                    {"rp_b", ["_build", "default", "lib", "rp_b", "ebin"]},
                    {"rp_c", ["_build", "default", "lib", "rp_c", "ebin"]},
                    {"rp_d", ["_build", "default", "lib", "rp_d", "ebin"]},
                    {"rp_e", ["_build", "default", "plugins", "lib", "rp_e", "ebin"]},
                    {"relx", ["_build", "default", "plugins", "lib", "relx", "ebin"]}],
    %% deps first: the dep copies of rp_a and relx win instead
    DepsFirst = [{"rp_a", ["_build", "default", "lib", "rp_a", "ebin"]},
                 {"rp_b", ["_build", "default", "lib", "rp_b", "ebin"]},
                 {"rp_c", ["_build", "default", "lib", "rp_c", "ebin"]},
                 {"rp_d", ["_build", "default", "lib", "rp_d", "ebin"]},
                 {"rp_e", ["_build", "default", "plugins", "lib", "rp_e", "ebin"]},
                 {"relx", ["_build", "default", "lib", "relx", "ebin"]}],
    [?assertEqual(RootDir ++ Suffix, find_first_instance(App, PluginPaths))
     || {App, Suffix} <- PluginsFirst],
    [?assertEqual(RootDir ++ Suffix, find_first_instance(App, DepPaths))
     || {App, Suffix} <- DepsFirst],
    ok.
%% After set_paths/2, each fake app's f/0 returns the ebin dir its
%% module was compiled into, revealing which copy of a clashing module
%% actually got loaded.
check_modules(Config) ->
    State = ?config(state, Config),
    RootDir = ?config(root_dir, Config)++"/",
    rebar_paths:set_paths([plugins, deps], State),
    ct:pal("code:get_path() -> ~p", [code:get_path()]),
    %% plugins first: the plugin copies of rp_a and relx must win
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/rp_a/ebin", rp_a:f()),
    %% debug output to diagnose failures on the rp_b load below
    ct:pal("~p", [catch file:list_dir(RootDir ++ "_build/default/lib/")]),
    ct:pal("~p", [catch file:list_dir(RootDir ++ "_build/default/lib/rp_b/")]),
    ct:pal("~p", [catch file:list_dir(RootDir ++ "_build/default/lib/rp_b/ebin")]),
    ct:pal("~p", [catch b:module_info()]),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_b/ebin", rp_b:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_c/ebin", rp_c:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_d/ebin", rp_d:f()),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/rp_e/ebin", rp_e:f()),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/relx/ebin", relx:f()),
    ?assertEqual(3, length(relx:module_info(exports))), % can't replace bundled
    rebar_paths:set_paths([deps, plugins], State),
    ct:pal("code:get_path() -> ~p", [code:get_path()]),
    %% deps first: the dep copies of rp_a and relx must win instead
    ?assertEqual(RootDir ++ "_build/default/lib/rp_a/ebin", rp_a:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_b/ebin", rp_b:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_c/ebin", rp_c:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_d/ebin", rp_d:f()),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/rp_e/ebin", rp_e:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/relx/ebin", relx:f()),
    ?assertEqual(3, length(relx:module_info(exports))), % can't replace bundled
    %% once again
    rebar_paths:set_paths([plugins, deps], State),
    ct:pal("code:get_path() -> ~p", [code:get_path()]),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/rp_a/ebin", rp_a:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_b/ebin", rp_b:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_c/ebin", rp_c:f()),
    ?assertEqual(RootDir ++ "_build/default/lib/rp_d/ebin", rp_d:f()),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/rp_e/ebin", rp_e:f()),
    ?assertEqual(RootDir ++ "_build/default/plugins/lib/relx/ebin", relx:f()),
    ?assertEqual(3, length(relx:module_info(exports))), % can't replace bundled
    ok.
%% Modules whose load path is not under any good app path are reported
%% (sorted); preloaded modules are skipped entirely.
misloaded_mods(_Config) ->
    GoodPaths = ["/1/2/3/4",
                 "/1/2/4",
                 "/2/1/1",
                 "/3/4/5"],
    Loaded = [{a, "/0/1/2/file.beam"},
              {b, "/1/2/3/4/file.beam"},
              {c, "/2/1/file.beam"},
              {f, preloaded},
              {d, "/3/5/7/file.beam"},
              {e, "/3/4/5/file.beam"}],
    ?assertEqual([a, c, d], rebar_paths:misloaded_modules(GoodPaths, Loaded)),
    ok.
%%%%%%%%%%%%%%%
%%% HELPERS %%%
%%%%%%%%%%%%%%%
%% Return the split form of the first path containing Frag as a path
%% component, or {not_found, Frag} when no path matches.
find_first_instance(Frag, Paths) ->
    Matches = [Parts || Path <- Paths,
                        Parts <- [filename:split(Path)],
                        lists:member(Frag, Parts)],
    case Matches of
        [First | _] -> First;
        [] -> {not_found, Frag}
    end.
@ -0,0 +1,376 @@ | |||
%% Test suite for the handling of hexpm repo configurations
-module(rebar_pkg_repos_SUITE). | |||
-compile(export_all). | |||
-include_lib("common_test/include/ct.hrl"). | |||
-include_lib("eunit/include/eunit.hrl"). | |||
-include("rebar.hrl"). | |||
%% Top-level test cases, plus the resolve_version group.
all() ->
    [default_repo,
     repo_merging,
     repo_replacing,
     auth_merging,
     organization_merging,
     {group, resolve_version}].
%% The resolve_version group shares a package table fixture built in
%% init_per_group/2.
groups() ->
    [{resolve_version,
      [use_first_repo_match,
       use_exact_with_hash,
       fail_repo_update,
       ignore_match_in_excluded_repo,
       optional_prereleases]}].
%% Build the fixture data shared by all resolve_version cases: four repos
%% and a set of package versions spread across them. Dep tuples are either
%% {Name, Vsn, Repo, Retired} or {Name, Vsn, Checksum, Repo, Retired}.
init_per_group(resolve_version, Config) ->
    Repos = [<<"test-repo-1">>, <<"test-repo-2">>, <<"test-repo-3">>, <<"hexpm">>],
    [Repo1, Repo2, Repo3, Hexpm] = Repos,
    Deps = [{"A", "0.1.1", <<"good checksum">>, Repo1, false},
            {"A", "0.1.1", <<"good checksum">>, Repo2, false},
            {"B", "1.0.0", Repo1, false},
            {"B", "2.0.0", Repo2, false},
            {"B", "1.4.0", Repo3, false},
            {"B", "1.4.3", Hexpm, false},
            {"B", "1.4.6", Hexpm, #{reason => 'RETIRED_INVALID'}},
            {"B", "1.5.0", Hexpm, false},
            {"B", "1.5.6-rc.0", Hexpm, true},
            {"C", "1.3.1", <<"bad checksum">>, Repo1, false},
            {"C", "1.3.1", <<"good checksum">>, Repo2, false}],
    [{deps, Deps}, {repos, Repos} | Config];
init_per_group(_Group, Config) ->
    Config.
%% No group-level resources to release.
end_per_group(_Group, _Config) ->
    ok.
%% Per-case setup. The resolve_version cases all need the package table
%% populated (setup_deps_and_repos/2) and rebar_packages mocked so no real
%% repo updates happen; auth/organization merging only need file and
%% rebar_packages mocked so the hex auth config can be faked.
%%
%% These three cases share identical setup: update_package always
%% succeeds and the table is always considered valid.
init_per_testcase(Case, Config) when Case =:= use_first_repo_match ;
                                     Case =:= use_exact_with_hash ;
                                     Case =:= optional_prereleases ->
    Deps = ?config(deps, Config),
    Repos = ?config(repos, Config),
    State = setup_deps_and_repos(Deps, Repos),
    meck:new(rebar_packages, [passthrough, no_link]),
    meck:expect(rebar_packages, update_package,
                fun(_, _, _State) -> ok end),
    meck:expect(rebar_packages, verify_table,
                fun(_State) -> true end),
    [{state, State} | Config];
init_per_testcase(fail_repo_update, Config) ->
    Deps = ?config(deps, Config),
    Repos = ?config(repos, Config),
    State = setup_deps_and_repos(Deps, Repos),
    meck:new(rebar_packages, [passthrough, no_link]),
    %% fail when the first repo is updated since it doesn't have a matching package
    %% should continue anyway
    [Repo1 | _] = Repos,
    meck:expect(rebar_packages, update_package,
                fun(_, #{name := Repo}, _State) when Repo =:= Repo1 -> fail;
                   (_, _, _State) -> ok end),
    meck:expect(rebar_packages, verify_table,
                fun(_State) -> true end),
    [{state, State} | Config];
init_per_testcase(ignore_match_in_excluded_repo, Config) ->
    Deps = ?config(deps, Config),
    Repos = [Repo1, _, Repo3 | _] = ?config(repos, Config),
    %% drop repo1 and repo3 from the repos used by the pkg resource, so any
    %% matching packages they contain must be ignored during resolution
    State = setup_deps_and_repos(Deps, [R || R <- Repos, R =/= Repo3, R =/= Repo1]),
    meck:new(rebar_packages, [passthrough, no_link]),
    meck:expect(rebar_packages, update_package,
                fun(_, _, _State) -> ok end),
    meck:expect(rebar_packages, verify_table,
                fun(_State) -> true end),
    [{state, State} | Config];
init_per_testcase(auth_merging, Config) ->
    %% file is mocked so the test can fake the auth config via file:consult/1
    meck:new(file, [passthrough, no_link, unstick]),
    meck:new(rebar_packages, [passthrough, no_link]),
    Config;
init_per_testcase(organization_merging, Config) ->
    meck:new(file, [passthrough, no_link, unstick]),
    meck:new(rebar_packages, [passthrough, no_link]),
    Config;
init_per_testcase(_, Config) ->
    Config.
%% Undo the meck mocks installed in init_per_testcase/2: the auth and
%% organization cases mocked both file and rebar_packages, the
%% resolve_version cases only rebar_packages, everything else nothing.
end_per_testcase(Case, _Config) ->
    FileMocked = lists:member(Case, [auth_merging, organization_merging]),
    PkgsMocked = FileMocked orelse
                 lists:member(Case, [use_first_repo_match,
                                     use_exact_with_hash,
                                     fail_repo_update,
                                     ignore_match_in_excluded_repo,
                                     optional_prereleases]),
    FileMocked andalso meck:unload(file),
    PkgsMocked andalso meck:unload(rebar_packages),
    ok.
%% A user-supplied repo named <<"hexpm">> is merged with the built-in
%% hexpm defaults: user keys are kept and missing ones (api_url) filled in.
default_repo(_Config) ->
    Custom = #{name => <<"hexpm">>,
               api_key => <<"asdf">>},
    Result = rebar_hex_repos:repos([{repos, [Custom]}]),
    ?assertMatch([#{name := <<"hexpm">>,
                    api_key := <<"asdf">>,
                    api_url := <<"https://hex.pm/api">>}], Result).
%% merge_repos/1 keeps the first occurrence of each repo name and merges
%% later duplicates into it: earlier entries win per key, later entries
%% only fill in keys the earlier ones did not set.
repo_merging(_Config) ->
    Base1 = #{name => <<"repo-1">>,
              api_url => <<"repo-1/api">>},
    Base2 = #{name => <<"repo-2">>,
              repo_url => <<"repo-2/repo">>,
              repo_verify => false},
    Duplicates = [#{name => <<"repo-2">>,
                    api_url => <<"repo-2/api">>,
                    repo_url => <<"bad url">>,
                    repo_verify => true},
                  #{name => <<"repo-1">>,
                    api_url => <<"bad url">>,
                    repo_verify => true},
                  #{name => <<"repo-2">>,
                    api_url => <<"repo-2/api-2">>,
                    repo_url => <<"other/repo">>}],
    Result = rebar_hex_repos:merge_repos([Base1, Base2 | Duplicates]),
    ?assertMatch([#{name := <<"repo-1">>,
                    api_url := <<"repo-1/api">>,
                    repo_verify := true},
                  #{name := <<"repo-2">>,
                    api_url := <<"repo-2/api">>,
                    repo_url := <<"repo-2/repo">>,
                    repo_verify := false}], Result).
%% {repos, replace, ...} wipes out all other repo config, but only when it
%% is the first repos entry; in later entries it behaves like a plain add.
repo_replacing(_Config) ->
    RepoA = #{name => <<"repo-1">>,
              api_url => <<"repo-1/api">>},
    RepoB = #{name => <<"repo-2">>,
              repo_url => <<"repo-2/repo">>,
              repo_verify => false},
    %% plain entries accumulate, with the default hexpm repo appended
    ?assertMatch([RepoA, RepoB, #{name := <<"hexpm">>}],
                 rebar_hex_repos:repos([{repos, [RepoA]},
                                        {repos, [RepoB]}])),
    %% use of replace is ignored if found in later entries than the first
    ?assertMatch([RepoA, RepoB, #{name := <<"hexpm">>}],
                 rebar_hex_repos:repos([{repos, [RepoA]},
                                        {repos, replace, [RepoB]}])),
    %% replace in the first entry drops everything else, even hexpm
    ?assertMatch([RepoA],
                 rebar_hex_repos:repos([{repos, replace, [RepoA]},
                                        {repos, [RepoB]}])).
%% Keys from the hex auth config file (read via file:consult/1, mocked in
%% init_per_testcase/2 so no real file is touched) are merged into the
%% matching repo entries -- plus the default hexpm repo -- when the pkg
%% resource is initialized.
auth_merging(_Config) ->
    Repo1 = #{name => <<"repo-1">>,
              api_url => <<"repo-1/api">>},
    Repo2 = #{name => <<"repo-2">>,
              repo_url => <<"repo-2/repo">>,
              repo_verify => false},
    State = rebar_state:new([{hex, [{repos, [Repo1, Repo2]}]}]),
    %% fake on-disk auth config: repo name -> key material
    meck:expect(file, consult,
                fun(_) ->
                        {ok, [#{<<"repo-1">> => #{read_key => <<"read key">>,
                                                  write_key => <<"write key">>},
                                <<"repo-2">> => #{read_key => <<"read key 2">>,
                                                  repos_key => <<"repos key 2">>,
                                                  write_key => <<"write key 2">>},
                                <<"hexpm">> => #{write_key => <<"write key hexpm">>}}]}
                end),
    %% every repo in the resource state carries its own auth keys
    ?assertMatch({ok,
                  #resource{state=#{repos := [#{name := <<"repo-1">>,
                                                read_key := <<"read key">>,
                                                write_key := <<"write key">>},
                                              #{name := <<"repo-2">>,
                                                read_key := <<"read key 2">>,
                                                repos_key := <<"repos key 2">>,
                                                write_key := <<"write key 2">>},
                                              #{name := <<"hexpm">>,
                                                write_key := <<"write key hexpm">>}]}}},
                 rebar_pkg_resource:init(pkg, State)),
    ok.
%% Like auth_merging/1, but with organization repos of the form
%% <<"hexpm:org">>: each gets parent := <<"hexpm">>, and -- per the
%% expectations below -- an org with no write_key of its own picks up the
%% parent's write key, while an org that defines one keeps it.
organization_merging(_Config) ->
    Repo1 = #{name => <<"hexpm:repo-1">>,
              api_url => <<"repo-1/api">>},
    Repo2 = #{name => <<"hexpm:repo-2">>,
              repo_url => <<"repo-2/repo">>,
              repo_verify => false},
    State = rebar_state:new([{hex, [{repos, [Repo1, Repo2]}]}]),
    %% fake on-disk auth config: note hexpm:repo-1 has no write_key
    meck:expect(file, consult,
                fun(_) ->
                        {ok, [#{<<"hexpm:repo-1">> => #{read_key => <<"read key">>},
                                <<"hexpm:repo-2">> => #{read_key => <<"read key 2">>,
                                                        repos_key => <<"repos key 2">>,
                                                        write_key => <<"write key 2">>},
                                <<"hexpm">> => #{write_key => <<"write key hexpm">>}}]}
                end),
    ?assertMatch({ok,
                  #resource{state=#{repos := [#{name := <<"hexpm:repo-1">>,
                                                parent := <<"hexpm">>,
                                                read_key := <<"read key">>,
                                                write_key := <<"write key hexpm">>},
                                              #{name := <<"hexpm:repo-2">>,
                                                parent := <<"hexpm">>,
                                                read_key := <<"read key 2">>,
                                                repos_key := <<"repos key 2">>,
                                                write_key := <<"write key 2">>},
                                              #{name := <<"hexpm">>,
                                                write_key := <<"write key hexpm">>}]}}},
                 rebar_pkg_resource:init(pkg, State)),
    ok.
%% Version resolution picks the first repo (in configured order) that has
%% a matching version, not the highest version across all repos.
use_first_repo_match(Config) ->
    State = ?config(state, Config),
    ResolvedGt = rebar_packages:resolve_version(<<"B">>, <<"> 1.4.0">>, undefined,
                                                ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{2,0,0}, {[],[]}}, RepoTwo},
                      <<"some checksum">>, false, []},
                  #{name := RepoTwo,
                    http_adapter_config := #{profile := rebar}}},
                 ResolvedGt),
    ResolvedTilde = rebar_packages:resolve_version(<<"B">>, <<"~> 1.4.0">>, undefined,
                                                   ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{1,4,0}, {[],[]}}, RepoThree},
                      <<"some checksum">>, false, []},
                  #{name := RepoThree,
                    http_adapter_config := #{profile := rebar}}},
                 ResolvedTilde).
%% tests that even though an earlier repo has C-1.3.1 it doesn't use it since its hash is different
use_exact_with_hash(Config) ->
    State = ?config(state, Config),
    %% C-1.3.1 exists in two repos with different checksums; only the copy
    %% whose checksum matches the requested one may be chosen.
    Resolved = rebar_packages:resolve_version(<<"C">>, <<"1.3.1">>, <<"good checksum">>,
                                              ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"C">>, {{1,3,1}, {[],[]}}, MatchedRepo},
                      <<"good checksum">>, false, []},
                  #{name := MatchedRepo,
                    http_adapter_config := #{profile := rebar}}},
                 Resolved).
%% The first repo's update is mocked to fail in init_per_testcase/2;
%% resolution must skip over it and still succeed from a later repo.
fail_repo_update(Config) ->
    State = ?config(state, Config),
    Resolved = rebar_packages:resolve_version(<<"B">>, <<"~> 1.4.0">>, undefined,
                                              ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{1,4,0}, {[],[]}}, MatchedRepo},
                      <<"some checksum">>, false, []},
                  #{name := MatchedRepo,
                    http_adapter_config := #{profile := rebar}}},
                 Resolved).
%% Repos excluded from the pkg resource in init_per_testcase/2 must not
%% contribute matches, even when they hold a version that would otherwise win.
ignore_match_in_excluded_repo(Config) ->
    State = ?config(state, Config),
    Repos = ?config(repos, Config),
    %% B ~> 1.4.0 comes from hexpm (the retired 1.4.6), not the excluded repo
    ResolvedB = rebar_packages:resolve_version(<<"B">>, <<"~> 1.4.0">>, undefined,
                                               ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{1,4,6}, {[],[]}}, Hexpm},
                      <<"some checksum">>, #{reason := 'RETIRED_INVALID'}, []},
                  #{name := Hexpm,
                    http_adapter_config := #{profile := rebar}}},
                 ResolvedB),
    [_, SecondRepo | _] = Repos,
    %% A-0.1.1 must resolve from the second repo specifically
    ResolvedA = rebar_packages:resolve_version(<<"A">>, <<"0.1.1">>, <<"good checksum">>,
                                               ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"A">>, {{0,1,1}, {[],[]}}, SecondRepo},
                      <<"good checksum">>, false, []},
                  #{name := SecondRepo,
                    http_adapter_config := #{profile := rebar}}},
                 ResolvedA).
%% Prerelease versions only match when requested exactly, or when the
%% deps_allow_prerelease option is set in the state.
optional_prereleases(Config) ->
    State = ?config(state, Config),
    %% ~> 1.5.0 skips 1.5.6-rc.0 by default and lands on 1.5.0
    Default = rebar_packages:resolve_version(<<"B">>, <<"~> 1.5.0">>, undefined,
                                             ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{1,5,0}, {[],[]}}, Hexpm},
                      <<"some checksum">>, false, []},
                  #{name := Hexpm,
                    http_adapter_config := #{profile := rebar}}},
                 Default),
    %% an exact prerelease version is always honoured
    Exact = rebar_packages:resolve_version(<<"B">>, <<"1.5.6-rc.0">>, <<"some checksum">>,
                                           ?PACKAGE_TABLE, State),
    ?assertMatch({ok,{package,{<<"B">>, {{1,5,6}, {[<<"rc">>,0],[]}}, Hexpm},
                      <<"some checksum">>, true, []},
                  #{name := Hexpm,
                    http_adapter_config := #{profile := rebar}}},
                 Exact),
    %% allow prerelease through configuration
    State1 = rebar_state:set(State, deps_allow_prerelease, true),
    Allowed = rebar_packages:resolve_version(<<"B">>, <<"~> 1.5.0">>, <<"some checksum">>,
                                             ?PACKAGE_TABLE, State1),
    ?assertMatch({ok,{package,{<<"B">>, {{1,5,6}, {[<<"rc">>,0],[]}}, Hexpm},
                      <<"some checksum">>, true, []},
                  #{name := Hexpm,
                    http_adapter_config := #{profile := rebar}}},
                 Allowed).
%% | |||
%% Create a fresh package table populated with Deps and return a rebar
%% state whose pkg resource is configured with the given Repos.
setup_deps_and_repos(Deps, Repos) ->
    %% Drop any table left over from a previous test. ets:delete/1 raises
    %% badarg when the table does not exist, so check first instead of
    %% swallowing every exception with an old-style `catch`.
    case ets:info(?PACKAGE_TABLE) of
        undefined -> ok;
        _ -> ets:delete(?PACKAGE_TABLE)
    end,
    true = rebar_packages:new_package_table(),
    insert_deps(Deps),
    State = rebar_state:new([{hex, [{repos, [#{name => R} || R <- Repos]}]}]),
    rebar_state:create_resources([{pkg, rebar_pkg_resource}], State).
%% Insert each dep tuple into the package table. 4-tuples get the default
%% <<"some checksum">>; 5-tuples carry an explicit checksum.
insert_deps(Deps) ->
    Insert = fun(Name, Vsn, Repo, Retired, Checksum) ->
                     ets:insert(?PACKAGE_TABLE,
                                #package{key={rebar_utils:to_binary(Name),
                                              ec_semver:parse(Vsn),
                                              rebar_utils:to_binary(Repo)},
                                         dependencies=[],
                                         retired=Retired,
                                         checksum=Checksum})
             end,
    lists:foreach(fun({Name, Vsn, Repo, Retired}) ->
                          Insert(Name, Vsn, Repo, Retired, <<"some checksum">>);
                     ({Name, Vsn, Checksum, Repo, Retired}) ->
                          Insert(Name, Vsn, Repo, Retired, Checksum)
                  end, Deps).