diff --git a/src/rebar_compiler.erl b/src/rebar_compiler.erl index 8b83ec5a..f4b4c472 100644 --- a/src/rebar_compiler.erl +++ b/src/rebar_compiler.erl @@ -1,6 +1,7 @@ -module(rebar_compiler). -export([analyze_all/2, + analyze_all_extras/2, compile_analyzed/3, compile_all/2, clean/2, @@ -46,7 +47,7 @@ %% @doc analysis by the caller, in order to let an OTP app %% find and resolve all its dependencies as part of compile_all's new %% API, which presumes a partial analysis is done ahead of time --spec analyze_all(DAG, [App, ...]) -> ok when +-spec analyze_all(DAG, [App, ...]) -> {map(), [App]} when DAG :: {module(), digraph:graph()}, App :: rebar_app_info:t(). analyze_all({Compiler, G}, Apps) -> @@ -61,7 +62,7 @@ analyze_all({Compiler, G}, Apps) -> OutExt = maps:get(artifact_exts, Contexts), rebar_compiler_dag:prune( - G, SrcExt, OutExt, lists:append(AbsSources), AppOutPaths + G, SrcExt, OutExt, lists:append(AbsSources), lists:append(AppOutPaths) ), rebar_compiler_dag:populate_deps(G, SrcExt, OutExt), rebar_compiler_dag:propagate_stamps(G), @@ -72,6 +73,78 @@ analyze_all({Compiler, G}, Apps) -> AppNames = rebar_compiler_dag:compile_order(G, AppPaths), {Contexts, sort_apps(AppNames, Apps)}. +%% @doc same as analyze_all/2, but over extra_src_apps, +%% which are a big cheat. +-spec analyze_all_extras(DAG, [App, ...]) -> {map(), [App]} when + DAG :: {module(), digraph:graph()}, + App :: rebar_app_info:t(). +analyze_all_extras(DAG, Apps) -> + case lists:append([annotate_extras(App) || App <- Apps]) of + [] -> {#{}, []}; + ExtraApps -> analyze_all(DAG, ExtraApps) + end. + +-spec compile_analyzed({module(), digraph:graph()}, rebar_app_info:t(), map()) -> ok. +compile_analyzed({Compiler, G}, AppInfo, Contexts) -> % > 3.13.2 + run(G, Compiler, AppInfo, Contexts), + ok. + +-spec compile_all([module(), ...], rebar_app_info:t()) -> ok. +compile_all(Compilers, AppInfo) -> % =< 3.13.0 interface; plugins use this! + %% Support the old-style API by re-declaring a local DAG for the + %% compile steps needed. + lists:foreach(fun(Compiler) -> + OutDir = rebar_app_info:out_dir(AppInfo), + G = rebar_compiler_dag:init(OutDir, Compiler, undefined, []), + {Ctx, _} = analyze_all({Compiler, G}, [AppInfo]), + compile_analyzed({Compiler, G}, AppInfo, Ctx), + rebar_compiler_dag:maybe_store(G, OutDir, Compiler, undefined, []), + rebar_compiler_dag:terminate(G) + end, Compilers). + +%% @doc remove compiled artifacts from an AppDir. +-spec clean([module()], rebar_app_info:t()) -> 'ok'. +clean(Compilers, AppInfo) -> + lists:foreach(fun(CompilerMod) -> + clean_(CompilerMod, AppInfo, undefined), + Extras = annotate_extras(AppInfo), + [clean_(CompilerMod, ExtraApp, "extra") || ExtraApp <- Extras] + end, Compilers). + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%%% COMPILER UTIL EXPORTS %%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% These functions are here for the ultimate goal of getting rid of +%% rebar_base_compiler. This can't be done because of existing plugins. + +-spec needs_compile(filename:all(), extension(), [{extension(), file:dirname()}]) -> boolean(). +needs_compile(Source, OutExt, Mappings) -> + Ext = filename:extension(Source), + BaseName = filename:basename(Source, Ext), + {_, OutDir} = lists:keyfind(OutExt, 1, Mappings), + Target = filename:join(OutDir, BaseName++OutExt), + filelib:last_modified(Source) > filelib:last_modified(Target). + +ok_tuple(Source, Ws) -> + rebar_base_compiler:ok_tuple(Source, Ws). + +error_tuple(Source, Es, Ws, Opts) -> + rebar_base_compiler:error_tuple(Source, Es, Ws, Opts). 
+ +maybe_report(Reportable) -> + rebar_base_compiler:maybe_report(Reportable). + +format_error_source(Path, Opts) -> + rebar_base_compiler:format_error_source(Path, Opts). + +report(Messages) -> + rebar_base_compiler:report(Messages). + +%%%%%%%%%%%%%%% +%%% PRIVATE %%% +%%%%%%%%%%%%%%% + gather_contexts(Compiler, Apps) -> Default = default_ctx(), Contexts = [{rebar_app_info:name(AppInfo), @@ -121,37 +194,14 @@ analyze_app({Compiler, G}, Contexts, AppInfo) -> rebar_compiler_dag:populate_sources( G, Compiler, InDirs, AbsSources, DepOpts ), - {{BaseDir, ArtifactDir}, AbsSources}. + {[{filename:join([BaseDir, SrcDir]), ArtifactDir} || SrcDir <- SrcDirs], + AbsSources}. sort_apps(Names, Apps) -> NamedApps = [{rebar_app_info:name(App), App} || App <- Apps], [App || Name <- Names, {_, App} <- [lists:keyfind(Name, 1, NamedApps)]]. --spec compile_analyzed({module(), digraph:graph()}, rebar_app_info:t(), map()) -> ok. -compile_analyzed({Compiler, G}, AppInfo, Contexts) -> % > 3.13.0 - run(G, Compiler, AppInfo, Contexts), - %% Extras are tricky and get their own mini-analysis - ExtraApps = annotate_extras(AppInfo), - [begin - {ExtraCtx, [SortedExtra]} = analyze_all({Compiler, G}, [ExtraAppInfo]), - run(G, Compiler, SortedExtra, ExtraCtx) - end || ExtraAppInfo <- ExtraApps], - ok. - --spec compile_all([module(), ...], rebar_app_info:t()) -> ok. -compile_all(Compilers, AppInfo) -> % =< 3.13.0 interface; plugins use this! - %% Support the old-style API by re-declaring a local DAG for the - %% compile steps needed. - lists:foreach(fun(Compiler) -> - OutDir = rebar_app_info:out_dir(AppInfo), - G = rebar_compiler_dag:init(OutDir, Compiler, undefined, []), - Ctx = analyze_all({Compiler, G}, [AppInfo]), - compile_analyzed({Compiler, G}, AppInfo, Ctx), - rebar_compiler_dag:maybe_store(G, OutDir, Compiler, undefined, []), - rebar_compiler_dag:terminate(G) - end, Compilers). - prepare_compiler_env(Compiler, Apps) -> lists:foreach( fun(AppInfo) -> @@ -183,11 +233,14 @@ run(G, CompilerMod, AppInfo, Contexts) -> {{FirstFiles, FirstFileOpts}, {RestFiles, Opts}} = CompilerMod:needed_files(G, FoundFiles, Mappings, AppInfo), - compile_each(FirstFiles, FirstFileOpts, BaseOpts, Mappings, CompilerMod), - Tracked = case RestFiles of + Tracked = + compile_each(FirstFiles, FirstFileOpts, BaseOpts, Mappings, CompilerMod) + ++ case RestFiles of {Sequential, Parallel} -> % parallelizable form compile_each(Sequential, Opts, BaseOpts, Mappings, CompilerMod) ++ - compile_parallel(Parallel, Opts, BaseOpts, Mappings, CompilerMod); + lists:append( + compile_parallel(Parallel, Opts, BaseOpts, Mappings, CompilerMod) + ); _ when is_list(RestFiles) -> % traditional sequential build compile_each(RestFiles, Opts, BaseOpts, Mappings, CompilerMod) end, @@ -246,88 +299,49 @@ store_artifacts(_G, []) -> ok; store_artifacts(G, [{Source, Target, Meta}|Rest]) -> %% Assume the source exists since it was tracked to be compiled - digraph:add_vertex(G, Target, {artifact, Meta}), - digraph:add_edge(G, Target, Source, artifact), + rebar_compiler_dag:store_artifact(G, Source, Target, Meta), store_artifacts(G, Rest). -compile_worker(QueuePid, Opts, Config, Outs, CompilerMod) -> - QueuePid ! self(), - receive - {compile, Source} -> - Result = - case erlang:function_exported(CompilerMod, compile_and_track, 4) of - false -> - CompilerMod:compile(Source, Outs, Config, Opts); - true -> - CompilerMod:compile_and_track(Source, Outs, Config, Opts) - end, - QueuePid ! 
{Result, Source}, - compile_worker(QueuePid, Opts, Config, Outs, CompilerMod); - empty -> - ok - end. - -compile_parallel([], _Opts, _BaseOpts, _Mappings, _CompilerMod) -> - []; compile_parallel(Targets, Opts, BaseOpts, Mappings, CompilerMod) -> - Self = self(), - F = fun() -> compile_worker(Self, Opts, BaseOpts, Mappings, CompilerMod) end, - Jobs = min(length(Targets), erlang:system_info(schedulers)), - ?DEBUG("Starting ~B compile worker(s)", [Jobs]), - Pids = [spawn_monitor(F) || _I <- lists:seq(1, Jobs)], - compile_queue(Targets, Pids, Opts, BaseOpts, Mappings, CompilerMod). - -compile_queue([], [], _Opts, _Config, _Outs, _CompilerMod) -> - []; -compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod) -> Tracking = erlang:function_exported(CompilerMod, compile_and_track, 4), - receive - Worker when is_pid(Worker), Targets =:= [] -> - Worker ! empty, - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - Worker when is_pid(Worker) -> - Worker ! {compile, hd(Targets)}, - compile_queue(tl(Targets), Pids, Opts, Config, Outs, CompilerMod); - {ok, Source} -> - ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - {{ok, Tracked}, Source} when Tracking -> - ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), - Tracked ++ - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - {{ok, Warnings}, Source} when not Tracking -> - report(Warnings), - ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - {{ok, Tracked, Warnings}, Source} when Tracking -> - report(Warnings), - ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), - Tracked ++ - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - {skipped, Source} -> - ?DEBUG("~sSkipped ~s", [rebar_utils:indent(1), Source]), - compile_queue(Targets, Pids, Opts, Config, Outs, CompilerMod); - {Error, Source} -> - NewSource = format_error_source(Source, Config), - ?ERROR("Compiling ~ts failed", [NewSource]), - maybe_report(Error), - ?FAIL; - {'DOWN', Mref, _, Pid, normal} -> - Pids2 = lists:delete({Pid, Mref}, Pids), - compile_queue(Targets, Pids2, Opts, Config, Outs, CompilerMod); - {'DOWN', _Mref, _, _Pid, Info} -> - ?ERROR("Compilation failed: ~p", [Info]), - ?FAIL - end. + rebar_parallel:queue( + Targets, + fun compile_worker/2, [Opts, BaseOpts, Mappings, CompilerMod], + fun compile_handler/2, [BaseOpts, Tracking] + ). -%% @doc remove compiled artifacts from an AppDir. --spec clean([module()], rebar_app_info:t()) -> 'ok'. -clean(Compilers, AppInfo) -> - lists:foreach(fun(CompilerMod) -> - clean_(CompilerMod, AppInfo, undefined), - Extras = annotate_extras(AppInfo), - [clean_(CompilerMod, ExtraApp, "extra") || ExtraApp <- Extras] - end, Compilers). +compile_worker(Source, [Opts, Config, Outs, CompilerMod]) -> + Result = case erlang:function_exported(CompilerMod, compile_and_track, 4) of + false -> + CompilerMod:compile(Source, Outs, Config, Opts); + true -> + CompilerMod:compile_and_track(Source, Outs, Config, Opts) + end, + %% Bundle the source to allow proper reporting in the handler: + {Result, Source}. 
+ +compile_handler({ok, Source}, _Args) -> + ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), + ok; +compile_handler({{ok, Tracked}, Source}, [_, Tracking]) when Tracking -> + ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), + {ok, Tracked}; +compile_handler({{ok, Warnings}, Source}, _Args) -> + report(Warnings), + ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), + ok; +compile_handler({{ok, Tracked, Warnings}, Source}, [_, Tracking]) when Tracking -> + report(Warnings), + ?DEBUG("~sCompiled ~s", [rebar_utils:indent(1), Source]), + {ok, Tracked}; +compile_handler({skipped, Source}, _Args) -> + ?DEBUG("~sSkipped ~s", [rebar_utils:indent(1), Source]), + ok; +compile_handler({Error, Source}, [Config | _Rest]) -> + NewSource = format_error_source(Source, Config), + ?ERROR("Compiling ~ts failed", [NewSource]), + maybe_report(Error), + ?FAIL. clean_(CompilerMod, AppInfo, _Label) -> #{src_dirs := SrcDirs, @@ -339,21 +353,18 @@ clean_(CompilerMod, AppInfo, _Label) -> CompilerMod:clean(FoundFiles, AppInfo), ok. --spec needs_compile(filename:all(), extension(), [{extension(), file:dirname()}]) -> boolean(). -needs_compile(Source, OutExt, Mappings) -> - Ext = filename:extension(Source), - BaseName = filename:basename(Source, Ext), - {_, OutDir} = lists:keyfind(OutExt, 1, Mappings), - Target = filename:join(OutDir, BaseName++OutExt), - filelib:last_modified(Source) > filelib:last_modified(Target). - annotate_extras(AppInfo) -> ExtraDirs = rebar_dir:extra_src_dirs(rebar_app_info:opts(AppInfo), []), OldSrcDirs = rebar_app_info:get(AppInfo, src_dirs, ["src"]), AppDir = rebar_app_info:dir(AppInfo), lists:map(fun(Dir) -> EbinDir = filename:join(rebar_app_info:out_dir(AppInfo), Dir), - AppInfo1 = rebar_app_info:ebin_dir(AppInfo, EbinDir), + %% need a unique name to prevent lookup issues that clobber entries + AppName = unicode:characters_to_binary( + [rebar_app_info:name(AppInfo), "_", Dir] + ), + AppInfo0 = rebar_app_info:name(AppInfo, AppName), + AppInfo1 = rebar_app_info:ebin_dir(AppInfo0, EbinDir), AppInfo2 = rebar_app_info:set(AppInfo1, src_dirs, [Dir]), AppInfo3 = rebar_app_info:set(AppInfo2, extra_src_dirs, OldSrcDirs), add_to_includes( % give access to .hrl in app's src/ @@ -365,26 +376,6 @@ annotate_extras(AppInfo) -> filelib:is_dir(filename:join(AppDir, ExtraDir))] ). -%% These functions are here for the ultimate goal of getting rid of -%% rebar_base_compiler. This can't be done because of existing plugins. - -ok_tuple(Source, Ws) -> - rebar_base_compiler:ok_tuple(Source, Ws). - -error_tuple(Source, Es, Ws, Opts) -> - rebar_base_compiler:error_tuple(Source, Es, Ws, Opts). - -maybe_report(Reportable) -> - rebar_base_compiler:maybe_report(Reportable). - -format_error_source(Path, Opts) -> - rebar_base_compiler:format_error_source(Path, Opts). - -report(Messages) -> - rebar_base_compiler:report(Messages). - -%%% private functions - find_source_files(BaseDir, SrcExt, SrcDirs, Opts) -> SourceExtRe = "^(?!\\._).*\\" ++ SrcExt ++ [$$], lists:flatmap(fun(SrcDir) -> diff --git a/src/rebar_compiler_dag.erl b/src/rebar_compiler_dag.erl index 710cde89..db903b16 100644 --- a/src/rebar_compiler_dag.erl +++ b/src/rebar_compiler_dag.erl @@ -3,7 +3,7 @@ -module(rebar_compiler_dag). -export([init/4, maybe_store/5, terminate/1]). -export([prune/5, populate_sources/5, populate_deps/3, propagate_stamps/1, - compile_order/2]). + compile_order/2, store_artifact/4]). -include("rebar.hrl"). 
@@ -41,7 +41,7 @@ init(Dir, Compiler, Label, CritMeta) -> %% The file must be in one of the directories that may contain source files %% for an OTP application; source files found in the DAG `G' that lie outside %% of these directories may be used in other circumstances (i.e. options affecting -%% visibility). +%% visibility, extra_src_dirs). %% Prune out files that have no corresponding sources prune(G, SrcExt, ArtifactExt, Sources, AppPaths) -> %% Collect source files that may have been removed. These files: @@ -50,14 +50,28 @@ prune(G, SrcExt, ArtifactExt, Sources, AppPaths) -> %% In the process, prune header files - those don't have ArtifactExt %% extension - using side effect in is_deleted_source/5. case [Del || Del <- (digraph:vertices(G) -- Sources), - is_deleted_source(G, Del, filename:extension(Del), SrcExt, ArtifactExt)] of + is_deleted_source(G, Del, filename:extension(Del), SrcExt, ArtifactExt)] of [] -> ok; %% short circuit without sorting AppPaths Deleted -> - prune_source_files(G, SrcExt, ArtifactExt, - lists:sort(AppPaths), lists:sort(Deleted)) + SafeAppPaths = safe_dirs(AppPaths), + OutFiles = filter_prefix(G, lists:sort(SafeAppPaths), lists:sort(Deleted)), + [maybe_rm_artifact_and_edge(G, Out, SrcExt, ArtifactExt, File) + || {File, Out} <- OutFiles], + ok end. +%% Some app paths may be prefixes of one another; for example, +%% `/some/app/directory' may be seen as a prefix +%% of `/some/app/directory_trick' and cause pruning outside +%% of the proper scopes. +safe_dirs(AppPaths) -> + [{safe_dir(AppDir), Path} || {AppDir, Path} <- AppPaths]. + +safe_dir([]) -> "/"; +safe_dir("/") -> "/"; +safe_dir([H|T]) -> [H|safe_dir(T)]. + is_deleted_source(_G, _F, Extension, Extension, _ArtifactExt) -> %% source file true; @@ -74,22 +88,21 @@ is_deleted_source(G, F, _Extension, _SrcExt, _ArtifactExt) -> %% AppDirs & Fs are sorted, and to check if File is outside of %% App, lists:prefix is checked. When the App with File in it %% exists, verify file is still there on disk. -prune_source_files(_G, _SrcExt, _ArtifactExt, [], _) -> - ok; -prune_source_files(_G, _SrcExt, _ArtifactExt, _, []) -> - ok; -prune_source_files(G, SrcExt, ArtifactExt, [AppDir | AppTail], Fs) when is_atom(AppDir) -> +filter_prefix(_G, [], _) -> + []; +filter_prefix(_G, _, []) -> + []; +filter_prefix(G, Apps, [F|Fs]) when is_atom(F) -> %% dirty bit shenanigans - prune_source_files(G, SrcExt, ArtifactExt, AppTail, Fs); -prune_source_files(G, SrcExt, ArtifactExt, [{App, Out} | AppTail] = AppPaths, [File | FTail]) -> + filter_prefix(G, Apps, Fs); +filter_prefix(G, [{App, Out} | AppTail] = AppPaths, [File | FTail]) -> case lists:prefix(App, File) of true -> - maybe_rm_artifact_and_edge(G, Out, SrcExt, ArtifactExt, File), - prune_source_files(G, SrcExt, ArtifactExt, AppPaths, FTail); + [{File, Out} | filter_prefix(G, AppPaths, FTail)]; false when App < File -> - prune_source_files(G, SrcExt, ArtifactExt, AppTail, [File|FTail]); + filter_prefix(G, AppTail, [File|FTail]); false -> - prune_source_files(G, SrcExt, ArtifactExt, AppPaths, FTail) + filter_prefix(G, AppPaths, FTail) end. %% @doc this function scans all the source files found and looks into @@ -192,6 +205,10 @@ maybe_store(G, Dir, Compiler, Label, CritMeta) -> terminate(G) -> true = digraph:delete(G). +store_artifact(G, Source, Target, Meta) -> + digraph:add_vertex(G, Target, {artifact, Meta}), + digraph:add_edge(G, Target, Source, artifact). 
+ %%%%%%%%%%%%%%% %%% PRIVATE %%% %%%%%%%%%%%%%%% @@ -401,8 +418,6 @@ find_app_(Path, [{AppPath, AppName}|Rest]) -> find_app_(Path, Rest) end. - - %% @private Return what should be the base name of an erl file, relocated to the %% target directory. For example: %% target_base("ebin/", "src/my_module.erl", ".erl", ".beam") -> "ebin/my_module.beam" diff --git a/src/rebar_compiler_erl.erl b/src/rebar_compiler_erl.erl index c849e47c..9a8a9377 100644 --- a/src/rebar_compiler_erl.erl +++ b/src/rebar_compiler_erl.erl @@ -217,17 +217,18 @@ filename_to_atom(F) -> list_to_atom(filename:rootname(filename:basename(F))). %% Get subset of SourceFiles which need to be recompiled, respecting %% dependencies induced by given graph G. needed_files(Graph, ErlOpts, RebarOpts, Dir, OutDir, SourceFiles) -> + PrivIncludes = [{i, filename:join(Dir, Src)} + || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])], + SharedOpts = [{i, filename:join(Dir, "include")}, + {i, Dir}] ++ PrivIncludes ++ ErlOpts, + CompilerOptsSet = erl_compiler_opts_set(), lists:filter(fun(Source) -> TargetBase = target_base(OutDir, Source), Target = TargetBase ++ ".beam", - PrivIncludes = [{i, filename:join(Dir, Src)} - || Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])], - AllOpts = [{outdir, filename:dirname(Target)} - ,{i, filename:join(Dir, "include")} - ,{i, Dir}] ++ PrivIncludes ++ ErlOpts, + AllOpts = [{outdir, filename:dirname(Target)} | SharedOpts], digraph:vertex(Graph, Source) > {Source, filelib:last_modified(Target)} orelse opts_changed(Graph, AllOpts, Target, TargetBase) - orelse erl_compiler_opts_set() + orelse CompilerOptsSet end, SourceFiles). target_base(OutDir, Source) -> diff --git a/src/rebar_parallel.erl b/src/rebar_parallel.erl new file mode 100644 index 00000000..e443619c --- /dev/null +++ b/src/rebar_parallel.erl @@ -0,0 +1,56 @@ +%%% @doc +%%% This module contains a small parallel dispatch queue that allows +%%% to take a list of jobs and run as many of them in parallel as there +%%% are schedulers ongoing. +%%% +%%% Original design by Max Fedorov in the rebar compiler, then generalised +%%% and extracted here to be reused in other circumstances. +%%% @end +-module(rebar_parallel). +-export([queue/5]). +-include("rebar.hrl"). + +queue(Tasks, WorkF, WArgs, Handler, HArgs) -> + Parent = self(), + Worker = fun() -> worker(Parent, WorkF, WArgs) end, + Jobs = min(length(Tasks), erlang:system_info(schedulers)), + ?DEBUG("Starting ~B worker(s)", [Jobs]), + Pids = [spawn_monitor(Worker) || _ <- lists:seq(1, Jobs)], + parallel_dispatch(Tasks, Pids, Handler, HArgs). + +parallel_dispatch([], [], _, _) -> + []; +parallel_dispatch(Targets, Pids, Handler, Args) -> + receive + {ready, Worker} when is_pid(Worker), Targets =:= [] -> + Worker ! empty, + parallel_dispatch(Targets, Pids, Handler, Args); + {ready, Worker} when is_pid(Worker) -> + [Task|Tasks] = Targets, + Worker ! {task, Task}, + parallel_dispatch(Tasks, Pids, Handler, Args); + {'DOWN', Mref, _, Pid, normal} -> + NewPids = lists:delete({Pid, Mref}, Pids), + parallel_dispatch(Targets, NewPids, Handler, Args); + {'DOWN', _Mref, _, _Pid, Info} -> + ?ERROR("Task failed: ~p", [Info]), + ?FAIL; + {result, Result} -> + case Handler(Result, Args) of + ok -> + parallel_dispatch(Targets, Pids, Handler, Args); + {ok, Acc} -> + [Acc | parallel_dispatch(Targets, Pids, Handler, Args)] + end + end. + +worker(QueuePid, F, Args) -> + QueuePid ! {ready, self()}, + receive + {task, Task} -> + QueuePid ! {result, F(Task, Args)}, + worker(QueuePid, F, Args); + empty -> + ok + end. 
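Note: rebar_parallel:queue/5 above is fully generic: each worker calls WorkF(Task, WArgs) in a pool capped at erlang:system_info(schedulers), and every result goes through Handler(Result, HArgs), which must return ok (discard) or {ok, Acc} (collect Acc into the list returned by queue/5). A minimal usage sketch outside the compiler, assuming a hypothetical checksums/1 caller that is not part of this patch; results arrive in completion order, not input order:

    %% Hash a list of files in parallel; unreadable files are dropped.
    checksums(Paths) ->
        rebar_parallel:queue(
          Paths,
          fun(Path, []) ->                                 % WorkF(Task, WArgs)
                  case file:read_file(Path) of
                      {ok, Bin} -> {Path, erlang:md5(Bin)};
                      {error, Reason} -> {unreadable, Path, Reason}
                  end
          end, [],
          fun({Path, Hash}, []) -> {ok, {Path, Hash}};     % keep successes
             ({unreadable, _Path, _Reason}, []) -> ok      % drop failures
          end, []).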
+
diff --git a/src/rebar_prv_compile.erl b/src/rebar_prv_compile.erl
index f8974466..f7adbe4b 100644
--- a/src/rebar_prv_compile.erl
+++ b/src/rebar_prv_compile.erl
@@ -251,7 +251,13 @@ extra_virtual_apps(State, VApp0, [Dir|Dirs]) ->
             VApp2 = rebar_app_info:ebin_dir(VApp1, OutDir),
             Opts = rebar_state:opts(State),
             VApp3 = rebar_app_info:opts(VApp2, Opts),
-            [rebar_app_info:set(VApp3, src_dirs, [OutDir])
+            %% ensure we don't end up running a similar extra fake app while
+            %% compiling root-level extras by marking it explicitly null
+            VApp4 = rebar_app_info:set(VApp3, extra_src_dirs, []),
+            %% need a unique name to prevent lookup issues that clobber entries
+            AppName = unicode:characters_to_binary(["extra_", Dir]),
+            VApp5 = rebar_app_info:name(VApp4, AppName),
+            [rebar_app_info:set(VApp5, src_dirs, [OutDir])
             | extra_virtual_apps(State, VApp0, Dirs)]
     end.
 
@@ -303,6 +309,7 @@ build_rebar3_apps(DAGs, Apps, State) ->
     LastDAG = lists:last(DAGs),
     %% we actually need to compile each DAG one after the other to prevent
     %% issues where a .yrl file that generates a .erl file gets to be seen.
+    ?INFO("Analyzing applications...", []),
     [begin
          {Ctx, ReorderedApps} = rebar_compiler:analyze_all(DAG, Apps),
          lists:foreach(
@@ -312,6 +319,13 @@ build_rebar3_apps(DAGs, Apps, State) ->
                 rebar_compiler:compile_analyzed(DAG, AppInfo, Ctx)
             end,
             ReorderedApps
+         ),
+         {ExtraCtx, ReorderedExtraApps} = rebar_compiler:analyze_all_extras(DAG, Apps),
+         lists:foreach(
+            fun(AppInfo) ->
+                rebar_compiler:compile_analyzed(DAG, AppInfo, ExtraCtx)
+            end,
+            ReorderedExtraApps
         )
     end || DAG <- DAGs],
     ok.
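For code outside rebar_prv_compile (e.g. plugins), the "=< 3.13.0" entry point rebar_compiler:compile_all(Compilers, AppInfo) keeps working and now wraps this flow with a throwaway DAG per compiler. The new two-phase API additionally lets several apps share one DAG per compiler. A rough sketch based on the compile_all/2 body in this patch, assuming a hypothetical compile_with/3 wrapper (the undefined label and empty CritMeta mirror compile_all/2 above):

    compile_with(Compiler, Apps, OutDir) ->
        G = rebar_compiler_dag:init(OutDir, Compiler, undefined, []),
        %% Phase 1: analyze all apps against the shared DAG; returns the
        %% analysis contexts and the apps sorted in a safe compile order.
        {Ctx, SortedApps} = rebar_compiler:analyze_all({Compiler, G}, Apps),
        %% Phase 2: compile each app against the shared analysis context.
        [ok = rebar_compiler:compile_analyzed({Compiler, G}, App, Ctx)
         || App <- SortedApps],
        rebar_compiler_dag:maybe_store(G, OutDir, Compiler, undefined, []),
        rebar_compiler_dag:terminate(G),
        ok.

Extra src dirs would be handled the same way by calling rebar_compiler:analyze_all_extras/2 on the same DAG, as build_rebar3_apps does above.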