@ -0,0 +1,13 @@ | |||||
# EditorConfig file: http://EditorConfig.org | |||||
# Top-most EditorConfig file. | |||||
root = true | |||||
# Unix-style newlines, indent style of 4 spaces, with a newline ending every file.
[*] | |||||
end_of_line = lf | |||||
insert_final_newline = true | |||||
charset = utf-8 | |||||
trim_trailing_whitespace = true | |||||
indent_style = space | |||||
indent_size = 4 |
@ -0,0 +1,31 @@ | |||||
### Pre-Check ### | |||||
- If you are filing a bug, please do a quick search of existing issues first
- For bugs, mention if you are willing or interested in helping fix the issue | |||||
- For questions or support, it helps to include context around your project or problem | |||||
- Think of a descriptive title (more descriptive than 'feature X is broken' unless it is fully broken) | |||||
### Environment ### | |||||
- Add the result of `rebar3 report` to your message: | |||||
``` | |||||
$ rebar3 report "my failing command" | |||||
... | |||||
``` | |||||
- Verify whether the version of rebar3 you're running is the latest release (see https://github.com/erlang/rebar3/releases) | |||||
- If possible, include information about your project and its structure. Open source projects or examples are always easier to debug. | |||||
If you can provide an example code base to reproduce the issue on, we will generally be able to provide more help, and faster. | |||||
### Current behaviour ### | |||||
Describe the current behaviour. In case of a failure, crash, or exception, please include the result of running the command with debug information: | |||||
``` | |||||
DEBUG=1 rebar3 <my failing command> | |||||
``` | |||||
### Expected behaviour ### | |||||
Describe what you expected to happen. |
@ -1,25 +1,48 @@ | |||||
sudo: false | |||||
language: erlang
install: 'true' | |||||
otp_release: | |||||
- 19.0 | |||||
- 18.0 | |||||
- 17.5 | |||||
- R16B03-1 | |||||
- R15B03 | |||||
before_script: "./bootstrap" | |||||
script: "./rebar3 ct" | |||||
matrix: | |||||
include: | |||||
- os: linux | |||||
otp_release: 17.5 | |||||
- os: linux | |||||
otp_release: 18.3 | |||||
- os: linux | |||||
otp_release: 19.3 | |||||
- os: linux | |||||
otp_release: 20.0 | |||||
- os: linux | |||||
otp_release: 21.0 | |||||
- os: osx | |||||
language: generic | |||||
before_script: | |||||
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi | |||||
## should eventually use a tap that has previous erlang versions here | |||||
## as this only uses the latest erlang available via brew | |||||
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install erlang; fi | |||||
script: "./bootstrap && ./rebar3 ct" | |||||
branches:
only:
- master
- hex_core | |||||
cache:
directories:
- "$HOME/.cache/rebar3/hex/default"
deploy:
provider: releases | |||||
api_key: | |||||
secure: MjloYuaQF3cd3Oab57zqwPDLPqt5MDgBIrRLpXOQwNovr2tnkKd4aJK3QJ3pTxvZievjgl+qIYI1IZyjuRV37nkjAfMw14iig959wi0k8XTJoMdylVxE5X7hk4SiWhX/ycnJx3C28PPw1OitGTF76HAJDMgEelNdoNt+hvjvDEo= | |||||
file: rebar3 | |||||
on: | |||||
repo: erlang/rebar3 | |||||
tags: true | |||||
- provider: releases | |||||
api_key: | |||||
secure: MjloYuaQF3cd3Oab57zqwPDLPqt5MDgBIrRLpXOQwNovr2tnkKd4aJK3QJ3pTxvZievjgl+qIYI1IZyjuRV37nkjAfMw14iig959wi0k8XTJoMdylVxE5X7hk4SiWhX/ycnJx3C28PPw1OitGTF76HAJDMgEelNdoNt+hvjvDEo= | |||||
file: rebar3 | |||||
on: | |||||
repo: erlang/rebar3 | |||||
tags: true | |||||
- provider: s3 | |||||
access_key_id: AKIAJAPYAQEFYCYSNL7Q | |||||
secret_access_key: | |||||
secure: "BUv2KQABv0Q4e8DAVNBRTc/lXHWt27yCN46Fdgo1IrcSSIiP+hq2yXzQcXLbPwkEu6pxUZQtL3mvKbt6l7uw3wFrcRfFAi1PGTITAW8MTmxtwcZIBcHSk3XOzDbkK+fYYcaddszmt7hDzzEFPtmYXiNgnaMIVeynhQLgcCcIRRQ=" | |||||
skip_cleanup: true | |||||
local-dir: _build/prod/bin | |||||
bucket: "rebar3-nightly" | |||||
acl: public_read | |||||
on: | |||||
repo: erlang/rebar3 | |||||
branch: master | |||||
condition: $TRAVIS_OTP_RELEASE = "17.5" |
@ -0,0 +1,20 @@ | |||||
version: "{build}" | |||||
branches: | |||||
only: | |||||
- master | |||||
environment: | |||||
matrix: | |||||
- erlang_vsn: 19.2 | |||||
- erlang_vsn: 18.3 | |||||
- erlang_vsn: 17.5 | |||||
install: | |||||
- ps: choco install erlang --version $env:erlang_vsn | |||||
build_script: | |||||
- ps: cmd.exe /c 'bootstrap.bat' | |||||
test_script: | |||||
- ps: cmd.exe /c 'rebar3.cmd ct' | |||||
notifications: | |||||
- provider: GitHubPullRequest | |||||
on_build_success: true | |||||
on_build_failure: true | |||||
on_build_status_changed: false |
@ -0,0 +1,25 @@ | |||||
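%% Paste the expressions below into a `rebar3 shell' session to regenerate
%% the COMMANDS section of the man page; the formatted troff output is
%% written to the file "commands.out".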
f(), | |||||
P = application:get_env(rebar, providers, []), | |||||
S = lists:foldl(fun(P, S) -> {ok, S2} = P:init(S), S2 end, rebar_state:new(), P), | |||||
PS = rebar_state:providers(S), | |||||
DP = lists:keysort(2,providers:get_providers_by_namespace(default, PS)), | |||||
f(Str), | |||||
Str = [begin | |||||
Name = element(2,Pn), | |||||
Desc = element(8,Pn), | |||||
Opts = element(10,Pn), | |||||
OptShort = [case {Short,Long} of | |||||
{undefined,undefined} -> ""; | |||||
{undefined,_} -> ["[\\fI--",Long,"\\fR] "]; | |||||
{_,undefined} -> ["[\\fI-",Short,"\\fR] "]; | |||||
{_,_} -> ["[\\fI-",Short,"\\fR|\\fI--",Long,"\\fR] "] | |||||
end || {_,Short,Long,_,_Desc} <- Opts], | |||||
OptLong = [case {Short,Long} of | |||||
{undefined,undefined} -> ""; | |||||
{_,undefined} -> [".IP\n\\fI-",Short,"\\fR: ", Desc, "\n"]; | |||||
{_,_} -> [".IP\n\\fI--",Long,"\\fR: ", Desc, "\n"] | |||||
end || {_,Short,Long,_,Desc} <- Opts], | |||||
[".TP\n", | |||||
"\\fB", atom_to_list(element(2,Pn)), "\\fR ", OptShort, "\n", | |||||
Desc, "\n", OptLong] end || Pn <- DP, element(5,Pn) == true], | |||||
file:write_file("commands.out", Str). |
@ -0,0 +1,441 @@ | |||||
.TH "REBAR3" "1" "November 2018" "Erlang" | |||||
.SH NAME | |||||
\fBrebar3\fR \- tool for working with Erlang projects | |||||
.SH "SYNOPSIS" | |||||
\fBrebar3\fR \fB\-\-version\fR | |||||
.br | |||||
\fBrebar3\fR \fBhelp\fR | |||||
.br | |||||
\fBrebar3\fR \fIcommand\fR [\fIoptions\fR] \.\.\. | |||||
.SH "DESCRIPTION" | |||||
Rebar3 is an Erlang tool that makes it easy to create, develop, and release Erlang libraries, applications, and systems in a repeatable manner\. | |||||
Full documentation at \fIhttp://www.rebar3.org/\fR | |||||
.SH "ESSENTIAL COMMANDS" | |||||
For the full command list, see the \fICOMMANDS\fR section\. | |||||
.P | |||||
With \fBrebar3 help <task>\fR, commands and plugins will display their own help information\. | |||||
.TP | |||||
\fBcompile\fR | |||||
Compile the current project | |||||
.TP | |||||
\fBnew\fR (\fBhelp [\fItemplate\fR])|\fItemplate\fR | |||||
Show information about templates or use one | |||||
.TP | |||||
\fBupdate\fR | |||||
Fetch the newest version of the Hex index | |||||
.TP | |||||
\fBdo\fR \fIcommand\fR[,\fIcommand\fR,...] | |||||
Run one or more commands in a sequence | |||||
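.IP
For example, \fBrebar3 do clean, compile, edoc\fR runs the three tasks in order.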
.TP | |||||
\fBshell\fR | |||||
Start the current project in a REPL\. You can then use \fBr3:do(\fItask\fR)\fR to call it on the project without dropping the current state. | |||||
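.IP
For example, \fBr3:do(compile)\fR recompiles the project from the running shell.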
.SH "COMMANDS" | |||||
.\" this section is generated by running the contents of 'commands' in rebar3 shell
.TP | |||||
\fBas\fR | |||||
Higher order provider for running multiple tasks in a sequence under certain profiles.
.TP | |||||
\fBclean\fR [\fI-a\fR|\fI--all\fR] [\fI-p\fR|\fI--profile\fR] | |||||
Remove compiled beam files from apps. | |||||
.IP | |||||
\fI--all\fR: Clean all apps, including deps
.IP | |||||
\fI--profile\fR: Clean under profile. Equivalent to `rebar3 as <profile> clean` | |||||
.TP | |||||
\fBcompile\fR [\fI-d\fR|\fI--deps_only\fR] | |||||
Compile the applications' .app.src and .erl files.
.IP | |||||
\fI--deps_only\fR: Only compile dependencies, no project apps will be built. | |||||
.TP | |||||
\fBcover\fR [\fI-r\fR|\fI--reset\fR] [\fI-v\fR|\fI--verbose\fR] [\fI-m\fR|\fI--min_coverage\fR] | |||||
Perform coverage analysis. | |||||
.IP | |||||
\fI--reset\fR: Reset all coverdata. | |||||
.IP | |||||
\fI--verbose\fR: Print coverage analysis. | |||||
.IP | |||||
\fI--min_coverage\fR: Mandate a coverage percentage required to succeed (0..100) | |||||
.TP | |||||
\fBct\fR [\fI--dir\fR] [\fI--suite\fR] [\fI--group\fR] [\fI--case\fR] [\fI--label\fR] [\fI--config\fR] [\fI--spec\fR] [\fI--join_specs\fR] [\fI--allow_user_terms\fR] [\fI--logdir\fR] [\fI--logopts\fR] [\fI--verbosity\fR] [\fI-c\fR|\fI--cover\fR] [\fI--cover_export_name\fR] [\fI--repeat\fR] [\fI--duration\fR] [\fI--until\fR] [\fI--force_stop\fR] [\fI--basic_html\fR] [\fI--stylesheet\fR] [\fI--decrypt_key\fR] [\fI--decrypt_file\fR] [\fI--abort_if_missing_suites\fR] [\fI--multiply_timetraps\fR] [\fI--scale_timetraps\fR] [\fI--create_priv_dir\fR] [\fI--include\fR] [\fI--readable\fR] [\fI-v\fR|\fI--verbose\fR] [\fI--name\fR] [\fI--sname\fR] [\fI--setcookie\fR] [\fI--sys_config\fR] [\fI--compile_only\fR] [\fI--retry\fR] | |||||
Run Common Tests. | |||||
.IP | |||||
\fI--dir\fR: List of additional directories containing test suites | |||||
.IP | |||||
\fI--suite\fR: List of test suites to run | |||||
.IP | |||||
\fI--group\fR: List of test groups to run | |||||
.IP | |||||
\fI--case\fR: List of test cases to run | |||||
.IP | |||||
\fI--label\fR: Test label | |||||
.IP | |||||
\fI--config\fR: List of config files | |||||
.IP | |||||
\fI--spec\fR: List of test specifications | |||||
.IP | |||||
\fI--join_specs\fR: Merge all test specifications and perform a single test run | |||||
.IP | |||||
\fI--allow_user_terms\fR: Allow user defined config values in config files | |||||
.IP | |||||
\fI--logdir\fR: Log folder | |||||
.IP | |||||
\fI--logopts\fR: Options for common test logging | |||||
.IP | |||||
\fI--verbosity\fR: Verbosity | |||||
.IP | |||||
\fI--cover\fR: Generate cover data | |||||
.IP | |||||
\fI--cover_export_name\fR: Base name of the coverdata file to write | |||||
.IP | |||||
\fI--repeat\fR: How often to repeat tests | |||||
.IP | |||||
\fI--duration\fR: Max runtime (format: HHMMSS) | |||||
.IP | |||||
\fI--until\fR: Run until (format: HHMMSS) | |||||
.IP | |||||
\fI--force_stop\fR: Force stop on test timeout (true | false | skip_rest) | |||||
.IP | |||||
\fI--basic_html\fR: Show basic HTML | |||||
.IP | |||||
\fI--stylesheet\fR: CSS stylesheet to apply to html output | |||||
.IP | |||||
\fI--decrypt_key\fR: Path to key for decrypting config | |||||
.IP | |||||
\fI--decrypt_file\fR: Path to file containing key for decrypting config | |||||
.IP | |||||
\fI--abort_if_missing_suites\fR: Abort if suites are missing | |||||
.IP | |||||
\fI--multiply_timetraps\fR: Multiply timetraps
.IP | |||||
\fI--scale_timetraps\fR: Scale timetraps | |||||
.IP | |||||
\fI--create_priv_dir\fR: Create priv dir (auto_per_run | auto_per_tc | manual_per_tc) | |||||
.IP | |||||
\fI--include\fR: Directories containing additional include files | |||||
.IP | |||||
\fI--readable\fR: Shows test case names and only displays logs to shell on failures (true | compact | false) | |||||
.IP | |||||
\fI--verbose\fR: Verbose output | |||||
.IP | |||||
\fI--name\fR: Gives a long name to the node | |||||
.IP | |||||
\fI--sname\fR: Gives a short name to the node | |||||
.IP | |||||
\fI--setcookie\fR: Sets the cookie if the node is distributed | |||||
.IP | |||||
\fI--sys_config\fR: List of application config files | |||||
.IP | |||||
\fI--compile_only\fR: Compile modules in the project with the test configuration but do not run the tests | |||||
.IP | |||||
\fI--retry\fR: Experimental feature. If a specification of previously failing tests is found, run them.
.TP | |||||
\fBdeps\fR | |||||
List dependencies | |||||
.TP | |||||
\fBdialyzer\fR [\fI-u\fR|\fI--update-plt\fR] [\fI-s\fR|\fI--succ-typings\fR] | |||||
Run the Dialyzer analyzer on the project. | |||||
.IP | |||||
\fI--update-plt\fR: Enable updating the PLT. Default: true | |||||
.IP | |||||
\fI--succ-typings\fR: Enable success typing analysis. Default: true | |||||
.TP | |||||
\fBdo\fR | |||||
Higher order provider for running multiple tasks in a sequence. | |||||
.TP | |||||
\fBedoc\fR | |||||
Generate documentation using edoc. | |||||
.TP | |||||
\fBescriptize\fR | |||||
Generate escript archive. | |||||
.TP | |||||
\fBeunit\fR [\fI--app\fR] [\fI--application\fR] [\fI-c\fR|\fI--cover\fR] [\fI--cover_export_name\fR] [\fI-d\fR|\fI--dir\fR] [\fI-f\fR|\fI--file\fR] [\fI-m\fR|\fI--module\fR] [\fI-s\fR|\fI--suite\fR] [\fI-v\fR|\fI--verbose\fR] [\fI--name\fR] [\fI--sname\fR] [\fI--setcookie\fR] | |||||
Run EUnit Tests. | |||||
.IP | |||||
\fI--app\fR: Comma separated list of application test suites to run. Equivalent to `[{application, App}]`. | |||||
.IP | |||||
\fI--application\fR: Comma separated list of application test suites to run. Equivalent to `[{application, App}]`. | |||||
.IP | |||||
\fI--cover\fR: Generate cover data. Defaults to false. | |||||
.IP | |||||
\fI--cover_export_name\fR: Base name of the coverdata file to write | |||||
.IP | |||||
\fI--dir\fR: Comma separated list of dirs to load tests from. Equivalent to `[{dir, Dir}]`. | |||||
.IP | |||||
\fI--file\fR: Comma separated list of files to load tests from. Equivalent to `[{file, File}]`. | |||||
.IP | |||||
\fI--module\fR: Comma separated list of modules to load tests from. Equivalent to `[{module, Module}]`. | |||||
.IP | |||||
\fI--suite\fR: Comma separated list of modules to load tests from. Equivalent to `[{module, Module}]`. | |||||
.IP | |||||
\fI--verbose\fR: Verbose output. Defaults to false. | |||||
.IP | |||||
\fI--name\fR: Gives a long name to the node | |||||
.IP | |||||
\fI--sname\fR: Gives a short name to the node | |||||
.IP | |||||
\fI--setcookie\fR: Sets the cookie if the node is distributed | |||||
.TP | |||||
\fBget-deps\fR | |||||
Fetch dependencies. | |||||
.TP | |||||
\fBhelp\fR | |||||
Display a list of tasks or help for a given task or subtask. | |||||
.TP | |||||
\fBnew\fR [\fI-f\fR|\fI--force\fR] | |||||
Create new project from templates. | |||||
.IP | |||||
\fI--force\fR: overwrite existing files | |||||
.TP | |||||
\fBpath\fR [\fI--app\fR] [\fI--base\fR] [\fI--bin\fR] [\fI--ebin\fR] [\fI--lib\fR] [\fI--priv\fR] [\fI-s\fR|\fI--separator\fR] [\fI--src\fR] [\fI--rel\fR] | |||||
Print paths to build dirs in current profile. | |||||
.IP | |||||
\fI--app\fR: Comma separated list of applications to return paths for. | |||||
.IP | |||||
\fI--base\fR: Return the `base' path of the current profile. | |||||
.IP | |||||
\fI--bin\fR: Return the `bin' path of the current profile. | |||||
.IP | |||||
\fI--ebin\fR: Return all `ebin' paths of the current profile's applications. | |||||
.IP | |||||
\fI--lib\fR: Return the `lib' path of the current profile. | |||||
.IP | |||||
\fI--priv\fR: Return the `priv' path of the current profile's applications. | |||||
.IP | |||||
\fI--separator\fR: In case of multiple return paths, the separator character to use to join them. | |||||
.IP | |||||
\fI--src\fR: Return the `src' path of the current profile's applications. | |||||
.IP | |||||
\fI--rel\fR: Return the `rel' path of the current profile. | |||||
.TP | |||||
\fBpkgs\fR | |||||
List information for a package. | |||||
.TP | |||||
\fBrelease\fR [\fI-n\fR|\fI--relname\fR] [\fI-v\fR|\fI--relvsn\fR] [\fI-g\fR|\fI--goal\fR] [\fI-u\fR|\fI--upfrom\fR] [\fI-o\fR|\fI--output-dir\fR] [\fI-h\fR|\fI--help\fR] [\fI-l\fR|\fI--lib-dir\fR] [\fI-p\fR|\fI--path\fR] [\fI--default-libs\fR] [\fI-V\fR|\fI--verbose\fR] [\fI-d\fR|\fI--dev-mode\fR] [\fI-i\fR|\fI--include-erts\fR] [\fI-a\fR|\fI--override\fR] [\fI-c\fR|\fI--config\fR] [\fI--overlay_vars\fR] [\fI--vm_args\fR] [\fI--sys_config\fR] [\fI--system_libs\fR] [\fI--version\fR] [\fI-r\fR|\fI--root\fR] | |||||
Build release of project. | |||||
.IP | |||||
\fI--relname\fR: Specify the name for the release that will be generated | |||||
.IP | |||||
\fI--relvsn\fR: Specify the version for the release | |||||
.IP | |||||
\fI--goal\fR: Specify a target constraint on the system. These are usually the OTP | |||||
.IP | |||||
\fI--upfrom\fR: Only valid with relup target, specify the release to upgrade from | |||||
.IP | |||||
\fI--output-dir\fR: The output directory for the release. This is `./` by default. | |||||
.IP | |||||
\fI--help\fR: Print usage | |||||
.IP | |||||
\fI--lib-dir\fR: Additional dir that should be searched for OTP Apps | |||||
.IP | |||||
\fI--path\fR: Additional dir to add to the code path | |||||
.IP | |||||
\fI--default-libs\fR: Whether to use the default system-added lib dirs (if false, you must add them all manually). Default is true
.IP | |||||
\fI--verbose\fR: Verbosity level, may be between 0 and 3
.IP | |||||
\fI--dev-mode\fR: Symlink the applications and configuration into the release instead of copying | |||||
.IP | |||||
\fI--include-erts\fR: If true, include a copy of the erts used to build with; if a path, include the erts at that path; if false, do not include erts
.IP | |||||
\fI--override\fR: Provide an app name and a directory to override in the form <appname>:<app directory> | |||||
.IP | |||||
\fI--config\fR: The path to a config file | |||||
.IP | |||||
\fI--overlay_vars\fR: Path to a file of overlay variables | |||||
.IP | |||||
\fI--vm_args\fR: Path to a file to use for vm.args | |||||
.IP | |||||
\fI--sys_config\fR: Path to a file to use for sys.config | |||||
.IP | |||||
\fI--system_libs\fR: Path to dir of Erlang system libs | |||||
.IP | |||||
\fI--version\fR: Print relx version | |||||
.IP | |||||
\fI--root\fR: The project root directory | |||||
.TP | |||||
\fBrelup\fR [\fI-n\fR|\fI--relname\fR] [\fI-v\fR|\fI--relvsn\fR] [\fI-g\fR|\fI--goal\fR] [\fI-u\fR|\fI--upfrom\fR] [\fI-o\fR|\fI--output-dir\fR] [\fI-h\fR|\fI--help\fR] [\fI-l\fR|\fI--lib-dir\fR] [\fI-p\fR|\fI--path\fR] [\fI--default-libs\fR] [\fI-V\fR|\fI--verbose\fR] [\fI-d\fR|\fI--dev-mode\fR] [\fI-i\fR|\fI--include-erts\fR] [\fI-a\fR|\fI--override\fR] [\fI-c\fR|\fI--config\fR] [\fI--overlay_vars\fR] [\fI--vm_args\fR] [\fI--sys_config\fR] [\fI--system_libs\fR] [\fI--version\fR] [\fI-r\fR|\fI--root\fR] | |||||
Create relup of releases. | |||||
.IP | |||||
\fI--relname\fR: Specify the name for the release that will be generated | |||||
.IP | |||||
\fI--relvsn\fR: Specify the version for the release | |||||
.IP | |||||
\fI--goal\fR: Specify a target constraint on the system. These are usually the OTP | |||||
.IP | |||||
\fI--upfrom\fR: Only valid with relup target, specify the release to upgrade from | |||||
.IP | |||||
\fI--output-dir\fR: The output directory for the release. This is `./` by default. | |||||
.IP | |||||
\fI--help\fR: Print usage | |||||
.IP | |||||
\fI--lib-dir\fR: Additional dir that should be searched for OTP Apps | |||||
.IP | |||||
\fI--path\fR: Additional dir to add to the code path | |||||
.IP | |||||
\fI--default-libs\fR: Whether to use the default system-added lib dirs (if false, you must add them all manually). Default is true
.IP | |||||
\fI--verbose\fR: Verbosity level, may be between 0 and 3
.IP | |||||
\fI--dev-mode\fR: Symlink the applications and configuration into the release instead of copying | |||||
.IP | |||||
\fI--include-erts\fR: If true, include a copy of the erts used to build with; if a path, include the erts at that path; if false, do not include erts
.IP | |||||
\fI--override\fR: Provide an app name and a directory to override in the form <appname>:<app directory> | |||||
.IP | |||||
\fI--config\fR: The path to a config file | |||||
.IP | |||||
\fI--overlay_vars\fR: Path to a file of overlay variables | |||||
.IP | |||||
\fI--vm_args\fR: Path to a file to use for vm.args | |||||
.IP | |||||
\fI--sys_config\fR: Path to a file to use for sys.config | |||||
.IP | |||||
\fI--system_libs\fR: Path to dir of Erlang system libs | |||||
.IP | |||||
\fI--version\fR: Print relx version | |||||
.IP | |||||
\fI--root\fR: The project root directory | |||||
.TP | |||||
\fBreport\fR | |||||
Provide a crash report to be sent to the rebar3 issues page. | |||||
.TP | |||||
\fBshell\fR [\fI--config\fR] [\fI--name\fR] [\fI--sname\fR] [\fI--setcookie\fR] [\fI--script\fR] [\fI--apps\fR] [\fI--start-clean\fR] [\fI--user_drv_args\fR] | |||||
Run shell with project apps and deps in path. | |||||
.IP | |||||
\fI--config\fR: Path to the config file to use. Defaults to {shell, [{config, File}]} and then the relx sys.config file if not specified. | |||||
.IP | |||||
\fI--name\fR: Gives a long name to the node. | |||||
.IP | |||||
\fI--sname\fR: Gives a short name to the node. | |||||
.IP | |||||
\fI--setcookie\fR: Sets the cookie if the node is distributed. | |||||
.IP | |||||
\fI--script\fR: Path to an escript file to run before starting the project apps. Defaults to rebar.config {shell, [{script_file, File}]} if not specified. | |||||
.IP | |||||
\fI--apps\fR: A list of apps to boot before starting the shell. (E.g. --apps app1,app2,app3) Defaults to rebar.config {shell, [{apps, Apps}]} or relx apps if not specified. | |||||
.IP | |||||
\fI--start-clean\fR: Cancel any applications in the 'apps' list or release. | |||||
.IP | |||||
\fI--user_drv_args\fR: Arguments passed to user_drv start function for creating custom shells. | |||||
.TP | |||||
\fBtar\fR [\fI-n\fR|\fI--relname\fR] [\fI-v\fR|\fI--relvsn\fR] [\fI-g\fR|\fI--goal\fR] [\fI-u\fR|\fI--upfrom\fR] [\fI-o\fR|\fI--output-dir\fR] [\fI-h\fR|\fI--help\fR] [\fI-l\fR|\fI--lib-dir\fR] [\fI-p\fR|\fI--path\fR] [\fI--default-libs\fR] [\fI-V\fR|\fI--verbose\fR] [\fI-d\fR|\fI--dev-mode\fR] [\fI-i\fR|\fI--include-erts\fR] [\fI-a\fR|\fI--override\fR] [\fI-c\fR|\fI--config\fR] [\fI--overlay_vars\fR] [\fI--vm_args\fR] [\fI--sys_config\fR] [\fI--system_libs\fR] [\fI--version\fR] [\fI-r\fR|\fI--root\fR] | |||||
Tar archive of the release built from the project.
.IP | |||||
\fI--relname\fR: Specify the name for the release that will be generated | |||||
.IP | |||||
\fI--relvsn\fR: Specify the version for the release | |||||
.IP | |||||
\fI--goal\fR: Specify a target constraint on the system. These are usually the OTP | |||||
.IP | |||||
\fI--upfrom\fR: Only valid with relup target, specify the release to upgrade from | |||||
.IP | |||||
\fI--output-dir\fR: The output directory for the release. This is `./` by default. | |||||
.IP | |||||
\fI--help\fR: Print usage | |||||
.IP | |||||
\fI--lib-dir\fR: Additional dir that should be searched for OTP Apps | |||||
.IP | |||||
\fI--path\fR: Additional dir to add to the code path | |||||
.IP | |||||
\fI--default-libs\fR: Whether to use the default system-added lib dirs (if false, you must add them all manually). Default is true
.IP | |||||
\fI--verbose\fR: Verbosity level, may be between 0 and 3
.IP | |||||
\fI--dev-mode\fR: Symlink the applications and configuration into the release instead of copying | |||||
.IP | |||||
\fI--include-erts\fR: If true, include a copy of the erts used to build with; if a path, include the erts at that path; if false, do not include erts
.IP | |||||
\fI--override\fR: Provide an app name and a directory to override in the form <appname>:<app directory> | |||||
.IP | |||||
\fI--config\fR: The path to a config file | |||||
.IP | |||||
\fI--overlay_vars\fR: Path to a file of overlay variables | |||||
.IP | |||||
\fI--vm_args\fR: Path to a file to use for vm.args | |||||
.IP | |||||
\fI--sys_config\fR: Path to a file to use for sys.config | |||||
.IP | |||||
\fI--system_libs\fR: Path to dir of Erlang system libs | |||||
.IP | |||||
\fI--version\fR: Print relx version | |||||
.IP | |||||
\fI--root\fR: The project root directory | |||||
.TP | |||||
\fBtree\fR [\fI-v\fR|\fI--verbose\fR] | |||||
Print dependency tree. | |||||
.IP | |||||
\fI--verbose\fR: Print repo and branch/tag/ref for git and hg deps | |||||
.TP | |||||
\fBunlock\fR | |||||
Unlock dependencies. | |||||
.TP | |||||
\fBupdate\fR | |||||
Update package index. | |||||
.TP | |||||
\fBupgrade\fR | |||||
Upgrade dependencies. | |||||
.TP | |||||
\fBversion\fR | |||||
Print version for rebar and current Erlang. | |||||
.TP | |||||
\fBxref\fR | |||||
Run cross reference analysis. | |||||
.SH ENVIRONMENT | |||||
Environment variables allow overall rebar3 control across command boundaries. | |||||
.TP | |||||
\fBREBAR_PROFILE\fR | |||||
Choose a default profile. Defaults to \fBdefault\fR | |||||
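.IP
For example, \fBREBAR_PROFILE=test rebar3 compile\fR compiles the project under the \fItest\fR profile.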
.TP | |||||
\fBHEX_CDN\fR | |||||
Pick an alternative hex mirror. | |||||
.TP | |||||
\fBREBAR_CACHE_DIR\fR | |||||
Location of the directory for the local cache. Defaults to \fI~/.cache/rebar3\fR.
.TP | |||||
\fBQUIET\fR | |||||
Only display errors. | |||||
.TP | |||||
\fBDEBUG\fR | |||||
Display debug information. | |||||
.TP | |||||
\fBREBAR_COLOR\fR=\fIhigh\fR|\fIlow\fR | |||||
How much color to show in the terminal. Defaults to \fIhigh\fR. | |||||
.TP | |||||
\fBREBAR_CONFIG\fR | |||||
Name of rebar configuration files. Defaults to \fIrebar.config\fR | |||||
.TP | |||||
\fBREBAR_GIT_CLONE_OPTIONS\fR | |||||
Arguments to add after each \fIgit clone\fR operation. For example, the value \fI--reference ~/.cache/repos.reference\fR makes it possible to keep a cache of all fetched repositories across builds
.SH "CONFIGURATION FILE OPTIONS"
See \fIhttp://www.rebar3.org/v3.0/docs/configuration\fR |
@ -1,29 +1,191 @@ | |||||
Copyright (c) {{copyright_year}}, {{author_name}} <{{author_email}}>. | |||||
All rights reserved. | |||||
Redistribution and use in source and binary forms, with or without | |||||
modification, are permitted provided that the following conditions are | |||||
met: | |||||
* Redistributions of source code must retain the above copyright | |||||
notice, this list of conditions and the following disclaimer. | |||||
* Redistributions in binary form must reproduce the above copyright | |||||
notice, this list of conditions and the following disclaimer in the | |||||
documentation and/or other materials provided with the distribution. | |||||
* The names of its contributors may not be used to endorse or promote | |||||
products derived from this software without specific prior written | |||||
permission. | |||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||||
Apache License | |||||
Version 2.0, January 2004 | |||||
http://www.apache.org/licenses/ | |||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | |||||
1. Definitions. | |||||
"License" shall mean the terms and conditions for use, reproduction, | |||||
and distribution as defined by Sections 1 through 9 of this document. | |||||
"Licensor" shall mean the copyright owner or entity authorized by | |||||
the copyright owner that is granting the License. | |||||
"Legal Entity" shall mean the union of the acting entity and all | |||||
other entities that control, are controlled by, or are under common | |||||
control with that entity. For the purposes of this definition, | |||||
"control" means (i) the power, direct or indirect, to cause the | |||||
direction or management of such entity, whether by contract or | |||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the | |||||
outstanding shares, or (iii) beneficial ownership of such entity. | |||||
"You" (or "Your") shall mean an individual or Legal Entity | |||||
exercising permissions granted by this License. | |||||
"Source" form shall mean the preferred form for making modifications, | |||||
including but not limited to software source code, documentation | |||||
source, and configuration files. | |||||
"Object" form shall mean any form resulting from mechanical | |||||
transformation or translation of a Source form, including but | |||||
not limited to compiled object code, generated documentation, | |||||
and conversions to other media types. | |||||
"Work" shall mean the work of authorship, whether in Source or | |||||
Object form, made available under the License, as indicated by a | |||||
copyright notice that is included in or attached to the work | |||||
(an example is provided in the Appendix below). | |||||
"Derivative Works" shall mean any work, whether in Source or Object | |||||
form, that is based on (or derived from) the Work and for which the | |||||
editorial revisions, annotations, elaborations, or other modifications | |||||
represent, as a whole, an original work of authorship. For the purposes | |||||
of this License, Derivative Works shall not include works that remain | |||||
separable from, or merely link (or bind by name) to the interfaces of, | |||||
the Work and Derivative Works thereof. | |||||
"Contribution" shall mean any work of authorship, including | |||||
the original version of the Work and any modifications or additions | |||||
to that Work or Derivative Works thereof, that is intentionally | |||||
submitted to Licensor for inclusion in the Work by the copyright owner | |||||
or by an individual or Legal Entity authorized to submit on behalf of | |||||
the copyright owner. For the purposes of this definition, "submitted" | |||||
means any form of electronic, verbal, or written communication sent | |||||
to the Licensor or its representatives, including but not limited to | |||||
communication on electronic mailing lists, source code control systems, | |||||
and issue tracking systems that are managed by, or on behalf of, the | |||||
Licensor for the purpose of discussing and improving the Work, but | |||||
excluding communication that is conspicuously marked or otherwise | |||||
designated in writing by the copyright owner as "Not a Contribution." | |||||
"Contributor" shall mean Licensor and any individual or Legal Entity | |||||
on behalf of whom a Contribution has been received by Licensor and | |||||
subsequently incorporated within the Work. | |||||
2. Grant of Copyright License. Subject to the terms and conditions of | |||||
this License, each Contributor hereby grants to You a perpetual, | |||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||||
copyright license to reproduce, prepare Derivative Works of, | |||||
publicly display, publicly perform, sublicense, and distribute the | |||||
Work and such Derivative Works in Source or Object form. | |||||
3. Grant of Patent License. Subject to the terms and conditions of | |||||
this License, each Contributor hereby grants to You a perpetual, | |||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||||
(except as stated in this section) patent license to make, have made, | |||||
use, offer to sell, sell, import, and otherwise transfer the Work, | |||||
where such license applies only to those patent claims licensable | |||||
by such Contributor that are necessarily infringed by their | |||||
Contribution(s) alone or by combination of their Contribution(s) | |||||
with the Work to which such Contribution(s) was submitted. If You | |||||
institute patent litigation against any entity (including a | |||||
cross-claim or counterclaim in a lawsuit) alleging that the Work | |||||
or a Contribution incorporated within the Work constitutes direct | |||||
or contributory patent infringement, then any patent licenses | |||||
granted to You under this License for that Work shall terminate | |||||
as of the date such litigation is filed. | |||||
4. Redistribution. You may reproduce and distribute copies of the | |||||
Work or Derivative Works thereof in any medium, with or without | |||||
modifications, and in Source or Object form, provided that You | |||||
meet the following conditions: | |||||
(a) You must give any other recipients of the Work or | |||||
Derivative Works a copy of this License; and | |||||
(b) You must cause any modified files to carry prominent notices | |||||
stating that You changed the files; and | |||||
(c) You must retain, in the Source form of any Derivative Works | |||||
that You distribute, all copyright, patent, trademark, and | |||||
attribution notices from the Source form of the Work, | |||||
excluding those notices that do not pertain to any part of | |||||
the Derivative Works; and | |||||
(d) If the Work includes a "NOTICE" text file as part of its | |||||
distribution, then any Derivative Works that You distribute must | |||||
include a readable copy of the attribution notices contained | |||||
within such NOTICE file, excluding those notices that do not | |||||
pertain to any part of the Derivative Works, in at least one | |||||
of the following places: within a NOTICE text file distributed | |||||
as part of the Derivative Works; within the Source form or | |||||
documentation, if provided along with the Derivative Works; or, | |||||
within a display generated by the Derivative Works, if and | |||||
wherever such third-party notices normally appear. The contents | |||||
of the NOTICE file are for informational purposes only and | |||||
do not modify the License. You may add Your own attribution | |||||
notices within Derivative Works that You distribute, alongside | |||||
or as an addendum to the NOTICE text from the Work, provided | |||||
that such additional attribution notices cannot be construed | |||||
as modifying the License. | |||||
You may add Your own copyright statement to Your modifications and | |||||
may provide additional or different license terms and conditions | |||||
for use, reproduction, or distribution of Your modifications, or | |||||
for any such Derivative Works as a whole, provided Your use, | |||||
reproduction, and distribution of the Work otherwise complies with | |||||
the conditions stated in this License. | |||||
5. Submission of Contributions. Unless You explicitly state otherwise, | |||||
any Contribution intentionally submitted for inclusion in the Work | |||||
by You to the Licensor shall be under the terms and conditions of | |||||
this License, without any additional terms or conditions. | |||||
Notwithstanding the above, nothing herein shall supersede or modify | |||||
the terms of any separate license agreement you may have executed | |||||
with Licensor regarding such Contributions. | |||||
6. Trademarks. This License does not grant permission to use the trade | |||||
names, trademarks, service marks, or product names of the Licensor, | |||||
except as required for reasonable and customary use in describing the | |||||
origin of the Work and reproducing the content of the NOTICE file. | |||||
7. Disclaimer of Warranty. Unless required by applicable law or | |||||
agreed to in writing, Licensor provides the Work (and each | |||||
Contributor provides its Contributions) on an "AS IS" BASIS, | |||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||||
implied, including, without limitation, any warranties or conditions | |||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | |||||
PARTICULAR PURPOSE. You are solely responsible for determining the | |||||
appropriateness of using or redistributing the Work and assume any | |||||
risks associated with Your exercise of permissions under this License. | |||||
8. Limitation of Liability. In no event and under no legal theory, | |||||
whether in tort (including negligence), contract, or otherwise, | |||||
unless required by applicable law (such as deliberate and grossly | |||||
negligent acts) or agreed to in writing, shall any Contributor be | |||||
liable to You for damages, including any direct, indirect, special, | |||||
incidental, or consequential damages of any character arising as a | |||||
result of this License or out of the use or inability to use the | |||||
Work (including but not limited to damages for loss of goodwill, | |||||
work stoppage, computer failure or malfunction, or any and all | |||||
other commercial damages or losses), even if such Contributor | |||||
has been advised of the possibility of such damages. | |||||
9. Accepting Warranty or Additional Liability. While redistributing | |||||
the Work or Derivative Works thereof, You may choose to offer, | |||||
and charge a fee for, acceptance of support, warranty, indemnity, | |||||
or other liability obligations and/or rights consistent with this | |||||
License. However, in accepting such obligations, You may act only | |||||
on Your own behalf and on Your sole responsibility, not on behalf | |||||
of any other Contributor, and only if You agree to indemnify, | |||||
defend, and hold each Contributor harmless for any liability | |||||
incurred by, or claims asserted against, such Contributor by reason | |||||
of your accepting any such warranty or additional liability. | |||||
END OF TERMS AND CONDITIONS | |||||
Copyright {{copyright_year}}, {{author_name}} <{{author_email}}>. | |||||
Licensed under the Apache License, Version 2.0 (the "License"); | |||||
you may not use this file except in compliance with the License. | |||||
You may obtain a copy of the License at | |||||
http://www.apache.org/licenses/LICENSE-2.0 | |||||
Unless required by applicable law or agreed to in writing, software | |||||
distributed under the License is distributed on an "AS IS" BASIS, | |||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
See the License for the specific language governing permissions and | |||||
limitations under the License. | |||||
@ -0,0 +1,7 @@ | |||||
{erl_opts, [debug_info]}. | |||||
{deps, []}. | |||||
{shell, [ | |||||
% {config, "config/sys.config"}, | |||||
{apps, [{{name}}]} | |||||
]}. |
@ -1,3 +1,4 @@ | |||||
{{=@@ @@=}} | |||||
[
{ {{name}}, []} | |||||
{@@name@@, []} | |||||
].
@ -0,0 +1,15 @@ | |||||
{description, "OTP structure for executable programs (alias of 'release' template)"}. | |||||
{variables, [ | |||||
{name, "myapp", "Name of the OTP release. An app with this name will also be created."}, | |||||
{desc, "An OTP application", "Short description of the release's main app's purpose"} | |||||
]}. | |||||
{template, "app.erl", "{{name}}/{{apps_dir}}/{{name}}/src/{{name}}_app.erl"}. | |||||
{template, "sup.erl", "{{name}}/{{apps_dir}}/{{name}}/src/{{name}}_sup.erl"}. | |||||
{template, "otp_app.app.src", "{{name}}/{{apps_dir}}/{{name}}/src/{{name}}.app.src"}. | |||||
{template, "relx_rebar.config", "{{name}}/rebar.config"}. | |||||
{template, "sys.config", "{{name}}/config/sys.config"}. | |||||
{template, "vm.args", "{{name}}/config/vm.args"}. | |||||
{template, "gitignore", "{{name}}/.gitignore"}. | |||||
{template, "LICENSE", "{{name}}/LICENSE"}. | |||||
{template, "README.md", "{{name}}/README.md"}. | |||||
@ -1,24 +1,28 @@ | |||||
{"1.1.0", | {"1.1.0", | ||||
[{<<"bbmustache">>,{pkg,<<"bbmustache">>,<<"1.0.4">>},0}, | |||||
{<<"certifi">>,{pkg,<<"certifi">>,<<"0.4.0">>},0}, | |||||
{<<"cf">>,{pkg,<<"cf">>,<<"0.2.1">>},0}, | |||||
{<<"cth_readable">>,{pkg,<<"cth_readable">>,<<"1.2.3">>},0}, | |||||
{<<"erlware_commons">>,{pkg,<<"erlware_commons">>,<<"0.21.0">>},0}, | |||||
{<<"eunit_formatters">>,{pkg,<<"eunit_formatters">>,<<"0.3.1">>},0}, | |||||
{<<"getopt">>,{pkg,<<"getopt">>,<<"0.8.2">>},0}, | |||||
{<<"providers">>,{pkg,<<"providers">>,<<"1.6.0">>},0}, | |||||
{<<"relx">>,{pkg,<<"relx">>,<<"3.19.0">>},0}, | |||||
{<<"ssl_verify_hostname">>,{pkg,<<"ssl_verify_hostname">>,<<"1.0.5">>},0}]}. | |||||
[{<<"bbmustache">>,{pkg,<<"bbmustache">>,<<"1.6.0">>},0}, | |||||
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.3.1">>},0}, | |||||
{<<"cf">>,{pkg,<<"cf">>,<<"0.2.2">>},0}, | |||||
{<<"cth_readable">>,{pkg,<<"cth_readable">>,<<"1.4.2">>},0}, | |||||
{<<"erlware_commons">>,{pkg,<<"erlware_commons">>,<<"1.3.0">>},0}, | |||||
{<<"eunit_formatters">>,{pkg,<<"eunit_formatters">>,<<"0.5.0">>},0}, | |||||
{<<"getopt">>,{pkg,<<"getopt">>,<<"1.0.1">>},0}, | |||||
{<<"hex_core">>,{pkg,<<"hex_core">>,<<"0.2.0">>},0}, | |||||
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.0">>},0}, | |||||
{<<"providers">>,{pkg,<<"providers">>,<<"1.7.0">>},0}, | |||||
{<<"relx">>,{pkg,<<"relx">>,<<"3.27.0">>},0}, | |||||
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.3">>},0}]}. | |||||
[
{pkg_hash,[
{<<"bbmustache">>, <<"7BA94F971C5AFD7B6617918A4BB74705E36CAB36EB84B19B6A1B7EE06427AA38">>}, | |||||
{<<"certifi">>, <<"A7966EFB868B179023618D29A407548F70C52466BF1849B9E8EBD0E34B7EA11F">>}, | |||||
{<<"cf">>, <<"69D0B1349FD4D7D4DC55B7F407D29D7A840BF9A1EF5AF529F1EBE0CE153FC2AB">>}, | |||||
{<<"cth_readable">>, <<"293120673DFF82F0768612C5282E35C40CACC1B6F94FE99077438FD3749D0E27">>}, | |||||
{<<"erlware_commons">>, <<"A04433071AD7D112EDEFC75AC77719DD3E6753E697AC09428FC83D7564B80B15">>}, | |||||
{<<"eunit_formatters">>, <<"7A6FC351EB5B873E2356B8852EB751E20C13A72FBCA03393CF682B8483509573">>}, | |||||
{<<"getopt">>, <<"B17556DB683000BA50370B16C0619DF1337E7AF7ECBF7D64FBF8D1D6BCE3109B">>}, | |||||
{<<"providers">>, <<"DB0E2F9043AE60C0155205FCD238D68516331D0E5146155E33D1E79DC452964A">>}, | |||||
{<<"relx">>, <<"286DD5244B4786F56AAC75D5C8E2D1FB4CFD306810D4EC8548F3AE1B3AADB8F7">>}, | |||||
{<<"ssl_verify_hostname">>, <<"2E73E068CD6393526F9FA6D399353D7C9477D6886BA005F323B592D389FB47BE">>}]} | |||||
{<<"bbmustache">>, <<"7AC372AEC621A69C369DF237FBD9986CAABCDD6341089FE5F42E5A7A4AC706B8">>}, | |||||
{<<"certifi">>, <<"D0F424232390BF47D82DA8478022301C561CF6445B5B5FB6A84D49A9E76D2639">>}, | |||||
{<<"cf">>, <<"7F2913FFF90ABCABD0F489896CFEB0B0674F6C8DF6C10B17A83175448029896C">>}, | |||||
{<<"cth_readable">>, <<"0F57B4EB7DA7F5438F422312245F9143A1B3118C11B6BAE5C3D1391C9EE88322">>}, | |||||
{<<"erlware_commons">>, <<"1705CF2AB4212EF235C21971A55E22E2A39055C05B9C65C8848126865F42A07A">>}, | |||||
{<<"eunit_formatters">>, <<"6A9133943D36A465D804C1C5B6E6839030434B8879C5600D7DDB5B3BAD4CCB59">>}, | |||||
{<<"getopt">>, <<"C73A9FA687B217F2FF79F68A3B637711BB1936E712B521D8CE466B29CBF7808A">>}, | |||||
{<<"hex_core">>, <<"3A7EACCFB8ADD3FF05D950C10ED5BDB5D0C48C988EBBC5D7AE2A55498F0EFF1B">>}, | |||||
{<<"parse_trans">>, <<"09765507A3C7590A784615CFD421D101AEC25098D50B89D7AA1D66646BC571C1">>}, | |||||
{<<"providers">>, <<"BBF730563914328EC2511D205E6477A94831DB7297DE313B3872A2B26C562EAB">>}, | |||||
{<<"relx">>, <<"96CC7663EDCC02A8117AB0C64FE6D15BE79760C08726ABEAD1DAACE11BFBF75D">>}, | |||||
{<<"ssl_verify_fun">>, <<"6C49665D4326E26CD4A5B7BD54AA442B33DADFB7C5D59A0D0CD0BF5534BBFBD7">>}]} | |||||
].
@ -0,0 +1,159 @@ | |||||
-module(cth_retry). | |||||
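%% @doc Common Test hook that records test cases which fail or are
%% auto-skipped and, when the hook terminates, writes them out as a
%% `retry.spec' file so that `rebar3 ct --retry' can re-run only those cases.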
%% Callbacks | |||||
-export([id/1]). | |||||
-export([init/2]). | |||||
-export([pre_init_per_suite/3]). | |||||
-export([post_init_per_suite/4]). | |||||
-export([pre_end_per_suite/3]). | |||||
-export([post_end_per_suite/4]). | |||||
-export([pre_init_per_group/3]). | |||||
-export([post_init_per_group/4]). | |||||
-export([pre_end_per_group/3]). | |||||
-export([post_end_per_group/4]). | |||||
-export([pre_init_per_testcase/3]). | |||||
-export([post_end_per_testcase/4]). | |||||
-export([on_tc_fail/3]). | |||||
-export([on_tc_skip/3, on_tc_skip/4]). | |||||
-export([terminate/1]). | |||||
-record(state, {id, suite, groups, acc=[]}). | |||||
%% @doc Return a unique id for this CTH. | |||||
id(_Opts) -> | |||||
{?MODULE, make_ref()}. | |||||
%% @doc Always called before any other callback function. Use this to initiate | |||||
%% any common state. | |||||
init(Id, _Opts) -> | |||||
{ok, #state{id=Id}}. | |||||
%% @doc Called before init_per_suite is called. | |||||
pre_init_per_suite(Suite,Config,State) -> | |||||
{Config, State#state{suite=Suite, groups=[]}}. | |||||
%% @doc Called after init_per_suite. | |||||
post_init_per_suite(_Suite,_Config,Return,State) -> | |||||
{Return, State}. | |||||
%% @doc Called before end_per_suite. | |||||
pre_end_per_suite(_Suite,Config,State) -> | |||||
{Config, State}. | |||||
%% @doc Called after end_per_suite. | |||||
post_end_per_suite(_Suite,_Config,Return,State) -> | |||||
{Return, State#state{suite=undefined, groups=[]}}. | |||||
%% @doc Called before each init_per_group. | |||||
pre_init_per_group(_Group,Config,State) -> | |||||
{Config, State}. | |||||
%% @doc Called after each init_per_group. | |||||
post_init_per_group(Group,_Config,Return, State=#state{groups=Groups}) -> | |||||
{Return, State#state{groups=[Group|Groups]}}. | |||||
%% @doc Called before each end_per_group.
pre_end_per_group(_Group,Config,State) -> | |||||
{Config, State}. | |||||
%% @doc Called after each end_per_group. | |||||
post_end_per_group(_Group,_Config,Return, State=#state{groups=Groups}) -> | |||||
{Return, State#state{groups=tl(Groups)}}. | |||||
%% @doc Called before each test case. | |||||
pre_init_per_testcase(_TC,Config,State) -> | |||||
{Config, State}. | |||||
%% @doc Called after each test case. | |||||
post_end_per_testcase(_TC,_Config,ok,State) -> | |||||
{ok, State}; | |||||
post_end_per_testcase(TC,_Config,Error,State=#state{suite=Suite, groups=Groups, acc=Acc}) -> | |||||
Test = case TC of | |||||
{_Group, Case} -> Case; | |||||
TC -> TC | |||||
end, | |||||
{Error, State#state{acc=[{Suite, Groups, Test}|Acc]}}. | |||||
%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group, | |||||
%% post_end_per_group and post_end_per_testcase if the suite, group or test case failed. | |||||
on_tc_fail(_TC, _Reason, State) -> | |||||
State. | |||||
%% @doc Called when a test case is skipped, either by user action
%% or because an init function failed. (>= 19.3)
on_tc_skip(Suite, TC, {tc_auto_skip, _}, State=#state{suite=Suite, groups=Groups, acc=Acc}) -> | |||||
NewAcc = case TC of | |||||
init_per_testcase -> Acc; | |||||
end_per_testcase -> Acc; | |||||
{init_per_group,_} -> Acc; | |||||
{end_per_group, _} -> Acc; | |||||
init_per_suite -> Acc; | |||||
end_per_suite -> Acc; | |||||
{_Group, Case} -> [{Suite, Groups, Case}|Acc]; | |||||
TC -> [{Suite, Groups, TC}|Acc] | |||||
end, | |||||
State#state{suite=Suite, acc=NewAcc}; | |||||
on_tc_skip(Suite, _TC, _Reason, State) -> | |||||
State#state{suite=Suite}. | |||||
%% @doc Called when a test case is skipped, either by user action
%% or because an init function failed. (Pre-19.3)
on_tc_skip(TC, {tc_auto_skip, _}, State=#state{suite=Suite, groups=Groups, acc=Acc}) -> | |||||
NewAcc = case TC of | |||||
init_per_testcase -> Acc; | |||||
end_per_testcase -> Acc; | |||||
{init_per_group,_} -> Acc; | |||||
{end_per_group, _} -> Acc; | |||||
init_per_suite -> Acc; | |||||
end_per_suite -> Acc; | |||||
{_Group, Case} -> [{Suite, Groups, Case}|Acc]; | |||||
TC -> [{Suite, Groups, TC}|Acc] | |||||
end, | |||||
State#state{acc=NewAcc}; | |||||
on_tc_skip(_TC, _Reason, State) -> | |||||
State. | |||||
%% @doc Called when the scope of the CTH is done | |||||
terminate(#state{acc=[]}) -> | |||||
ok; | |||||
terminate(#state{acc=Acc}) -> | |||||
Spec = to_spec(Acc), | |||||
{ok, Cwd} = file:get_cwd(), | |||||
Path = filename:join(lists:droplast(filename:split(Cwd))++["retry.spec"]), | |||||
io:format(user, | |||||
"EXPERIMENTAL: Writing retry specification at ~s~n" | |||||
" call rebar3 ct with '--retry' to re-run failing cases.~n", | |||||
[Path]), | |||||
file:write_file(Path, Spec), | |||||
ok. | |||||
%%% Helpers | |||||
to_spec(List) -> | |||||
[to_spec_entry(X) || X <- merge(List)]. | |||||
merge([]) -> []; | |||||
merge([{Suite, Groups, Case}|T]) when is_atom(Case) -> | |||||
merge([{Suite, Groups, [Case]}|T]); | |||||
merge([{Suite, Groups, Cases}, {Suite, Groups, Case} | T]) -> | |||||
merge([{Suite, Groups, [Case|Cases]}|T]); | |||||
merge([{Suite, Groups, Cases} | T]) -> | |||||
[{Suite, Groups, Cases} | merge(T)]. | |||||
to_spec_entry({Suite, [], Cases}) -> | |||||
Dir = filename:dirname(proplists:get_value(source, Suite:module_info(compile))), | |||||
io_lib:format("~p.~n", [{cases, Dir, Suite, Cases}]); | |||||
to_spec_entry({Suite, Groups, Cases}) -> | |||||
Dir = filename:dirname(proplists:get_value(source, Suite:module_info(compile))), | |||||
ExpandedGroups = expand_groups(lists:reverse(Groups)), | |||||
io_lib:format("~p.~n", [{groups, Dir, Suite, ExpandedGroups, {cases,Cases}}]). | |||||
expand_groups([Group]) -> | |||||
{Group, []}; | |||||
expand_groups([H|T]) -> | |||||
{H,[],[expand_groups(T)]}. | |||||
@ -1,7 +1,58 @@ | |||||
%%% external alias for rebar_agent | |||||
%%% @doc external alias for `rebar_agent' for more convenient | |||||
%%% calls from a shell. | |||||
-module(r3).
-export([do/1, do/2]). | |||||
-export([do/1, do/2, async_do/1, async_do/2, break/0, resume/0]). | |||||
-export(['$handle_undefined_function'/2]). | |||||
-include("rebar.hrl"). | |||||
%% @doc alias for `rebar_agent:do/1' | |||||
-spec do(atom()) -> ok | {error, term()}. | |||||
do(Command) -> rebar_agent:do(Command).
%% @doc alias for `rebar_agent:do/2' | |||||
-spec do(atom(), atom()) -> ok | {error, term()}. | |||||
do(Namespace, Command) -> rebar_agent:do(Namespace, Command).
%% @doc alias for `rebar_agent:async_do/1'
-spec async_do(atom()) -> ok | {error, term()}. | |||||
async_do(Command) -> rebar_agent:async_do(Command). | |||||
%% @doc alias for `rebar_agent:async_do/2'
-spec async_do(atom(), atom()) -> ok | {error, term()}. | |||||
async_do(Namespace, Command) -> rebar_agent:async_do(Namespace, Command). | |||||
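%% @doc breakpoint for commands started with `async_do/1,2': when the
%% agent is running an async command, block the calling process until
%% `resume/0' is called from the shell; otherwise this is a no-op.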
break() -> | |||||
case whereis(rebar_agent) of % is the shell running | |||||
undefined -> | |||||
ok; | |||||
Pid -> | |||||
{dictionary, Dict} = process_info(Pid, dictionary), | |||||
case lists:keyfind(cmd_type, 1, Dict) of | |||||
{cmd_type, async} -> | |||||
Self = self(), | |||||
Ref = make_ref(), | |||||
spawn_link(fun() -> | |||||
register(r3_breakpoint_handler, self()), | |||||
receive | |||||
resume -> | |||||
Self ! Ref | |||||
end | |||||
end), | |||||
io:format(user, "~n=== BREAK ===~n", []), | |||||
receive | |||||
Ref -> ok | |||||
end; | |||||
_ -> | |||||
?DEBUG("ignoring breakpoint since command is not run " | |||||
"in async mode", []), | |||||
ok | |||||
end | |||||
end. | |||||
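%% @doc resume execution after a breakpoint set with `break/0'.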
resume() -> | |||||
r3_breakpoint_handler ! resume, | |||||
ok. | |||||
%% @private defer to rebar_agent | |||||
'$handle_undefined_function'(Cmd, Args) -> | |||||
rebar_agent:'$handle_undefined_function'(Cmd, Args). |
@ -0,0 +1,315 @@ | |||||
-module(rebar_compiler). | |||||
-export([compile_all/2, | |||||
clean/2, | |||||
needs_compile/3, | |||||
ok_tuple/2, | |||||
error_tuple/4, | |||||
maybe_report/1, | |||||
format_error_source/2, | |||||
report/1]). | |||||
-include("rebar.hrl"). | |||||
-type extension() :: string(). | |||||
-type out_mappings() :: [{extension(), file:filename()}]. | |||||
-callback context(rebar_app_info:t()) -> #{src_dirs => [file:dirname()], | |||||
include_dirs => [file:dirname()], | |||||
src_ext => extension(), | |||||
out_mappings => out_mappings()}. | |||||
-callback needed_files(digraph:graph(), [file:filename()], out_mappings(), | |||||
rebar_app_info:t()) -> | |||||
{{[file:filename()], term()}, {[file:filename()], term()}}. | |||||
-callback dependencies(file:filename(), file:dirname(), [file:dirname()]) -> [file:filename()]. | |||||
-callback compile(file:filename(), out_mappings(), rebar_dict(), list()) -> | |||||
ok | {ok, [string()]} | {ok, [string()], [string()]}. | |||||
-callback clean([file:filename()], rebar_app_info:t()) -> _. | |||||
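%% Together these callbacks define a compiler module: context/1 declares the
%% source and include dirs, the source extension and the output mappings;
%% needed_files/4 splits the discovered sources into the files that must be
%% built first and the rest, along with their compile options; dependencies/3
%% lists the files a given source depends on; compile/4 builds a single file
%% into the locations given by the out mappings; clean/2 removes the
%% artifacts that compile/4 produced.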
-define(DAG_VSN, 2). | |||||
-define(DAG_FILE, "source.dag"). | |||||
-type dag_v() :: {digraph:vertex(), term()} | 'false'. | |||||
-type dag_e() :: {digraph:vertex(), digraph:vertex()}. | |||||
-type dag() :: {list(dag_v()), list(dag_e()), list(string())}. | |||||
-record(dag, {vsn = ?DAG_VSN :: pos_integer(), | |||||
info = {[], [], []} :: dag()}). | |||||
-define(RE_PREFIX, "^(?!\\._)"). | |||||
compile_all(Compilers, AppInfo) -> | |||||
EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)), | |||||
%% Make sure that outdir is on the path | |||||
ok = rebar_file_utils:ensure_dir(EbinDir), | |||||
true = code:add_patha(filename:absname(EbinDir)), | |||||
%% necessary for erlang:function_exported/3 to work as expected | |||||
%% called here for clarity as it's required by both opts_changed/2 | |||||
%% and erl_compiler_opts_set/0 in needed_files | |||||
_ = code:ensure_loaded(compile), | |||||
lists:foreach(fun(CompilerMod) -> | |||||
run(CompilerMod, AppInfo), | |||||
run_on_extra_src_dirs(CompilerMod, AppInfo, fun run/2) | |||||
end, Compilers), | |||||
ok. | |||||
run(CompilerMod, AppInfo) -> | |||||
#{src_dirs := SrcDirs, | |||||
include_dirs := InclDirs, | |||||
src_ext := SrcExt, | |||||
out_mappings := Mappings} = CompilerMod:context(AppInfo), | |||||
BaseDir = rebar_utils:to_list(rebar_app_info:dir(AppInfo)), | |||||
EbinDir = rebar_utils:to_list(rebar_app_info:ebin_dir(AppInfo)), | |||||
BaseOpts = rebar_app_info:opts(AppInfo), | |||||
AbsInclDirs = [filename:join(BaseDir, InclDir) || InclDir <- InclDirs], | |||||
FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, BaseOpts), | |||||
OutDir = rebar_app_info:out_dir(AppInfo), | |||||
AbsSrcDirs = [filename:join(BaseDir, SrcDir) || SrcDir <- SrcDirs], | |||||
G = init_dag(CompilerMod, AbsInclDirs, AbsSrcDirs, FoundFiles, OutDir, EbinDir), | |||||
{{FirstFiles, FirstFileOpts}, {RestFiles, Opts}} = CompilerMod:needed_files(G, FoundFiles, | |||||
Mappings, AppInfo), | |||||
true = digraph:delete(G), | |||||
compile_each(FirstFiles, FirstFileOpts, BaseOpts, Mappings, CompilerMod), | |||||
compile_each(RestFiles, Opts, BaseOpts, Mappings, CompilerMod). | |||||
compile_each([], _Opts, _Config, _Outs, _CompilerMod) -> | |||||
ok; | |||||
compile_each([Source | Rest], Opts, Config, Outs, CompilerMod) -> | |||||
case CompilerMod:compile(Source, Outs, Config, Opts) of | |||||
ok -> | |||||
?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]); | |||||
{ok, Warnings} -> | |||||
report(Warnings), | |||||
?DEBUG("~tsCompiled ~ts", [rebar_utils:indent(1), filename:basename(Source)]); | |||||
skipped -> | |||||
?DEBUG("~tsSkipped ~ts", [rebar_utils:indent(1), filename:basename(Source)]); | |||||
Error -> | |||||
NewSource = format_error_source(Source, Config), | |||||
?ERROR("Compiling ~ts failed", [NewSource]), | |||||
maybe_report(Error), | |||||
?DEBUG("Compilation failed: ~p", [Error]), | |||||
?FAIL | |||||
end, | |||||
compile_each(Rest, Opts, Config, Outs, CompilerMod). | |||||
%% @doc remove compiled artifacts from an AppDir. | |||||
-spec clean([module()], rebar_app_info:t()) -> 'ok'. | |||||
clean(Compilers, AppInfo) -> | |||||
lists:foreach(fun(CompilerMod) -> | |||||
clean_(CompilerMod, AppInfo), | |||||
run_on_extra_src_dirs(CompilerMod, AppInfo, fun clean_/2) | |||||
end, Compilers). | |||||
clean_(CompilerMod, AppInfo) -> | |||||
#{src_dirs := SrcDirs, | |||||
src_ext := SrcExt} = CompilerMod:context(AppInfo), | |||||
BaseDir = rebar_app_info:dir(AppInfo), | |||||
Opts = rebar_app_info:opts(AppInfo), | |||||
EbinDir = rebar_app_info:ebin_dir(AppInfo), | |||||
FoundFiles = find_source_files(BaseDir, SrcExt, SrcDirs, Opts), | |||||
CompilerMod:clean(FoundFiles, AppInfo), | |||||
rebar_file_utils:rm_rf(dag_file(CompilerMod, EbinDir)). | |||||
-spec needs_compile(file:filename_all(), extension(), [{extension(), file:dirname()}]) -> boolean().
needs_compile(Source, OutExt, Mappings) -> | |||||
Ext = filename:extension(Source), | |||||
BaseName = filename:basename(Source, Ext), | |||||
{_, OutDir} = lists:keyfind(OutExt, 1, Mappings), | |||||
Target = filename:join(OutDir, BaseName++OutExt), | |||||
filelib:last_modified(Source) > filelib:last_modified(Target). | |||||
run_on_extra_src_dirs(CompilerMod, AppInfo, Fun) -> | |||||
ExtraDirs = rebar_dir:extra_src_dirs(rebar_app_info:opts(AppInfo), []), | |||||
run_on_extra_src_dirs(ExtraDirs, CompilerMod, AppInfo, Fun). | |||||
run_on_extra_src_dirs([], _CompilerMod, _AppInfo, _Fun) -> | |||||
ok; | |||||
run_on_extra_src_dirs([Dir | Rest], CompilerMod, AppInfo, Fun) -> | |||||
case filelib:is_dir(filename:join(rebar_app_info:dir(AppInfo), Dir)) of | |||||
true -> | |||||
EbinDir = filename:join(rebar_app_info:out_dir(AppInfo), Dir), | |||||
AppInfo1 = rebar_app_info:ebin_dir(AppInfo, EbinDir), | |||||
AppInfo2 = rebar_app_info:set(AppInfo1, src_dirs, [Dir]), | |||||
AppInfo3 = rebar_app_info:set(AppInfo2, extra_src_dirs, ["src"]), | |||||
Fun(CompilerMod, AppInfo3); | |||||
_ -> | |||||
ok | |||||
end, | |||||
run_on_extra_src_dirs(Rest, CompilerMod, AppInfo, Fun). | |||||
%% These functions are here for the ultimate goal of getting rid of | |||||
%% rebar_base_compiler. This can't be done because of existing plugins. | |||||
ok_tuple(Source, Ws) -> | |||||
rebar_base_compiler:ok_tuple(Source, Ws). | |||||
error_tuple(Source, Es, Ws, Opts) -> | |||||
rebar_base_compiler:error_tuple(Source, Es, Ws, Opts). | |||||
maybe_report(Reportable) -> | |||||
rebar_base_compiler:maybe_report(Reportable). | |||||
format_error_source(Path, Opts) -> | |||||
rebar_base_compiler:format_error_source(Path, Opts). | |||||
report(Messages) -> | |||||
rebar_base_compiler:report(Messages). | |||||
%% private functions | |||||
find_source_files(BaseDir, SrcExt, SrcDirs, Opts) -> | |||||
SourceExtRe = "^(?!\\._).*\\" ++ SrcExt ++ [$$], | |||||
lists:flatmap(fun(SrcDir) -> | |||||
Recursive = rebar_dir:recursive(Opts, SrcDir), | |||||
rebar_utils:find_files_in_dirs([filename:join(BaseDir, SrcDir)], SourceExtRe, Recursive) | |||||
end, SrcDirs). | |||||
dag_file(CompilerMod, Dir) -> | |||||
filename:join([rebar_dir:local_cache_dir(Dir), CompilerMod, ?DAG_FILE]). | |||||
%% private graph functions | |||||
%% Get dependency graph of given Erls files and their dependencies (header files, | |||||
%% parse transforms, behaviours etc.) located in their directories or given | |||||
%% InclDirs. Note that last modification times stored in vertices already respect | |||||
%% dependencies induced by given graph G. | |||||
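%% As an illustration (hypothetical paths): compiling src/foo.erl which
%% includes include/foo.hrl yields a vertex per file labelled with its
%% last-modified time, plus an edge from src/foo.erl to include/foo.hrl;
%% touching the header then causes foo.erl to be reported as needing
%% recompilation.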
init_dag(Compiler, InclDirs, SrcDirs, Erls, Dir, EbinDir) -> | |||||
G = digraph:new([acyclic]), | |||||
try restore_dag(Compiler, G, InclDirs, Dir) | |||||
catch | |||||
_:_ -> | |||||
?WARN("Failed to restore ~ts file. Discarding it.~n", [dag_file(Compiler, Dir)]), | |||||
file:delete(dag_file(Compiler, Dir)) | |||||
end, | |||||
Dirs = lists:usort(InclDirs ++ SrcDirs), | |||||
%% A source file may have been renamed or deleted. Remove it from the graph | |||||
%% and remove any beam file for that source if it exists. | |||||
Modified = maybe_rm_beams_and_edges(G, EbinDir, Erls), | |||||
Modified1 = lists:foldl(update_dag_fun(G, Compiler, Dirs), Modified, Erls), | |||||
if Modified1 -> store_dag(Compiler, G, InclDirs, Dir); not Modified1 -> ok end, | |||||
G. | |||||
maybe_rm_beams_and_edges(G, Dir, Files) -> | |||||
Vertices = digraph:vertices(G), | |||||
case lists:filter(fun(File) -> | |||||
case filename:extension(File) =:= ".erl" of | |||||
true -> | |||||
maybe_rm_beam_and_edge(G, Dir, File); | |||||
false -> | |||||
false | |||||
end | |||||
end, lists:sort(Vertices) -- lists:sort(Files)) of | |||||
[] -> | |||||
false; | |||||
_ -> | |||||
true | |||||
end. | |||||
maybe_rm_beam_and_edge(G, OutDir, Source) -> | |||||
%% This is NOT a double check; it is the only check that the source file is actually gone
case filelib:is_regular(Source) of | |||||
true -> | |||||
%% Actually exists, don't delete | |||||
false; | |||||
false -> | |||||
Target = target_base(OutDir, Source) ++ ".beam", | |||||
?DEBUG("Source ~ts is gone, deleting previous beam file if it exists ~ts", [Source, Target]), | |||||
file:delete(Target), | |||||
digraph:del_vertex(G, Source), | |||||
true | |||||
end. | |||||
target_base(OutDir, Source) -> | |||||
filename:join(OutDir, filename:basename(Source, ".erl")). | |||||
restore_dag(Compiler, G, InclDirs, Dir) -> | |||||
case file:read_file(dag_file(Compiler, Dir)) of | |||||
{ok, Data} -> | |||||
% Since externally passed InclDirs can influence dependency graph (see | |||||
% modify_dag), we have to check here that they didn't change. | |||||
#dag{vsn=?DAG_VSN, info={Vs, Es, InclDirs}} = | |||||
binary_to_term(Data), | |||||
lists:foreach( | |||||
fun({V, LastUpdated}) -> | |||||
digraph:add_vertex(G, V, LastUpdated) | |||||
end, Vs), | |||||
lists:foreach( | |||||
fun({_, V1, V2, _}) -> | |||||
digraph:add_edge(G, V1, V2) | |||||
end, Es); | |||||
{error, _} -> | |||||
ok | |||||
end. | |||||
store_dag(Compiler, G, InclDirs, Dir) -> | |||||
Vs = lists:map(fun(V) -> digraph:vertex(G, V) end, digraph:vertices(G)), | |||||
Es = lists:map(fun(E) -> digraph:edge(G, E) end, digraph:edges(G)), | |||||
File = dag_file(Compiler, Dir), | |||||
ok = filelib:ensure_dir(File), | |||||
Data = term_to_binary(#dag{info={Vs, Es, InclDirs}}, [{compressed, 2}]), | |||||
file:write_file(File, Data). | |||||
update_dag(G, Compiler, Dirs, Source) -> | |||||
case digraph:vertex(G, Source) of | |||||
{_, LastUpdated} -> | |||||
case filelib:last_modified(Source) of | |||||
0 -> | |||||
%% The file doesn't exist anymore, | |||||
%% erase it from the graph. | |||||
%% All the edges will be erased automatically. | |||||
digraph:del_vertex(G, Source), | |||||
modified; | |||||
LastModified when LastUpdated < LastModified -> | |||||
modify_dag(G, Compiler, Source, LastModified, filename:dirname(Source), Dirs); | |||||
_ -> | |||||
Modified = lists:foldl( | |||||
update_dag_fun(G, Compiler, Dirs), | |||||
false, digraph:out_neighbours(G, Source)), | |||||
MaxModified = update_max_modified_deps(G, Source), | |||||
case Modified orelse MaxModified > LastUpdated of | |||||
true -> modified; | |||||
false -> unmodified | |||||
end | |||||
end; | |||||
false -> | |||||
modify_dag(G, Compiler, Source, filelib:last_modified(Source), filename:dirname(Source), Dirs) | |||||
end. | |||||
modify_dag(G, Compiler, Source, LastModified, SourceDir, Dirs) -> | |||||
AbsIncls = Compiler:dependencies(Source, SourceDir, Dirs), | |||||
digraph:add_vertex(G, Source, LastModified), | |||||
digraph:del_edges(G, digraph:out_edges(G, Source)), | |||||
lists:foreach( | |||||
fun(Incl) -> | |||||
update_dag(G, Compiler, Dirs, Incl), | |||||
digraph:add_edge(G, Source, Incl) | |||||
end, AbsIncls), | |||||
modified. | |||||
update_dag_fun(G, Compiler, Dirs) -> | |||||
fun(Erl, Modified) -> | |||||
case update_dag(G, Compiler, Dirs, Erl) of | |||||
modified -> true; | |||||
unmodified -> Modified | |||||
end | |||||
end. | |||||
update_max_modified_deps(G, Source) -> | |||||
MaxModified = | |||||
lists:foldl(fun(File, Acc) -> | |||||
case digraph:vertex(G, File) of | |||||
{_, MaxModified} when MaxModified > Acc -> | |||||
MaxModified; | |||||
_ -> | |||||
Acc | |||||
end | |||||
end, 0, [Source | digraph:out_neighbours(G, Source)]), | |||||
digraph:add_vertex(G, Source, MaxModified), | |||||
MaxModified. |
@ -0,0 +1,359 @@
-module(rebar_compiler_erl). | |||||
-behaviour(rebar_compiler). | |||||
-export([context/1, | |||||
needed_files/4, | |||||
dependencies/3, | |||||
compile/4, | |||||
clean/2]). | |||||
-include("rebar.hrl"). | |||||
context(AppInfo) -> | |||||
EbinDir = rebar_app_info:ebin_dir(AppInfo), | |||||
Mappings = [{".beam", EbinDir}], | |||||
OutDir = rebar_app_info:dir(AppInfo), | |||||
SrcDirs = rebar_dir:src_dirs(rebar_app_info:opts(AppInfo), ["src"]), | |||||
ExistingSrcDirs = lists:filter(fun(D) -> | |||||
ec_file:is_dir(filename:join(OutDir, D)) | |||||
end, SrcDirs), | |||||
RebarOpts = rebar_app_info:opts(AppInfo), | |||||
ErlOpts = rebar_opts:erl_opts(RebarOpts), | |||||
ErlOptIncludes = proplists:get_all_values(i, ErlOpts), | |||||
InclDirs = lists:map(fun(Incl) -> filename:absname(Incl) end, ErlOptIncludes), | |||||
#{src_dirs => ExistingSrcDirs, | |||||
include_dirs => [filename:join([OutDir, "include"]) | InclDirs], | |||||
src_ext => ".erl", | |||||
out_mappings => Mappings}. | |||||
needed_files(Graph, FoundFiles, _, AppInfo) -> | |||||
OutDir = rebar_app_info:out_dir(AppInfo), | |||||
Dir = rebar_app_info:dir(AppInfo), | |||||
EbinDir = rebar_app_info:ebin_dir(AppInfo), | |||||
RebarOpts = rebar_app_info:opts(AppInfo), | |||||
ErlOpts = rebar_opts:erl_opts(RebarOpts), | |||||
?DEBUG("erlopts ~p", [ErlOpts]), | |||||
?DEBUG("files to compile ~p", [FoundFiles]), | |||||
%% Make sure that the ebin dir is on the path | |||||
ok = rebar_file_utils:ensure_dir(EbinDir), | |||||
true = code:add_patha(filename:absname(EbinDir)), | |||||
{ParseTransforms, Rest} = split_source_files(FoundFiles, ErlOpts), | |||||
NeededErlFiles = case needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, ParseTransforms) of | |||||
[] -> | |||||
needed_files(Graph, ErlOpts, RebarOpts, OutDir, EbinDir, Rest); | |||||
_ -> | |||||
%% at least one parse transform in the opts needs updating, so recompile all | |||||
FoundFiles | |||||
end, | |||||
{ErlFirstFiles, ErlOptsFirst} = erl_first_files(RebarOpts, ErlOpts, Dir, NeededErlFiles), | |||||
SubGraph = digraph_utils:subgraph(Graph, NeededErlFiles), | |||||
DepErlsOrdered = digraph_utils:topsort(SubGraph), | |||||
OtherErls = lists:reverse(DepErlsOrdered), | |||||
PrivIncludes = [{i, filename:join(OutDir, Src)} | |||||
|| Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])], | |||||
AdditionalOpts = PrivIncludes ++ [{i, filename:join(OutDir, "include")}, {i, OutDir}, return], | |||||
true = digraph:delete(SubGraph), | |||||
{{ErlFirstFiles, ErlOptsFirst ++ AdditionalOpts}, | |||||
{[Erl || Erl <- OtherErls, | |||||
not lists:member(Erl, ErlFirstFiles)], ErlOpts ++ AdditionalOpts}}. | |||||
dependencies(Source, SourceDir, Dirs) -> | |||||
{ok, Fd} = file:open(Source, [read]), | |||||
Incls = parse_attrs(Fd, [], SourceDir), | |||||
AbsIncls = expand_file_names(Incls, Dirs), | |||||
ok = file:close(Fd), | |||||
AbsIncls. | |||||
compile(Source, [{_, OutDir}], Config, ErlOpts) -> | |||||
case compile:file(Source, [{outdir, OutDir} | ErlOpts]) of | |||||
{ok, _Mod} -> | |||||
ok; | |||||
{ok, _Mod, []} -> | |||||
ok; | |||||
{ok, _Mod, Ws} -> | |||||
FormattedWs = format_error_sources(Ws, Config), | |||||
rebar_compiler:ok_tuple(Source, FormattedWs); | |||||
{error, Es, Ws} -> | |||||
error_tuple(Source, Es, Ws, Config, ErlOpts); | |||||
error -> | |||||
error | |||||
end. | |||||
clean(Files, AppInfo) -> | |||||
EbinDir = rebar_app_info:ebin_dir(AppInfo), | |||||
[begin | |||||
Source = filename:basename(File, ".erl"), | |||||
Target = target_base(EbinDir, Source) ++ ".beam", | |||||
file:delete(Target) | |||||
end || File <- Files]. | |||||
%% | |||||
error_tuple(Module, Es, Ws, AllOpts, Opts) -> | |||||
FormattedEs = format_error_sources(Es, AllOpts), | |||||
FormattedWs = format_error_sources(Ws, AllOpts), | |||||
rebar_compiler:error_tuple(Module, FormattedEs, FormattedWs, Opts). | |||||
format_error_sources(Es, Opts) -> | |||||
[{rebar_compiler:format_error_source(Src, Opts), Desc} | |||||
|| {Src, Desc} <- Es]. | |||||
%% Get files which need to be compiled first, i.e. those specified in erl_first_files | |||||
%% and parse_transform options. Also produce specific erl_opts for these first | |||||
%% files, so that yet to be compiled parse transformations are excluded from it. | |||||
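%% A hypothetical configuration: with {erl_first_files, ["src/my_behaviour.erl"]}
%% in rebar.config and {parse_transform, my_transform} in erl_opts, both
%% src/my_behaviour.erl and my_transform.erl (when it belongs to the app being
%% compiled) end up in the first-files set, and my_transform is filtered out of
%% the erl_opts used while compiling them.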
erl_first_files(Opts, ErlOpts, Dir, NeededErlFiles) -> | |||||
ErlFirstFilesConf = rebar_opts:get(Opts, erl_first_files, []), | |||||
valid_erl_first_conf(ErlFirstFilesConf), | |||||
NeededSrcDirs = lists:usort(lists:map(fun filename:dirname/1, NeededErlFiles)), | |||||
%% NOTE: order of files here is important! | |||||
ErlFirstFiles = | |||||
[filename:join(Dir, File) || File <- ErlFirstFilesConf, | |||||
lists:member(filename:join(Dir, File), NeededErlFiles)], | |||||
{ParseTransforms, ParseTransformsErls} = | |||||
lists:unzip(lists:flatmap( | |||||
fun(PT) -> | |||||
PTerls = [filename:join(D, module_to_erl(PT)) || D <- NeededSrcDirs], | |||||
[{PT, PTerl} || PTerl <- PTerls, lists:member(PTerl, NeededErlFiles)] | |||||
end, proplists:get_all_values(parse_transform, ErlOpts))), | |||||
ErlOptsFirst = lists:filter(fun({parse_transform, PT}) -> | |||||
not lists:member(PT, ParseTransforms); | |||||
(_) -> | |||||
true | |||||
end, ErlOpts), | |||||
{ErlFirstFiles ++ ParseTransformsErls, ErlOptsFirst}. | |||||
split_source_files(SourceFiles, ErlOpts) -> | |||||
ParseTransforms = proplists:get_all_values(parse_transform, ErlOpts), | |||||
lists:partition(fun(Source) -> | |||||
lists:member(filename_to_atom(Source), ParseTransforms) | |||||
end, SourceFiles). | |||||
filename_to_atom(F) -> list_to_atom(filename:rootname(filename:basename(F))). | |||||
%% Get subset of SourceFiles which need to be recompiled, respecting | |||||
%% dependencies induced by given graph G. | |||||
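%% A file is considered needed when its vertex in the graph is newer than the
%% existing beam, when the effective compile options changed since the last
%% build, or when ERL_COMPILER_OPTIONS is set on an OTP version that cannot
%% report it via compile:env_compiler_options/0.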
needed_files(Graph, ErlOpts, RebarOpts, Dir, OutDir, SourceFiles) -> | |||||
lists:filter(fun(Source) -> | |||||
TargetBase = target_base(OutDir, Source), | |||||
Target = TargetBase ++ ".beam", | |||||
PrivIncludes = [{i, filename:join(Dir, Src)} | |||||
|| Src <- rebar_dir:all_src_dirs(RebarOpts, ["src"], [])], | |||||
AllOpts = [{outdir, filename:dirname(Target)} | |||||
,{i, filename:join(Dir, "include")} | |||||
,{i, Dir}] ++ PrivIncludes ++ ErlOpts, | |||||
digraph:vertex(Graph, Source) > {Source, filelib:last_modified(Target)} | |||||
orelse opts_changed(AllOpts, TargetBase) | |||||
orelse erl_compiler_opts_set() | |||||
end, SourceFiles). | |||||
target_base(OutDir, Source) -> | |||||
filename:join(OutDir, filename:basename(Source, ".erl")). | |||||
opts_changed(NewOpts, Target) -> | |||||
TotalOpts = case erlang:function_exported(compile, env_compiler_options, 0) of | |||||
true -> NewOpts ++ compile:env_compiler_options(); | |||||
false -> NewOpts | |||||
end, | |||||
case compile_info(Target) of | |||||
{ok, Opts} -> lists:any(fun effects_code_generation/1, lists:usort(TotalOpts) -- lists:usort(Opts)); | |||||
_ -> true | |||||
end. | |||||
effects_code_generation(Option) -> | |||||
case Option of | |||||
beam -> false; | |||||
report_warnings -> false; | |||||
report_errors -> false; | |||||
return_errors -> false;
return_warnings -> false;
report -> false; | |||||
warnings_as_errors -> false; | |||||
binary -> false; | |||||
verbose -> false; | |||||
{cwd,_} -> false; | |||||
{outdir, _} -> false; | |||||
_ -> true | |||||
end. | |||||
compile_info(Target) -> | |||||
case beam_lib:chunks(Target, [compile_info]) of | |||||
{ok, {_mod, Chunks}} -> | |||||
CompileInfo = proplists:get_value(compile_info, Chunks, []), | |||||
{ok, proplists:get_value(options, CompileInfo, [])}; | |||||
{error, beam_lib, Reason} -> | |||||
?WARN("Couldn't read debug info from ~p for reason: ~p", [Target, Reason]), | |||||
{error, Reason} | |||||
end. | |||||
erl_compiler_opts_set() -> | |||||
EnvSet = case os:getenv("ERL_COMPILER_OPTIONS") of | |||||
false -> false; | |||||
_ -> true | |||||
end, | |||||
%% return false if changed env opts would have been caught in opts_changed/2 | |||||
EnvSet andalso not erlang:function_exported(compile, env_compiler_options, 0). | |||||
valid_erl_first_conf(FileList) -> | |||||
Strs = filter_file_list(FileList), | |||||
case rebar_utils:is_list_of_strings(Strs) of | |||||
true -> true; | |||||
false -> ?ABORT("An invalid file list (~p) was provided as part of your erl_first_files directive", | |||||
[FileList]) | |||||
end. | |||||
filter_file_list(FileList) -> | |||||
Atoms = lists:filter( fun(X) -> is_atom(X) end, FileList), | |||||
case Atoms of | |||||
[] -> | |||||
FileList; | |||||
_ -> | |||||
atoms_in_erl_first_files_warning(Atoms), | |||||
lists:filter( fun(X) -> not(is_atom(X)) end, FileList) | |||||
end. | |||||
atoms_in_erl_first_files_warning(Atoms) -> | |||||
W = "You have provided atoms as file entries in erl_first_files; " | |||||
"erl_first_files only expects lists of filenames as strings. " | |||||
"The following modules (~p) may not work as expected and it is advised " | |||||
"that you change these entires to string format " | |||||
"(e.g., \"src/module.erl\") ", | |||||
?WARN(W, [Atoms]). | |||||
module_to_erl(Mod) -> | |||||
atom_to_list(Mod) ++ ".erl". | |||||
parse_attrs(Fd, Includes, Dir) -> | |||||
case io:parse_erl_form(Fd, "") of | |||||
{ok, Form, _Line} -> | |||||
case erl_syntax:type(Form) of | |||||
attribute -> | |||||
NewIncludes = process_attr(Form, Includes, Dir), | |||||
parse_attrs(Fd, NewIncludes, Dir); | |||||
_ -> | |||||
parse_attrs(Fd, Includes, Dir) | |||||
end; | |||||
{eof, _} -> | |||||
Includes; | |||||
_Err -> | |||||
parse_attrs(Fd, Includes, Dir) | |||||
end. | |||||
process_attr(Form, Includes, Dir) -> | |||||
AttrName = erl_syntax:atom_value(erl_syntax:attribute_name(Form)), | |||||
process_attr(AttrName, Form, Includes, Dir). | |||||
process_attr(import, Form, Includes, _Dir) -> | |||||
case erl_syntax_lib:analyze_import_attribute(Form) of | |||||
{Mod, _Funs} -> | |||||
[module_to_erl(Mod)|Includes]; | |||||
Mod -> | |||||
[module_to_erl(Mod)|Includes] | |||||
end; | |||||
process_attr(file, Form, Includes, _Dir) -> | |||||
{File, _} = erl_syntax_lib:analyze_file_attribute(Form), | |||||
[File|Includes]; | |||||
process_attr(include, Form, Includes, _Dir) -> | |||||
[FileNode] = erl_syntax:attribute_arguments(Form), | |||||
File = erl_syntax:string_value(FileNode), | |||||
[File|Includes]; | |||||
process_attr(include_lib, Form, Includes, Dir) -> | |||||
[FileNode] = erl_syntax:attribute_arguments(Form), | |||||
RawFile = erl_syntax:string_value(FileNode), | |||||
maybe_expand_include_lib_path(RawFile, Dir) ++ Includes; | |||||
process_attr(behavior, Form, Includes, _Dir) -> | |||||
process_attr(behaviour, Form, Includes, _Dir); | |||||
process_attr(behaviour, Form, Includes, _Dir) -> | |||||
[FileNode] = erl_syntax:attribute_arguments(Form), | |||||
File = module_to_erl(erl_syntax:atom_value(FileNode)), | |||||
[File|Includes]; | |||||
process_attr(compile, Form, Includes, _Dir) -> | |||||
[Arg] = erl_syntax:attribute_arguments(Form), | |||||
case erl_syntax:concrete(Arg) of | |||||
{parse_transform, Mod} -> | |||||
[module_to_erl(Mod)|Includes]; | |||||
{core_transform, Mod} -> | |||||
[module_to_erl(Mod)|Includes]; | |||||
L when is_list(L) -> | |||||
lists:foldl( | |||||
fun({parse_transform, Mod}, Acc) -> | |||||
[module_to_erl(Mod)|Acc]; | |||||
({core_transform, Mod}, Acc) -> | |||||
[module_to_erl(Mod)|Acc]; | |||||
(_, Acc) -> | |||||
Acc | |||||
end, Includes, L); | |||||
_ -> | |||||
Includes | |||||
end; | |||||
process_attr(_, _Form, Includes, _Dir) -> | |||||
Includes. | |||||
%% NOTE: If, for example, one of the entries in Files refers to
%% gen_server.erl, that entry will be dropped. It is dropped because | |||||
%% such an entry usually refers to the beam file, and we don't pass a | |||||
%% list of OTP src dirs for finding gen_server.erl's full path. Also, | |||||
%% if gen_server.erl was modified, it's not rebar's task to compile a | |||||
%% new version of the beam file. Therefore, it's reasonable to drop | |||||
%% such entries. Also see process_attr(behaviour, Form, Includes). | |||||
-spec expand_file_names([file:filename()], | |||||
[file:filename()]) -> [file:filename()]. | |||||
expand_file_names(Files, Dirs) -> | |||||
%% We check whether each file in Files exists on its own or within the
%% directories listed in Dirs.
%% Return the list of files matched. | |||||
lists:flatmap( | |||||
fun(Incl) -> | |||||
case filelib:is_regular(Incl) of | |||||
true -> | |||||
[Incl]; | |||||
false -> | |||||
rebar_utils:find_files_in_dirs(Dirs, Incl, true) | |||||
end | |||||
end, Files). | |||||
%% Given a path like "stdlib/include/erl_compile.hrl", return | |||||
%% "OTP_INSTALL_DIR/lib/erlang/lib/stdlib-x.y.z/include/erl_compile.hrl". | |||||
%% Usually a simple [Lib, SubDir, File1] = filename:split(File) should | |||||
%% work, but to avoid crashing when an unusual include_lib path is used,
%% more elaborate logic is applied.
maybe_expand_include_lib_path(File, Dir) -> | |||||
File1 = filename:basename(File), | |||||
case filename:split(filename:dirname(File)) of | |||||
[_] -> | |||||
warn_and_find_path(File, Dir); | |||||
[Lib | SubDir] -> | |||||
case code:lib_dir(list_to_atom(Lib), list_to_atom(filename:join(SubDir))) of | |||||
{error, bad_name} -> | |||||
warn_and_find_path(File, Dir); | |||||
AppDir -> | |||||
[filename:join(AppDir, File1)] | |||||
end | |||||
end. | |||||
%% The use of -include_lib was probably incorrect by the user but lets try to make it work. | |||||
%% We search in the outdir and outdir/../include to see if the header exists. | |||||
warn_and_find_path(File, Dir) -> | |||||
SrcHeader = filename:join(Dir, File), | |||||
case filelib:is_regular(SrcHeader) of | |||||
true -> | |||||
[SrcHeader]; | |||||
false -> | |||||
IncludeDir = filename:join(rebar_utils:droplast(filename:split(Dir))++["include"]), | |||||
IncludeHeader = filename:join(IncludeDir, File), | |||||
case filelib:is_regular(IncludeHeader) of | |||||
true -> | |||||
[filename:join(IncludeDir, File)]; | |||||
false -> | |||||
[] | |||||
end | |||||
end. |
@ -0,0 +1,101 @@
-module(rebar_compiler_mib). | |||||
-behaviour(rebar_compiler). | |||||
-export([context/1, | |||||
needed_files/4, | |||||
dependencies/3, | |||||
compile/4, | |||||
clean/2]). | |||||
-include("rebar.hrl"). | |||||
-include_lib("stdlib/include/erl_compile.hrl"). | |||||
context(AppInfo) -> | |||||
Dir = rebar_app_info:dir(AppInfo), | |||||
Mappings = [{".bin", filename:join([Dir, "priv", "mibs"])}, | |||||
{".hrl", filename:join(Dir, "include")}], | |||||
#{src_dirs => ["mibs"], | |||||
include_dirs => [], | |||||
src_ext => ".mib", | |||||
out_mappings => Mappings}. | |||||
needed_files(_, FoundFiles, _, AppInfo) -> | |||||
RebarOpts = rebar_app_info:opts(AppInfo), | |||||
MibFirstConf = rebar_opts:get(RebarOpts, mib_first_files, []), | |||||
valid_mib_first_conf(MibFirstConf), | |||||
Dir = rebar_app_info:dir(AppInfo), | |||||
MibFirstFiles = [filename:join(Dir, File) || File <- MibFirstConf], | |||||
%% Remove first files from found files | |||||
RestFiles = [Source || Source <- FoundFiles, not lists:member(Source, MibFirstFiles)], | |||||
Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), mib_opts, []), | |||||
{{MibFirstFiles, Opts}, {RestFiles, Opts}}. | |||||
valid_mib_first_conf(FileList) -> | |||||
Strs = filter_file_list(FileList), | |||||
case rebar_utils:is_list_of_strings(Strs) of | |||||
true -> true; | |||||
false -> ?ABORT("An invalid file list (~p) was provided as part of your mib_first_files directive", | |||||
[FileList]) | |||||
end. | |||||
filter_file_list(FileList) -> | |||||
Atoms = lists:filter( fun(X) -> is_atom(X) end, FileList), | |||||
case Atoms of | |||||
[] -> | |||||
FileList; | |||||
_ -> | |||||
atoms_in_mib_first_files_warning(Atoms), | |||||
lists:filter( fun(X) -> not(is_atom(X)) end, FileList) | |||||
end. | |||||
atoms_in_mib_first_files_warning(Atoms) -> | |||||
W = "You have provided atoms as file entries in mib_first_files; " | |||||
"mib_first_files only expects lists of filenames as strings. " | |||||
"The following MIBs (~p) may not work as expected and it is advised " | |||||
"that you change these entires to string format " | |||||
"(e.g., \"mibs/SOME-MIB.mib\") ", | |||||
?WARN(W, [Atoms]). | |||||
dependencies(_, _, _) -> | |||||
[]. | |||||
compile(Source, OutDirs, _, Opts) -> | |||||
{_, BinOut} = lists:keyfind(".bin", 1, OutDirs), | |||||
{_, HrlOut} = lists:keyfind(".hrl", 1, OutDirs), | |||||
ok = rebar_file_utils:ensure_dir(BinOut), | |||||
ok = rebar_file_utils:ensure_dir(HrlOut), | |||||
Mib = filename:join(BinOut, filename:basename(Source, ".mib")), | |||||
HrlFilename = Mib ++ ".hrl", | |||||
AllOpts = [{outdir, BinOut}, {i, [BinOut]}] ++ Opts, | |||||
case snmpc:compile(Source, AllOpts) of | |||||
{ok, _} -> | |||||
MibToHrlOpts = | |||||
case proplists:get_value(verbosity, AllOpts, undefined) of | |||||
undefined -> | |||||
#options{specific = [], | |||||
cwd = rebar_dir:get_cwd()}; | |||||
Verbosity -> | |||||
#options{specific = [{verbosity, Verbosity}], | |||||
cwd = rebar_dir:get_cwd()} | |||||
end, | |||||
ok = snmpc:mib_to_hrl(Mib, Mib, MibToHrlOpts), | |||||
rebar_file_utils:mv(HrlFilename, HrlOut), | |||||
ok; | |||||
{error, compilation_failed} -> | |||||
?FAIL | |||||
end. | |||||
clean(MibFiles, AppInfo) -> | |||||
AppDir = rebar_app_info:dir(AppInfo), | |||||
MIBs = [filename:rootname(filename:basename(MIB)) || MIB <- MibFiles], | |||||
rebar_file_utils:delete_each( | |||||
[filename:join([AppDir, "include", MIB++".hrl"]) || MIB <- MIBs]), | |||||
ok = rebar_file_utils:rm_rf(filename:join([AppDir, "priv/mibs/*.bin"])). |
@ -0,0 +1,64 @@
-module(rebar_compiler_xrl). | |||||
-behaviour(rebar_compiler). | |||||
-export([context/1, | |||||
needed_files/4, | |||||
dependencies/3, | |||||
compile/4, | |||||
clean/2]). | |||||
-export([update_opts/2]). | |||||
context(AppInfo) -> | |||||
Dir = rebar_app_info:dir(AppInfo), | |||||
Mappings = [{".erl", filename:join([Dir, "src"])}], | |||||
#{src_dirs => ["src"], | |||||
include_dirs => [], | |||||
src_ext => ".xrl", | |||||
out_mappings => Mappings}. | |||||
needed_files(_, FoundFiles, Mappings, AppInfo) -> | |||||
FirstFiles = [], | |||||
%% Remove first files from found files | |||||
RestFiles = [Source || Source <- FoundFiles, | |||||
not lists:member(Source, FirstFiles), | |||||
rebar_compiler:needs_compile(Source, ".erl", Mappings)], | |||||
Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), xrl_opts, []), | |||||
Opts1 = update_opts(Opts, AppInfo), | |||||
{{FirstFiles, Opts1}, {RestFiles, Opts1}}. | |||||
dependencies(_, _, _) -> | |||||
[]. | |||||
compile(Source, [{_, _}], _, Opts) -> | |||||
case leex:file(Source, [{return, true} | Opts]) of | |||||
{ok, _} -> | |||||
ok; | |||||
{ok, _Mod, Ws} -> | |||||
rebar_compiler:ok_tuple(Source, Ws); | |||||
{error, Es, Ws} -> | |||||
rebar_compiler:error_tuple(Source, Es, Ws, Opts) | |||||
end. | |||||
clean(XrlFiles, _AppInfo) -> | |||||
rebar_file_utils:delete_each( | |||||
[rebar_utils:to_list(re:replace(F, "\\.xrl$", ".erl", [unicode])) | |||||
|| F <- XrlFiles]). | |||||
%% make includefile options absolute paths | |||||
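%% e.g. a hypothetical relative {includefile, "leexinc.hrl"} entry is rewritten
%% to {includefile, "<out_dir>/leexinc.hrl"}, while absolute paths pass through.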
update_opts(Opts, AppInfo) -> | |||||
OutDir = rebar_app_info:out_dir(AppInfo), | |||||
lists:map(fun({includefile, I}) -> | |||||
case filename:pathtype(I) =:= relative of | |||||
true -> | |||||
{includefile, filename:join(OutDir, I)}; | |||||
false -> | |||||
{includefile, I} | |||||
end; | |||||
(O) -> | |||||
O | |||||
end, Opts). |
@ -0,0 +1,51 @@
-module(rebar_compiler_yrl). | |||||
-behaviour(rebar_compiler). | |||||
-export([context/1, | |||||
needed_files/4, | |||||
dependencies/3, | |||||
compile/4, | |||||
clean/2]). | |||||
context(AppInfo) -> | |||||
Dir = rebar_app_info:dir(AppInfo), | |||||
Mappings = [{".erl", filename:join([Dir, "src"])}], | |||||
#{src_dirs => ["src"], | |||||
include_dirs => [], | |||||
src_ext => ".yrl", | |||||
out_mappings => Mappings}. | |||||
needed_files(_, FoundFiles, Mappings, AppInfo) -> | |||||
FirstFiles = [], | |||||
%% Remove first files from found files | |||||
RestFiles = [Source || Source <- FoundFiles, | |||||
not lists:member(Source, FirstFiles), | |||||
rebar_compiler:needs_compile(Source, ".erl", Mappings)], | |||||
Opts = rebar_opts:get(rebar_app_info:opts(AppInfo), yrl_opts, []), | |||||
Opts1 = rebar_compiler_xrl:update_opts(Opts, AppInfo), | |||||
{{FirstFiles, Opts1}, {RestFiles, Opts1}}. | |||||
dependencies(_, _, _) -> | |||||
[]. | |||||
compile(Source, [{_, OutDir}], _, Opts) -> | |||||
BaseName = filename:basename(Source, ".yrl"), | |||||
Target = filename:join([OutDir, BaseName]), | |||||
AllOpts = [{parserfile, Target}, {return, true} | Opts], | |||||
case yecc:file(Source, AllOpts) of | |||||
{ok, _} -> | |||||
ok; | |||||
{ok, _Mod, Ws} -> | |||||
rebar_compiler:ok_tuple(Source, Ws); | |||||
{error, Es, Ws} -> | |||||
rebar_compiler:error_tuple(Source, Es, Ws, AllOpts) | |||||
end. | |||||
clean(YrlFiles, _AppInfo) -> | |||||
rebar_file_utils:delete_each( | |||||
[rebar_utils:to_list(re:replace(F, "\\.yrl$", ".erl", [unicode])) | |||||
|| F <- YrlFiles]). |
@ -0,0 +1,86 @@
-module(rebar_env). | |||||
-export([create_env/1, | |||||
create_env/2]). | |||||
-include("rebar.hrl"). | |||||
%% @doc The following environment variables are exported when running | |||||
%% a hook (absolute paths): | |||||
%% | |||||
%% REBAR_DEPS_DIR = rebar_dir:deps_dir/1 | |||||
%% REBAR_BUILD_DIR = rebar_dir:base_dir/1 | |||||
%% REBAR_ROOT_DIR = rebar_dir:root_dir/1 | |||||
%% REBAR_CHECKOUTS_DIR = rebar_dir:checkouts_dir/1 | |||||
%% REBAR_PLUGINS_DIR = rebar_dir:plugins_dir/1 | |||||
%% REBAR_GLOBAL_CONFIG_DIR = rebar_dir:global_config_dir/1 | |||||
%% REBAR_GLOBAL_CACHE_DIR = rebar_dir:global_cache_dir/1 | |||||
%% REBAR_TEMPLATE_DIR = rebar_dir:template_dir/1 | |||||
%% REBAR_APP_DIRS = rebar_dir:lib_dirs/1 | |||||
%% REBAR_SRC_DIRS = rebar_dir:src_dirs/1 | |||||
%% | |||||
%% autoconf compatible variables | |||||
%% (see: http://www.gnu.org/software/autoconf/manual/autoconf.html#Erlang-Libraries): | |||||
%% ERLANG_ERTS_VER = erlang:system_info(version) | |||||
%% ERLANG_ROOT_DIR = code:root_dir/0 | |||||
%% ERLANG_LIB_DIR_erl_interface = code:lib_dir(erl_interface) | |||||
%% ERLANG_LIB_VER_erl_interface = version part of path returned by code:lib_dir(erl_interface) | |||||
%% ERL = ERLANG_ROOT_DIR/bin/erl | |||||
%% ERLC = ERLANG_ROOT_DIR/bin/erlc
%% | |||||
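%% As a hypothetical example, a hook configured in rebar.config can rely on
%% these variables:
%%   {pre_hooks, [{compile, "mkdir -p \"$REBAR_BUILD_DIR/generated\""}]}.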
-spec create_env(rebar_state:t()) -> proplists:proplist(). | |||||
create_env(State) -> | |||||
Opts = rebar_state:opts(State), | |||||
create_env(State, Opts). | |||||
-spec create_env(rebar_state:t(), rebar_dict()) -> proplists:proplist(). | |||||
create_env(State, Opts) -> | |||||
BaseDir = rebar_dir:base_dir(State), | |||||
EnvVars = [ | |||||
{"REBAR_DEPS_DIR", filename:absname(rebar_dir:deps_dir(State))}, | |||||
{"REBAR_BUILD_DIR", filename:absname(rebar_dir:base_dir(State))}, | |||||
{"REBAR_ROOT_DIR", filename:absname(rebar_dir:root_dir(State))}, | |||||
{"REBAR_CHECKOUTS_DIR", filename:absname(rebar_dir:checkouts_dir(State))}, | |||||
{"REBAR_PLUGINS_DIR", filename:absname(rebar_dir:plugins_dir(State))}, | |||||
{"REBAR_GLOBAL_CONFIG_DIR", filename:absname(rebar_dir:global_config_dir(State))}, | |||||
{"REBAR_GLOBAL_CACHE_DIR", filename:absname(rebar_dir:global_cache_dir(Opts))}, | |||||
{"REBAR_TEMPLATE_DIR", filename:absname(rebar_dir:template_dir(State))}, | |||||
{"REBAR_APP_DIRS", join_dirs(BaseDir, rebar_dir:lib_dirs(State))}, | |||||
{"REBAR_SRC_DIRS", join_dirs(BaseDir, rebar_dir:all_src_dirs(Opts))}, | |||||
{"ERLANG_ERTS_VER", erlang:system_info(version)}, | |||||
{"ERLANG_ROOT_DIR", code:root_dir()}, | |||||
{"ERL", filename:join([code:root_dir(), "bin", "erl"])}, | |||||
{"ERLC", filename:join([code:root_dir(), "bin", "erlc"])}, | |||||
{"ERLANG_ARCH" , rebar_api:wordsize()}, | |||||
{"ERLANG_TARGET", rebar_api:get_arch()} | |||||
], | |||||
EInterfaceVars = create_erl_interface_env(), | |||||
lists:append([EnvVars, EInterfaceVars]). | |||||
-spec create_erl_interface_env() -> list(). | |||||
create_erl_interface_env() -> | |||||
case code:lib_dir(erl_interface) of | |||||
{error, bad_name} -> | |||||
?WARN("erl_interface is missing. ERLANG_LIB_DIR_erl_interface and " | |||||
"ERLANG_LIB_VER_erl_interface will not be added to the environment.", []), | |||||
[]; | |||||
Dir -> | |||||
[ | |||||
{"ERLANG_LIB_DIR_erl_interface", Dir}, | |||||
{"ERLANG_LIB_VER_erl_interface", re_version(Dir)} | |||||
] | |||||
end. | |||||
%% ==================================================================== | |||||
%% Internal functions | |||||
%% ==================================================================== | |||||
join_dirs(BaseDir, Dirs) -> | |||||
rebar_string:join([filename:join(BaseDir, Dir) || Dir <- Dirs], ":"). | |||||
re_version(Path) -> | |||||
case re:run(Path, "^.*-(?<VER>[^/-]*)$", [{capture,[1],list}, unicode]) of | |||||
nomatch -> ""; | |||||
{match, [Ver]} -> Ver | |||||
end. |
@ -0,0 +1,142 @@
-module(rebar_hex_repos). | |||||
-export([from_state/2, | |||||
get_repo_config/2, | |||||
auth_config/1, | |||||
update_auth_config/2, | |||||
format_error/1]). | |||||
-ifdef(TEST). | |||||
%% exported for test purposes | |||||
-export([repos/1, merge_repos/1]). | |||||
-endif. | |||||
-include("rebar.hrl"). | |||||
-include_lib("providers/include/providers.hrl"). | |||||
-export_type([repo/0]). | |||||
-type repo() :: #{name => unicode:unicode_binary(), | |||||
api_url => binary(), | |||||
api_key => binary(), | |||||
repo_url => binary(), | |||||
repo_public_key => binary(), | |||||
repo_verify => binary()}. | |||||
from_state(BaseConfig, State) -> | |||||
HexConfig = rebar_state:get(State, hex, []), | |||||
Repos = repos(HexConfig), | |||||
%% auth is stored in a separate config file since the plugin generates and modifies it | |||||
Auth = ?MODULE:auth_config(State), | |||||
%% add base config entries that are specific to rebar3's own use and are not overridable
Repos1 = merge_with_base_and_auth(Repos, BaseConfig, Auth), | |||||
%% merge the parent repo's options into each organization repo
update_organizations(Repos1). | |||||
-spec get_repo_config(unicode:unicode_binary(), rebar_state:t() | [repo()]) | |||||
-> {ok, repo()} | error. | |||||
get_repo_config(RepoName, Repos) when is_list(Repos) -> | |||||
case ec_lists:find(fun(#{name := N}) -> N =:= RepoName end, Repos) of | |||||
error -> | |||||
throw(?PRV_ERROR({repo_not_found, RepoName})); | |||||
{ok, RepoConfig} -> | |||||
{ok, RepoConfig} | |||||
end; | |||||
get_repo_config(RepoName, State) -> | |||||
Resources = rebar_state:resources(State), | |||||
#{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources), | |||||
get_repo_config(RepoName, Repos). | |||||
merge_with_base_and_auth(Repos, BaseConfig, Auth) -> | |||||
[maps:merge(maps:get(maps:get(name, Repo), Auth, #{}), | |||||
maps:merge(Repo, BaseConfig)) || Repo <- Repos]. | |||||
%% A user's list of repos are merged by name while keeping the order | |||||
%% intact. The order is based on the first use of a repo by name in the | |||||
%% list. The default repo is appended to the user's list. | |||||
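%% A hypothetical rebar.config entry adding a private repo ahead of the
%% default one:
%%   {hex, [{repos, [#{name => <<"my_private_repo">>,
%%                     repo_url => <<"https://repo.example.com">>}]}]}.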
repos(HexConfig) -> | |||||
HexDefaultConfig = default_repo(), | |||||
case [R || R <- HexConfig, element(1, R) =:= repos] of | |||||
[] -> | |||||
[HexDefaultConfig]; | |||||
%% we only care if the first element is a replace entry | |||||
[{repos, replace, Repos} | _] ->
merge_repos(Repos); | |||||
Repos -> | |||||
RepoList = repo_list(Repos), | |||||
merge_repos(RepoList ++ [HexDefaultConfig]) | |||||
end. | |||||
-spec merge_repos([repo()]) -> [repo()]. | |||||
merge_repos(Repos) -> | |||||
lists:foldl(fun(R=#{name := Name}, ReposAcc) -> | |||||
%% private organizations include the parent repo before a : | |||||
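%% e.g. <<"hexpm:my_org">> is split into parent <<"hexpm">> and
%% organization <<"my_org">>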
case rebar_string:split(Name, <<":">>) of | |||||
[Repo, Org] -> | |||||
update_repo_list(R#{name => Name, | |||||
organization => Org, | |||||
parent => Repo}, ReposAcc); | |||||
_ -> | |||||
update_repo_list(R, ReposAcc) | |||||
end | |||||
end, [], Repos). | |||||
update_organizations(Repos) -> | |||||
lists:map(fun(Repo=#{organization := Organization, | |||||
parent := ParentName}) -> | |||||
{ok, Parent} = get_repo_config(ParentName, Repos), | |||||
ParentRepoUrl = rebar_utils:to_list(maps:get(repo_url, Parent)), | |||||
{ok, RepoUrl} = | |||||
rebar_utils:url_append_path(ParentRepoUrl, | |||||
filename:join("repos", rebar_utils:to_list(Organization))), | |||||
%% still let the organization config override this constructed repo url | |||||
maps:merge(Parent#{repo_url => rebar_utils:to_binary(RepoUrl)}, Repo); | |||||
(Repo) -> | |||||
Repo | |||||
end, Repos). | |||||
update_repo_list(R=#{name := N}, [H=#{name := HN} | Rest]) when N =:= HN -> | |||||
[maps:merge(R, H) | Rest]; | |||||
update_repo_list(R, [H | Rest]) -> | |||||
[H | update_repo_list(R, Rest)]; | |||||
update_repo_list(R, []) -> | |||||
[R]. | |||||
default_repo() -> | |||||
HexDefaultConfig = hex_core:default_config(), | |||||
HexDefaultConfig#{name => ?PUBLIC_HEX_REPO}. | |||||
repo_list([]) -> | |||||
[]; | |||||
repo_list([{repos, Repos} | T]) -> | |||||
Repos ++ repo_list(T); | |||||
repo_list([{repos, replace, Repos} | T]) -> | |||||
Repos ++ repo_list(T). | |||||
format_error({repo_not_found, RepoName}) -> | |||||
io_lib:format("The repo ~ts was not found in the configuration.", [RepoName]). | |||||
%% auth functions | |||||
%% authentication is in a separate config file because the hex plugin updates it | |||||
-spec auth_config_file(rebar_state:t()) -> file:filename_all(). | |||||
auth_config_file(State) -> | |||||
filename:join(rebar_dir:global_config_dir(State), ?HEX_AUTH_FILE). | |||||
-spec auth_config(rebar_state:t()) -> map(). | |||||
auth_config(State) -> | |||||
case file:consult(auth_config_file(State)) of | |||||
{ok, [Config]} -> | |||||
Config; | |||||
_ -> | |||||
#{} | |||||
end. | |||||
-spec update_auth_config(map(), rebar_state:t()) -> ok. | |||||
update_auth_config(Updates, State) -> | |||||
Config = auth_config(State), | |||||
AuthConfigFile = auth_config_file(State), | |||||
ok = filelib:ensure_dir(AuthConfigFile), | |||||
NewConfig = iolist_to_binary([io_lib:print(maps:merge(Config, Updates)) | ".\n"]), | |||||
ok = file:write_file(AuthConfigFile, NewConfig). |
@ -0,0 +1,211 @@
-module(rebar_paths). | |||||
-include("rebar.hrl"). | |||||
-type target() :: deps | plugins. | |||||
-type targets() :: [target(), ...]. | |||||
-export_type([target/0, targets/0]). | |||||
-export([set_paths/2, unset_paths/2]). | |||||
-export([clashing_apps/2]). | |||||
-ifdef(TEST). | |||||
-export([misloaded_modules/2]). | |||||
-endif. | |||||
-spec set_paths(targets(), rebar_state:t()) -> ok. | |||||
set_paths(UserTargets, State) -> | |||||
Targets = normalize_targets(UserTargets), | |||||
GroupPaths = path_groups(Targets, State), | |||||
Paths = lists:append(lists:reverse([P || {_, P} <- GroupPaths])), | |||||
code:add_pathsa(Paths), | |||||
AppGroups = app_groups(Targets, State), | |||||
purge_and_load(AppGroups, sets:new()), | |||||
ok. | |||||
-spec unset_paths(targets(), rebar_state:t()) -> ok. | |||||
unset_paths(UserTargets, State) -> | |||||
Targets = normalize_targets(UserTargets), | |||||
GroupPaths = path_groups(Targets, State), | |||||
Paths = lists:append([P || {_, P} <- GroupPaths]), | |||||
[code:del_path(P) || P <- Paths], | |||||
purge(Paths, code:all_loaded()), | |||||
ok. | |||||
-spec clashing_apps(targets(), rebar_state:t()) -> [{target(), [binary()]}]. | |||||
clashing_apps(Targets, State) -> | |||||
AppGroups = app_groups(Targets, State), | |||||
AppNames = [{G, sets:from_list( | |||||
[rebar_app_info:name(App) || App <- Apps] | |||||
)} || {G, Apps} <- AppGroups], | |||||
clashing_app_names(sets:new(), AppNames, []). | |||||
%%%%%%%%%%%%%%% | |||||
%%% PRIVATE %%% | |||||
%%%%%%%%%%%%%%% | |||||
%% The paths are to be set in the reverse order; i.e. the default | |||||
%% path is always last when possible (minimize cases where a build | |||||
%% tool version clashes with an app's), and put the highest priorities | |||||
%% first. | |||||
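%% e.g. normalize_targets([deps, plugins, deps]) returns [plugins, deps];
%% duplicate targets are collapsed into a single entry.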
-spec normalize_targets(targets()) -> targets(). | |||||
normalize_targets(List) -> | |||||
%% Plan for the eventuality of getting values piped in | |||||
%% from future versions of rebar3, possibly from plugins and so on, | |||||
%% which means we'd risk failing kind of violently. We only support | |||||
%% deps and plugins.
TmpList = lists:foldl( | |||||
fun(deps, [deps | _] = Acc) -> Acc; | |||||
(plugins, [plugins | _] = Acc) -> Acc; | |||||
(deps, Acc) -> [deps | Acc -- [deps]]; | |||||
(plugins, Acc) -> [plugins | Acc -- [plugins]]; | |||||
(_, Acc) -> Acc | |||||
end, | |||||
[], | |||||
List | |||||
), | |||||
lists:reverse(TmpList). | |||||
purge_and_load([], _) -> | |||||
ok; | |||||
purge_and_load([{_Group, Apps}|Rest], Seen) -> | |||||
%% We have: a list of all applications in the current priority group, | |||||
%% a list of all loaded modules with their active path, and a list of | |||||
%% seen applications. | |||||
%% | |||||
%% We do the following: | |||||
%% 1. identify the apps that have not been solved yet | |||||
%% 2. find the paths for all apps in the current group | |||||
%% 3. unload and reload apps that may have changed paths in order | |||||
%% to get updated module lists and specs | |||||
%% (we ignore started apps and apps that have not run for this) | |||||
%% This part turns out to be the bottleneck of this module, so | |||||
%% to speed it up, using clash detection proves useful: | |||||
%% only reload apps that clashed since others are unlikely to | |||||
%% conflict in significant ways | |||||
%% 4. create a list of modules to check from that app list—only loaded | |||||
%% modules make sense to check. | |||||
%% 5. check the modules to match their currently loaded paths with | |||||
%% the path set from the apps in the current group; modules | |||||
%% that differ must be purged; others can stay | |||||
%% 1) | |||||
AppNames = [AppName || App <- Apps, | |||||
AppName <- [rebar_app_info:name(App)], | |||||
not sets:is_element(AppName, Seen)], | |||||
GoodApps = [App || AppName <- AppNames, | |||||
App <- Apps, | |||||
rebar_app_info:name(App) =:= AppName], | |||||
%% 2) | |||||
%% (no need for extra_src_dirs since those get put into ebin; | |||||
%% also no need for OTP libs; we want to allow overtaking them) | |||||
GoodAppPaths = [rebar_app_info:ebin_dir(App) || App <- GoodApps], | |||||
%% 3) | |||||
[begin | |||||
AtomApp = binary_to_atom(AppName, utf8), | |||||
%% blind load/unload won't interrupt an already-running app, | |||||
%% preventing odd errors, maybe! | |||||
case application:unload(AtomApp) of | |||||
ok -> application:load(AtomApp); | |||||
_ -> ok | |||||
end | |||||
end || AppName <- AppNames, | |||||
%% Shouldn't unload ourselves; rebar runs without ever | |||||
%% being started and unloading breaks logging! | |||||
AppName =/= <<"rebar">>], | |||||
%% 4) | |||||
CandidateMods = lists:append( | |||||
%% Start by asking the currently loaded app (if loaded) | |||||
%% since it would be the primary source of conflicting modules | |||||
[case application:get_key(AppName, modules) of | |||||
{ok, Mods} -> | |||||
Mods; | |||||
undefined -> | |||||
%% if not found, parse the app file on disk, in case | |||||
%% the app's modules are used without it being loaded; | |||||
%% invalidate the cache in case we're proceeding during | |||||
%% compilation steps by setting the app details to `[]', which | |||||
%% is its empty value; the details will then be reloaded | |||||
%% from disk when found | |||||
case rebar_app_info:app_details(rebar_app_info:app_details(App, [])) of | |||||
[] -> []; | |||||
Details -> proplists:get_value(modules, Details, []) | |||||
end | |||||
end || App <- GoodApps, | |||||
AppName <- [binary_to_atom(rebar_app_info:name(App), utf8)]] | |||||
), | |||||
ModPaths = [{Mod,Path} || Mod <- CandidateMods, | |||||
erlang:function_exported(Mod, module_info, 0), | |||||
{file, Path} <- [code:is_loaded(Mod)]], | |||||
%% 5) | |||||
Mods = misloaded_modules(GoodAppPaths, ModPaths), | |||||
[purge_mod(Mod) || Mod <- Mods], | |||||
purge_and_load(Rest, sets:union(Seen, sets:from_list(AppNames))). | |||||
purge(Paths, ModPaths) -> | |||||
SortedPaths = lists:sort(Paths), | |||||
lists:map(fun purge_mod/1, | |||||
[Mod || {Mod, Path} <- ModPaths, | |||||
is_list(Path), % not 'preloaded' or mocked | |||||
any_prefix(Path, SortedPaths)] | |||||
). | |||||
misloaded_modules(GoodAppPaths, ModPaths) -> | |||||
%% Identify paths that are invalid; i.e. app paths that cover an | |||||
%% app in the desired group, but are not in the desired group. | |||||
lists:usort( | |||||
[Mod || {Mod, Path} <- ModPaths, | |||||
is_list(Path), % not 'preloaded' or mocked | |||||
not any_prefix(Path, GoodAppPaths)] | |||||
). | |||||
any_prefix(Path, Paths) -> | |||||
lists:any(fun(P) -> lists:prefix(P, Path) end, Paths). | |||||
%% assume paths currently set are good; only unload a module so next call | |||||
%% uses the correctly set paths | |||||
purge_mod(Mod) -> | |||||
code:soft_purge(Mod) andalso code:delete(Mod). | |||||
%% This is a tricky O(n²) check since we want to | |||||
%% know whether an app clashes with any of the top priority groups. | |||||
%% | |||||
%% For example, let's say we have `[deps, plugins]', then we want | |||||
%% to find the plugins that clash with deps: | |||||
%% | |||||
%% `[{deps, [ClashingPlugins]}, {plugins, []}]' | |||||
%% | |||||
%% In case we'd ever have alternative or additional types, we can | |||||
%% find all clashes from other 'groups'. | |||||
clashing_app_names(_, [], Acc) -> | |||||
lists:reverse(Acc); | |||||
clashing_app_names(PrevNames, [{G,AppNames} | Rest], Acc) -> | |||||
CurrentNames = sets:subtract(AppNames, PrevNames), | |||||
NextNames = sets:subtract(sets:union([A || {_, A} <- Rest]), PrevNames), | |||||
Clashes = sets:intersection(CurrentNames, NextNames), | |||||
NewAcc = [{G, sets:to_list(Clashes)} | Acc], | |||||
clashing_app_names(sets:union(PrevNames, CurrentNames), Rest, NewAcc). | |||||
path_groups(Targets, State) -> | |||||
[{Target, get_paths(Target, State)} || Target <- Targets]. | |||||
app_groups(Targets, State) -> | |||||
[{Target, get_apps(Target, State)} || Target <- Targets]. | |||||
get_paths(deps, State) -> | |||||
rebar_state:code_paths(State, all_deps); | |||||
get_paths(plugins, State) -> | |||||
rebar_state:code_paths(State, all_plugin_deps). | |||||
get_apps(deps, State) -> | |||||
%% The code paths for deps also include the top level apps | |||||
%% and the extras, which we don't have here; we have to | |||||
%% add the apps by hand | |||||
case rebar_state:project_apps(State) of | |||||
undefined -> []; | |||||
List -> List | |||||
end ++ | |||||
rebar_state:all_deps(State); | |||||
get_apps(plugins, State) -> | |||||
rebar_state:all_plugin_deps(State). |
@ -0,0 +1,138 @@
%%% @doc Meta-provider that dynamically compiles providers | |||||
%%% to run aliased commands. | |||||
%%% | |||||
%%% This is hackish and out-there, but this module has graduated | |||||
%%% from a plugin at https://github.com/tsloughter/rebar_alias after | |||||
%%% years of stability. Only some error checks were added.
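%%% A hypothetical alias definition in rebar.config:
%%%   {alias, [{check, [xref, dialyzer, {ct, "--cover"}, cover]}]}.
%%% Running rebar3 check is then equivalent to
%%% rebar3 do xref, dialyzer, ct --cover, cover.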
-module(rebar_prv_alias). | |||||
-export([init/1]). | |||||
-include("rebar.hrl"). | |||||
%% =================================================================== | |||||
%% Public API | |||||
%% =================================================================== | |||||
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}. | |||||
init(State) -> | |||||
Aliases = rebar_state:get(State, alias, []), | |||||
lists:foldl(fun({Alias, Cmds}, {ok, StateAcc}) -> | |||||
case validate_provider(Alias, Cmds, State) of | |||||
true -> init_alias(Alias, Cmds, StateAcc); | |||||
%% keep providers registered by earlier aliases even when this one is invalid
false -> {ok, StateAcc}
end | |||||
end, {ok, State}, Aliases). | |||||
init_alias(Alias, Cmds, State) -> | |||||
Module = list_to_atom("rebar_prv_alias_" ++ atom_to_list(Alias)), | |||||
MF = module(Module), | |||||
EF = exports(), | |||||
FF = do_func(Cmds), | |||||
{ok, _, Bin} = compile:forms([MF, EF, FF]), | |||||
code:load_binary(Module, "none", Bin), | |||||
Provider = providers:create([ | |||||
{name, Alias}, | |||||
{module, Module}, | |||||
{bare, true}, | |||||
{deps, []}, | |||||
{example, example(Alias)}, | |||||
{opts, []}, | |||||
{short_desc, desc(Cmds)}, | |||||
{desc, desc(Cmds)} | |||||
]), | |||||
{ok, rebar_state:add_provider(State, Provider)}. | |||||
validate_provider(Alias, Cmds, State) -> | |||||
%% This would be caught and prevented anyway, but the warning | |||||
%% is friendlier | |||||
case providers:get_provider(Alias, rebar_state:providers(State)) of | |||||
not_found -> | |||||
%% check for circular deps in the alias. | |||||
case not proplists:is_defined(Alias, Cmds) of | |||||
true -> true; | |||||
false -> | |||||
?WARN("Alias ~p contains itself and would never " | |||||
"terminate. It will be ignored.", | |||||
[Alias]), | |||||
false | |||||
end; | |||||
_ -> | |||||
?WARN("Alias ~p is already the name of a command in " | |||||
"the default namespace and will be ignored.", | |||||
[Alias]), | |||||
false | |||||
end. | |||||
example(Alias) -> | |||||
"rebar3 " ++ atom_to_list(Alias). | |||||
desc(Cmds) -> | |||||
"Equivalent to running: rebar3 do " | |||||
++ rebar_string:join(lists:map(fun to_desc/1, Cmds), ","). | |||||
to_desc({Cmd, Args}) when is_list(Args) -> | |||||
atom_to_list(Cmd) ++ " " ++ Args; | |||||
to_desc({Namespace, Cmd}) -> | |||||
atom_to_list(Namespace) ++ " " ++ atom_to_list(Cmd); | |||||
to_desc({Namespace, Cmd, Args}) -> | |||||
atom_to_list(Namespace) ++ " " ++ atom_to_list(Cmd) ++ " " ++ Args; | |||||
to_desc(Cmd) -> | |||||
atom_to_list(Cmd). | |||||
module(Name) -> | |||||
{attribute, 1, module, Name}. | |||||
exports() -> | |||||
{attribute, 1, export, [{do, 1}]}. | |||||
do_func(Cmds) -> | |||||
{function, 1, do, 1, | |||||
[{clause, 1, | |||||
[{var, 1, 'State'}], | |||||
[], | |||||
[{call, 1, | |||||
{remote, 1, {atom, 1, rebar_prv_do}, {atom, 1, do_tasks}}, | |||||
[make_args(Cmds), {var, 1, 'State'}]}]}]}. | |||||
make_args(Cmds) -> | |||||
make_list( | |||||
lists:map(fun make_tuple/1, | |||||
lists:map(fun make_arg/1, Cmds))). | |||||
make_arg({Namespace, Command, Args}) when is_atom(Namespace), is_atom(Command) -> | |||||
{make_atom(Namespace), | |||||
make_atom(Command), | |||||
make_list([make_string(A) || A <- split_args(Args)])}; | |||||
make_arg({Namespace, Command}) when is_atom(Namespace), is_atom(Command) -> | |||||
{make_atom(Namespace), make_atom(Command)}; | |||||
make_arg({Cmd, Args}) -> | |||||
{make_string(Cmd), make_list([make_string(A) || A <- split_args(Args)])}; | |||||
make_arg(Cmd) -> | |||||
{make_string(Cmd), make_list([])}. | |||||
make_tuple(Tuple) -> | |||||
{tuple, 1, tuple_to_list(Tuple)}. | |||||
make_list(List) -> | |||||
lists:foldr( | |||||
fun(Elem, Acc) -> {cons, 1, Elem, Acc} end, | |||||
{nil, 1}, | |||||
List). | |||||
make_string(Atom) when is_atom(Atom) -> | |||||
make_string(atom_to_list(Atom)); | |||||
make_string(String) when is_list(String) -> | |||||
{string, 1, String}. | |||||
make_atom(Atom) when is_atom(Atom) -> | |||||
{atom, 1, Atom}. | |||||
%% In case someone used the long option format, the option needs to get | |||||
%% separated from its value. | |||||
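%% e.g. split_args("--cover --dir=test") -> ["--cover", "--dir", "test"]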
split_args(Args) -> | |||||
rebar_string:lexemes( | |||||
lists:map(fun($=) -> 32; (C) -> C end, Args), | |||||
" "). |
@ -0,0 +1,37 @@
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- | |||||
%% ex: ts=4 sw=4 et | |||||
-module(rebar_prv_get_deps). | |||||
-behaviour(provider). | |||||
-export([init/1, | |||||
do/1, | |||||
format_error/1]). | |||||
-define(PROVIDER, 'get-deps'). | |||||
-define(DEPS, [lock]). | |||||
%% =================================================================== | |||||
%% Public API | |||||
%% =================================================================== | |||||
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}. | |||||
init(State) -> | |||||
Provider = providers:create([{name, ?PROVIDER}, | |||||
{module, ?MODULE}, | |||||
{deps, ?DEPS}, | |||||
{bare, true}, | |||||
{example, "rebar3 get-deps"}, | |||||
{short_desc, "Fetch dependencies."}, | |||||
{desc, "Fetch project dependencies."}, | |||||
{opts, []}, | |||||
{profiles, []}]), | |||||
{ok, rebar_state:add_provider(State, Provider)}. | |||||
-spec do(rebar_state:t()) -> {ok, rebar_state:t()}. | |||||
do(State) -> {ok, State}. | |||||
-spec format_error(any()) -> iolist(). | |||||
format_error(Reason) -> | |||||
io_lib:format("~p", [Reason]). |
@ -0,0 +1,47 @@
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil -*- | |||||
%% ex: ts=4 sw=4 et | |||||
-module(rebar_prv_repos). | |||||
-behaviour(provider). | |||||
-export([init/1, | |||||
do/1, | |||||
format_error/1]). | |||||
-include("rebar.hrl"). | |||||
-define(PROVIDER, repos). | |||||
-define(DEPS, []). | |||||
%% =================================================================== | |||||
%% Public API | |||||
%% =================================================================== | |||||
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}. | |||||
init(State) -> | |||||
Provider = providers:create( | |||||
[{name, ?PROVIDER}, | |||||
{module, ?MODULE}, | |||||
{bare, false}, | |||||
{deps, ?DEPS}, | |||||
{example, "rebar3 repos"}, | |||||
{short_desc, "Print current package repository configuration"}, | |||||
{desc, "Display repository configuration for debugging purpose"}, | |||||
{opts, []}]), | |||||
State1 = rebar_state:add_provider(State, Provider), | |||||
{ok, State1}. | |||||
-spec do(rebar_state:t()) -> {ok, rebar_state:t()} | {error, string()}. | |||||
do(State) -> | |||||
Resources = rebar_state:resources(State), | |||||
#{repos := Repos} = rebar_resource_v2:find_resource_state(pkg, Resources), | |||||
?CONSOLE("Repos:", []), | |||||
%%TODO: do some formatting | |||||
?CONSOLE("~p", [Repos]), | |||||
{ok, State}. | |||||
-spec format_error(any()) -> iolist(). | |||||
format_error(Reason) -> | |||||
io_lib:format("~p", [Reason]). |