From df58b46de25e71134f343c6f6443e6de98b59bae Mon Sep 17 00:00:00 2001 From: SisMaker <156736github> Date: Mon, 20 Dec 2021 22:53:34 +0800 Subject: [PATCH] =?UTF-8?q?ft:=20=E5=88=9D=E5=A7=8B=E5=8C=96=E6=8F=90?= =?UTF-8?q?=E4=BA=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 29 + LICENSE | 21 + README.md | 9 + include/cow_inline.hrl | 447 +++ include/cow_parse.hrl | 83 + rebar.config | 21 + src/eWSrv.app.src | 11 + src/eWSrv_app.erl | 11 + src/eWSrv_sup.erl | 29 + src/wsLib/cow_base64url.erl | 81 + src/wsLib/cow_cookie.erl | 428 ++ src/wsLib/cow_date.erl | 434 ++ src/wsLib/cow_hpack.erl | 1449 +++++++ src/wsLib/cow_hpack_dec_huffman_lookup.hrl | 4132 ++++++++++++++++++++ src/wsLib/cow_http.erl | 426 ++ src/wsLib/cow_http2.erl | 483 +++ src/wsLib/cow_http2_machine.erl | 1647 ++++++++ src/wsLib/cow_http_hd.erl | 3622 +++++++++++++++++ src/wsLib/cow_http_struct_hd.erl | 420 ++ src/wsLib/cow_http_te.erl | 373 ++ src/wsLib/cow_iolists.erl | 95 + src/wsLib/cow_link.erl | 445 +++ src/wsLib/cow_mimetypes.erl | 1045 +++++ src/wsLib/cow_mimetypes.erl.src | 61 + src/wsLib/cow_multipart.erl | 775 ++++ src/wsLib/cow_qs.erl | 563 +++ src/wsLib/cow_spdy.erl | 313 ++ src/wsLib/cow_spdy.hrl | 181 + src/wsLib/cow_sse.erl | 348 ++ src/wsLib/cow_uri.erl | 339 ++ src/wsLib/cow_uri_template.erl | 356 ++ src/wsLib/cow_ws.erl | 741 ++++ src/wsNet/ranch.erl | 625 +++ src/wsNet/ranch_acceptor.erl | 72 + src/wsNet/ranch_acceptors_sup.erl | 103 + src/wsNet/ranch_app.erl | 48 + src/wsNet/ranch_conns_sup.erl | 508 +++ src/wsNet/ranch_conns_sup_sup.erl | 42 + src/wsNet/ranch_crc32c.erl | 115 + src/wsNet/ranch_embedded_sup.erl | 36 + src/wsNet/ranch_listener_sup.erl | 48 + src/wsNet/ranch_protocol.erl | 23 + src/wsNet/ranch_proxy_header.erl | 1007 +++++ src/wsNet/ranch_server.erl | 279 ++ src/wsNet/ranch_server_proxy.erl | 67 + src/wsNet/ranch_ssl.erl | 341 ++ src/wsNet/ranch_sup.erl | 39 + src/wsNet/ranch_tcp.erl | 287 ++ src/wsNet/ranch_transport.erl | 157 + src/wsSrv/cowboy.erl | 105 + src/wsSrv/cowboy_app.erl | 27 + src/wsSrv/cowboy_bstr.erl | 123 + src/wsSrv/cowboy_children.erl | 192 + src/wsSrv/cowboy_clear.erl | 60 + src/wsSrv/cowboy_clock.erl | 221 ++ src/wsSrv/cowboy_compress_h.erl | 249 ++ src/wsSrv/cowboy_constraints.erl | 174 + src/wsSrv/cowboy_handler.erl | 57 + src/wsSrv/cowboy_http.erl | 1523 ++++++++ src/wsSrv/cowboy_http2.erl | 1220 ++++++ src/wsSrv/cowboy_loop.erl | 108 + src/wsSrv/cowboy_metrics_h.erl | 331 ++ src/wsSrv/cowboy_middleware.erl | 24 + src/wsSrv/cowboy_req.erl | 1016 +++++ src/wsSrv/cowboy_rest.erl | 1637 ++++++++ src/wsSrv/cowboy_router.erl | 603 +++ src/wsSrv/cowboy_static.erl | 418 ++ src/wsSrv/cowboy_stream.erl | 193 + src/wsSrv/cowboy_stream_h.erl | 324 ++ src/wsSrv/cowboy_sub_protocol.erl | 24 + src/wsSrv/cowboy_sup.erl | 30 + src/wsSrv/cowboy_tls.erl | 56 + src/wsSrv/cowboy_tracer_h.erl | 192 + src/wsSrv/cowboy_websocket.erl | 707 ++++ 74 files changed, 32829 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 include/cow_inline.hrl create mode 100644 include/cow_parse.hrl create mode 100644 rebar.config create mode 100644 src/eWSrv.app.src create mode 100644 src/eWSrv_app.erl create mode 100644 src/eWSrv_sup.erl create mode 100644 src/wsLib/cow_base64url.erl create mode 100644 src/wsLib/cow_cookie.erl create mode 100644 src/wsLib/cow_date.erl create mode 100644 src/wsLib/cow_hpack.erl create mode 100644 src/wsLib/cow_hpack_dec_huffman_lookup.hrl create 
mode 100644 src/wsLib/cow_http.erl create mode 100644 src/wsLib/cow_http2.erl create mode 100644 src/wsLib/cow_http2_machine.erl create mode 100644 src/wsLib/cow_http_hd.erl create mode 100644 src/wsLib/cow_http_struct_hd.erl create mode 100644 src/wsLib/cow_http_te.erl create mode 100644 src/wsLib/cow_iolists.erl create mode 100644 src/wsLib/cow_link.erl create mode 100644 src/wsLib/cow_mimetypes.erl create mode 100644 src/wsLib/cow_mimetypes.erl.src create mode 100644 src/wsLib/cow_multipart.erl create mode 100644 src/wsLib/cow_qs.erl create mode 100644 src/wsLib/cow_spdy.erl create mode 100644 src/wsLib/cow_spdy.hrl create mode 100644 src/wsLib/cow_sse.erl create mode 100644 src/wsLib/cow_uri.erl create mode 100644 src/wsLib/cow_uri_template.erl create mode 100644 src/wsLib/cow_ws.erl create mode 100644 src/wsNet/ranch.erl create mode 100644 src/wsNet/ranch_acceptor.erl create mode 100644 src/wsNet/ranch_acceptors_sup.erl create mode 100644 src/wsNet/ranch_app.erl create mode 100644 src/wsNet/ranch_conns_sup.erl create mode 100644 src/wsNet/ranch_conns_sup_sup.erl create mode 100644 src/wsNet/ranch_crc32c.erl create mode 100644 src/wsNet/ranch_embedded_sup.erl create mode 100644 src/wsNet/ranch_listener_sup.erl create mode 100644 src/wsNet/ranch_protocol.erl create mode 100644 src/wsNet/ranch_proxy_header.erl create mode 100644 src/wsNet/ranch_server.erl create mode 100644 src/wsNet/ranch_server_proxy.erl create mode 100644 src/wsNet/ranch_ssl.erl create mode 100644 src/wsNet/ranch_sup.erl create mode 100644 src/wsNet/ranch_tcp.erl create mode 100644 src/wsNet/ranch_transport.erl create mode 100644 src/wsSrv/cowboy.erl create mode 100644 src/wsSrv/cowboy_app.erl create mode 100644 src/wsSrv/cowboy_bstr.erl create mode 100644 src/wsSrv/cowboy_children.erl create mode 100644 src/wsSrv/cowboy_clear.erl create mode 100644 src/wsSrv/cowboy_clock.erl create mode 100644 src/wsSrv/cowboy_compress_h.erl create mode 100644 src/wsSrv/cowboy_constraints.erl create mode 100644 src/wsSrv/cowboy_handler.erl create mode 100644 src/wsSrv/cowboy_http.erl create mode 100644 src/wsSrv/cowboy_http2.erl create mode 100644 src/wsSrv/cowboy_loop.erl create mode 100644 src/wsSrv/cowboy_metrics_h.erl create mode 100644 src/wsSrv/cowboy_middleware.erl create mode 100644 src/wsSrv/cowboy_req.erl create mode 100644 src/wsSrv/cowboy_rest.erl create mode 100644 src/wsSrv/cowboy_router.erl create mode 100644 src/wsSrv/cowboy_static.erl create mode 100644 src/wsSrv/cowboy_stream.erl create mode 100644 src/wsSrv/cowboy_stream_h.erl create mode 100644 src/wsSrv/cowboy_sub_protocol.erl create mode 100644 src/wsSrv/cowboy_sup.erl create mode 100644 src/wsSrv/cowboy_tls.erl create mode 100644 src/wsSrv/cowboy_tracer_h.erl create mode 100644 src/wsSrv/cowboy_websocket.erl diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0ad44f1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,29 @@ +.eunit +*.o +*.beam +*.plt +erl_crash.dump +.concrete/DEV_MODE + +# rebar 2.x +.rebar +rel/example_project +ebin/* +deps + +# rebar 3 +.rebar3 +_build/ +_checkouts/ +rebar.lock + +# idea +.idea +*.iml +cmake-build* +CMakeLists.txt + +# nif compile temp file +*.pdb +*.d +compile_commands.json \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..9b25337 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 AICells + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to 
deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..dfaff10 --- /dev/null +++ b/README.md @@ -0,0 +1,9 @@ +eWSrv +===== + +An OTP application + +Build +----- + + $ rebar3 compile diff --git a/include/cow_inline.hrl b/include/cow_inline.hrl new file mode 100644 index 0000000..f0d12eb --- /dev/null +++ b/include/cow_inline.hrl @@ -0,0 +1,447 @@ +%% Copyright (c) 2014-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-ifndef(COW_INLINE_HRL). +-define(COW_INLINE_HRL, 1). + +%% LC(Character) + +-define(LC(C), case C of + $A -> $a; + $B -> $b; + $C -> $c; + $D -> $d; + $E -> $e; + $F -> $f; + $G -> $g; + $H -> $h; + $I -> $i; + $J -> $j; + $K -> $k; + $L -> $l; + $M -> $m; + $N -> $n; + $O -> $o; + $P -> $p; + $Q -> $q; + $R -> $r; + $S -> $s; + $T -> $t; + $U -> $u; + $V -> $v; + $W -> $w; + $X -> $x; + $Y -> $y; + $Z -> $z; + _ -> C +end). + +%% LOWER(Bin) +%% +%% Lowercase the entire binary string in a binary comprehension. + +-define(LOWER(Bin), << << ?LC(C) >> || << C >> <= Bin >>). + +%% LOWERCASE(Function, Rest, Acc, ...) +%% +%% To be included at the end of a case block. +%% Defined for up to 10 extra arguments. 
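The ?LOWER(Function, Rest, Acc, ...) macros defined below are meant to sit in tail position of a parsing clause that has already matched the current character into a variable literally named C. A minimal, hypothetical sketch of that calling convention, assuming cow_inline.hrl is on the include path (parse_name/2 is illustrative and not part of cowlib):

    -include("cow_inline.hrl").

    %% Hypothetical scanner: copies a name into Acc, lowercasing as it goes.
    %% The macro expands to `case C of ... end', so the clause head must
    %% bind the character to a variable named C.
    parse_name(<<$:, Rest/bits>>, Acc) ->
        {Acc, Rest};
    parse_name(<<C, Rest/bits>>, Acc) ->
        ?LOWER(parse_name, Rest, Acc).

    %% The one-argument form lowercases a whole binary at once:
    %% <<"content-type">> = ?LOWER(<<"Content-Type">>).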
+ +-define(LOWER(Function, Rest, Acc), case C of + $A -> Function(Rest, << Acc/binary, $a >>); + $B -> Function(Rest, << Acc/binary, $b >>); + $C -> Function(Rest, << Acc/binary, $c >>); + $D -> Function(Rest, << Acc/binary, $d >>); + $E -> Function(Rest, << Acc/binary, $e >>); + $F -> Function(Rest, << Acc/binary, $f >>); + $G -> Function(Rest, << Acc/binary, $g >>); + $H -> Function(Rest, << Acc/binary, $h >>); + $I -> Function(Rest, << Acc/binary, $i >>); + $J -> Function(Rest, << Acc/binary, $j >>); + $K -> Function(Rest, << Acc/binary, $k >>); + $L -> Function(Rest, << Acc/binary, $l >>); + $M -> Function(Rest, << Acc/binary, $m >>); + $N -> Function(Rest, << Acc/binary, $n >>); + $O -> Function(Rest, << Acc/binary, $o >>); + $P -> Function(Rest, << Acc/binary, $p >>); + $Q -> Function(Rest, << Acc/binary, $q >>); + $R -> Function(Rest, << Acc/binary, $r >>); + $S -> Function(Rest, << Acc/binary, $s >>); + $T -> Function(Rest, << Acc/binary, $t >>); + $U -> Function(Rest, << Acc/binary, $u >>); + $V -> Function(Rest, << Acc/binary, $v >>); + $W -> Function(Rest, << Acc/binary, $w >>); + $X -> Function(Rest, << Acc/binary, $x >>); + $Y -> Function(Rest, << Acc/binary, $y >>); + $Z -> Function(Rest, << Acc/binary, $z >>); + C -> Function(Rest, << Acc/binary, C >>) +end). + +-define(LOWER(Function, Rest, A0, Acc), case C of + $A -> Function(Rest, A0, << Acc/binary, $a >>); + $B -> Function(Rest, A0, << Acc/binary, $b >>); + $C -> Function(Rest, A0, << Acc/binary, $c >>); + $D -> Function(Rest, A0, << Acc/binary, $d >>); + $E -> Function(Rest, A0, << Acc/binary, $e >>); + $F -> Function(Rest, A0, << Acc/binary, $f >>); + $G -> Function(Rest, A0, << Acc/binary, $g >>); + $H -> Function(Rest, A0, << Acc/binary, $h >>); + $I -> Function(Rest, A0, << Acc/binary, $i >>); + $J -> Function(Rest, A0, << Acc/binary, $j >>); + $K -> Function(Rest, A0, << Acc/binary, $k >>); + $L -> Function(Rest, A0, << Acc/binary, $l >>); + $M -> Function(Rest, A0, << Acc/binary, $m >>); + $N -> Function(Rest, A0, << Acc/binary, $n >>); + $O -> Function(Rest, A0, << Acc/binary, $o >>); + $P -> Function(Rest, A0, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, << Acc/binary, $q >>); + $R -> Function(Rest, A0, << Acc/binary, $r >>); + $S -> Function(Rest, A0, << Acc/binary, $s >>); + $T -> Function(Rest, A0, << Acc/binary, $t >>); + $U -> Function(Rest, A0, << Acc/binary, $u >>); + $V -> Function(Rest, A0, << Acc/binary, $v >>); + $W -> Function(Rest, A0, << Acc/binary, $w >>); + $X -> Function(Rest, A0, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, << Acc/binary, $z >>); + C -> Function(Rest, A0, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, Acc), case C of + $A -> Function(Rest, A0, A1, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, << Acc/binary, C >>) +end). + +-define(LOWER(Function, Rest, A0, A1, A2, Acc), case C of + $A -> Function(Rest, A0, A1, A2, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, A2, A3, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, << Acc/binary, C >>) +end). + +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, $z >>); + C -> Function(Rest, A0, 
A1, A2, A3, A4, << Acc/binary, C >>) +end). + +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, A5, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, A5, A6, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, C >>) +end). 
+ +-define(LOWER(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, Acc), case C of + $A -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, C >>) +end). + +%% HEX(C) + +-define(HEX(C), (?HEXHL(C bsr 4)), (?HEXHL(C band 16#0f))). + +-define(HEXHL(HL), + case HL of + 0 -> $0; + 1 -> $1; + 2 -> $2; + 3 -> $3; + 4 -> $4; + 5 -> $5; + 6 -> $6; + 7 -> $7; + 8 -> $8; + 9 -> $9; + 10 -> $A; + 11 -> $B; + 12 -> $C; + 13 -> $D; + 14 -> $E; + 15 -> $F + end +). + +%% UNHEX(H, L) + +-define(UNHEX(H, L), (?UNHEX(H) bsl 4 bor ?UNHEX(L))). + +-define(UNHEX(C), + case C of + $0 -> 0; + $1 -> 1; + $2 -> 2; + $3 -> 3; + $4 -> 4; + $5 -> 5; + $6 -> 6; + $7 -> 7; + $8 -> 8; + $9 -> 9; + $A -> 10; + $B -> 11; + $C -> 12; + $D -> 13; + $E -> 14; + $F -> 15; + $a -> 10; + $b -> 11; + $c -> 12; + $d -> 13; + $e -> 14; + $f -> 15 + end +). + +-endif. diff --git a/include/cow_parse.hrl b/include/cow_parse.hrl new file mode 100644 index 0000000..ee4af70 --- /dev/null +++ b/include/cow_parse.hrl @@ -0,0 +1,83 @@ +%% Copyright (c) 2015-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-ifndef(COW_PARSE_HRL). +-define(COW_PARSE_HRL, 1). + +-define(IS_ALPHA(C), + (C =:= $a) or (C =:= $b) or (C =:= $c) or (C =:= $d) or (C =:= $e) or + (C =:= $f) or (C =:= $g) or (C =:= $h) or (C =:= $i) or (C =:= $j) or + (C =:= $k) or (C =:= $l) or (C =:= $m) or (C =:= $n) or (C =:= $o) or + (C =:= $p) or (C =:= $q) or (C =:= $r) or (C =:= $s) or (C =:= $t) or + (C =:= $u) or (C =:= $v) or (C =:= $w) or (C =:= $x) or (C =:= $y) or + (C =:= $z) or + (C =:= $A) or (C =:= $B) or (C =:= $C) or (C =:= $D) or (C =:= $E) or + (C =:= $F) or (C =:= $G) or (C =:= $H) or (C =:= $I) or (C =:= $J) or + (C =:= $K) or (C =:= $L) or (C =:= $M) or (C =:= $N) or (C =:= $O) or + (C =:= $P) or (C =:= $Q) or (C =:= $R) or (C =:= $S) or (C =:= $T) or + (C =:= $U) or (C =:= $V) or (C =:= $W) or (C =:= $X) or (C =:= $Y) or + (C =:= $Z) +). + +-define(IS_ALPHANUM(C), ?IS_ALPHA(C) or ?IS_DIGIT(C)). +-define(IS_CHAR(C), C > 0, C < 128). + +-define(IS_DIGIT(C), + (C =:= $0) or (C =:= $1) or (C =:= $2) or (C =:= $3) or (C =:= $4) or + (C =:= $5) or (C =:= $6) or (C =:= $7) or (C =:= $8) or (C =:= $9)). + +-define(IS_ETAGC(C), C =:= 16#21; C >= 16#23, C =/= 16#7f). + +-define(IS_HEX(C), + ?IS_DIGIT(C) or + (C =:= $a) or (C =:= $b) or (C =:= $c) or + (C =:= $d) or (C =:= $e) or (C =:= $f) or + (C =:= $A) or (C =:= $B) or (C =:= $C) or + (C =:= $D) or (C =:= $E) or (C =:= $F)). + +-define(IS_LHEX(C), + ?IS_DIGIT(C) or + (C =:= $a) or (C =:= $b) or (C =:= $c) or + (C =:= $d) or (C =:= $e) or (C =:= $f)). + +-define(IS_TOKEN(C), + ?IS_ALPHA(C) or ?IS_DIGIT(C) or + (C =:= $!) or (C =:= $#) or (C =:= $$) or (C =:= $%) or (C =:= $&) or + (C =:= $') or (C =:= $*) or (C =:= $+) or (C =:= $-) or (C =:= $.) or + (C =:= $^) or (C =:= $_) or (C =:= $`) or (C =:= $|) or (C =:= $~)). + +-define(IS_TOKEN68(C), + ?IS_ALPHA(C) or ?IS_DIGIT(C) or + (C =:= $-) or (C =:= $.) or (C =:= $_) or + (C =:= $~) or (C =:= $+) or (C =:= $/)). + +-define(IS_URI_UNRESERVED(C), + ?IS_ALPHA(C) or ?IS_DIGIT(C) or + (C =:= $-) or (C =:= $.) or (C =:= $_) or (C =:= $~)). + +-define(IS_URI_GEN_DELIMS(C), + (C =:= $:) or (C =:= $/) or (C =:= $?) or (C =:= $#) or + (C =:= $[) or (C =:= $]) or (C =:= $@)). + +-define(IS_URI_SUB_DELIMS(C), + (C =:= $!) or (C =:= $$) or (C =:= $&) or (C =:= $') or + (C =:= $() or (C =:= $)) or (C =:= $*) or (C =:= $+) or + (C =:= $,) or (C =:= $;) or (C =:= $=)). + +-define(IS_VCHAR(C), C =:= $\t; C > 31, C < 127). +-define(IS_VCHAR_OBS(C), C =:= $\t; C > 31, C =/= 127). +-define(IS_WS(C), (C =:= $\s) or (C =:= $\t)). +-define(IS_WS_COMMA(C), ?IS_WS(C) or (C =:= $,)). + +-endif. diff --git a/rebar.config b/rebar.config new file mode 100644 index 0000000..5212a84 --- /dev/null +++ b/rebar.config @@ -0,0 +1,21 @@ +{erl_opts, [ + debug_info, + verbose, + warn_export_vars, + warn_shadow_vars, + warn_obsolete_guard, + warn_missing_spec, + warn_untyped_record +]}. 
+ +{deps, [ + {eFmt, ".*", {git, "http://sismaker.tpddns.cn:53000/SisMaker/eFmt.git", {branch, "master"}}}, + {eGbh, ".*", {git, "http://sismaker.tpddns.cn:53000/SisMaker/eGbh.git", {branch, "master"}}}, + {eSync, ".*", {git, "http://sismaker.tpddns.cn:53000/SisMaker/eSync.git", {branch, "master"}}}, + {jiffy, ".*", {git, "http://sismaker.tpddns.cn:53000/SisMaker/jiffy.git", {branch, "master"}}} +]}. + +{shell, [ + % {config, "config/sys.config"}, + {apps, [eWSrv]} +]}. diff --git a/src/eWSrv.app.src b/src/eWSrv.app.src new file mode 100644 index 0000000..f8bccc6 --- /dev/null +++ b/src/eWSrv.app.src @@ -0,0 +1,11 @@ +{application, eWSrv, + [{description, "erlang web server"}, + {vsn, "0.1.0"}, + {registered, []}, + {mod, {eWSrv_app, []}}, + {applications, [kernel, stdlib, crypto, ssl]}, + {env, []}, + {modules, []}, + {licenses, ["MIT"]}, + {links, []} + ]}. diff --git a/src/eWSrv_app.erl b/src/eWSrv_app.erl new file mode 100644 index 0000000..230bdc4 --- /dev/null +++ b/src/eWSrv_app.erl @@ -0,0 +1,11 @@ +-module(eWSrv_app). + +-behaviour(application). + +-export([start/2, stop/1]). + +start(_StartType, _StartArgs) -> + eWSrv_sup:start_link(). + +stop(_State) -> + ok. diff --git a/src/eWSrv_sup.erl b/src/eWSrv_sup.erl new file mode 100644 index 0000000..69f3140 --- /dev/null +++ b/src/eWSrv_sup.erl @@ -0,0 +1,29 @@ +-module(eWSrv_sup). + +-behaviour(supervisor). + +-export([start_link/0]). + +-export([init/1]). + +-define(SERVER, ?MODULE). + +start_link() -> + supervisor:start_link({local, ?SERVER}, ?MODULE, []). + +%% sup_flags() = #{strategy => strategy(), % optional +%% intensity => non_neg_integer(), % optional +%% period => pos_integer()} % optional +%% child_spec() = #{id => child_id(), % mandatory +%% start => mfargs(), % mandatory +%% restart => restart(), % optional +%% shutdown => shutdown(), % optional +%% type => worker(), % optional +%% modules => modules()} % optional +init([]) -> + SupFlags = #{strategy => one_for_all, + intensity => 0, + period => 1}, + ChildSpecs = [], + {ok, {SupFlags, ChildSpecs}}. + diff --git a/src/wsLib/cow_base64url.erl b/src/wsLib/cow_base64url.erl new file mode 100644 index 0000000..17ec46c --- /dev/null +++ b/src/wsLib/cow_base64url.erl @@ -0,0 +1,81 @@ +%% Copyright (c) 2017-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% This module implements "base64url" following the algorithm +%% found in Appendix C of RFC7515. The option #{padding => false} +%% must be given to reproduce this variant exactly. The default +%% will leave the padding characters. +-module(cow_base64url). + +-export([decode/1]). +-export([decode/2]). +-export([encode/1]). +-export([encode/2]). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). +-endif. + +decode(Enc) -> + decode(Enc, #{}). 
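As the module comment above notes, this is the base64url variant from Appendix C of RFC 7515, with #{padding => false} selecting the unpadded form. A quick usage sketch, using the same values as rfc7515_test/0 further down (to be evaluated inside a function or the shell):

    Dec = <<3,236,255,224,193>>,
    <<"A-z_4ME=">> = cow_base64url:encode(Dec),
    <<"A-z_4ME">> = cow_base64url:encode(Dec, #{padding => false}),
    Dec = cow_base64url:decode(<<"A-z_4ME">>, #{padding => false}).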
+ +decode(Enc0, Opts) -> + Enc1 = << << case C of + $- -> $+; + $_ -> $/; + _ -> C + end >> || << C >> <= Enc0 >>, + Enc = case Opts of + #{padding := false} -> + case byte_size(Enc1) rem 4 of + 0 -> Enc1; + 2 -> << Enc1/binary, "==" >>; + 3 -> << Enc1/binary, "=" >> + end; + _ -> + Enc1 + end, + base64:decode(Enc). + +encode(Dec) -> + encode(Dec, #{}). + +encode(Dec, Opts) -> + encode(base64:encode(Dec), Opts, <<>>). + +encode(<<$+, R/bits>>, Opts, Acc) -> encode(R, Opts, <<Acc/binary, $->>); +encode(<<$/, R/bits>>, Opts, Acc) -> encode(R, Opts, <<Acc/binary, $_>>); +encode(<<$=, _/bits>>, #{padding := false}, Acc) -> Acc; +encode(<<C, R/bits>>, Opts, Acc) -> encode(R, Opts, <<Acc/binary, C>>); +encode(<<>>, _, Acc) -> Acc. + +-ifdef(TEST). + +rfc7515_test() -> + Dec = <<3,236,255,224,193>>, + Enc = <<"A-z_4ME">>, + Pad = <<"A-z_4ME=">>, + Dec = decode(<<Enc/binary, $=>>), + Dec = decode(Enc, #{padding => false}), + Pad = encode(Dec), + Enc = encode(Dec, #{padding => false}), + ok. + +prop_identity() -> + ?FORALL(B, binary(), B =:= decode(encode(B))). + +prop_identity_no_padding() -> + ?FORALL(B, binary(), B =:= decode(encode(B, #{padding => false}), #{padding => false})). + +-endif. diff --git a/src/wsLib/cow_cookie.erl b/src/wsLib/cow_cookie.erl new file mode 100644 index 0000000..93a8e61 --- /dev/null +++ b/src/wsLib/cow_cookie.erl @@ -0,0 +1,428 @@ +%% Copyright (c) 2013-2020, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_cookie). + +-export([parse_cookie/1]). +-export([parse_set_cookie/1]). +-export([cookie/1]). +-export([setcookie/3]). + +-type cookie_attrs() :: #{ + expires => calendar:datetime(), + max_age => calendar:datetime(), + domain => binary(), + path => binary(), + secure => true, + http_only => true, + same_site => strict | lax | none +}. +-export_type([cookie_attrs/0]). + +-type cookie_opts() :: #{ + domain => binary(), + http_only => boolean(), + max_age => non_neg_integer(), + path => binary(), + same_site => strict | lax | none, + secure => boolean() +}. +-export_type([cookie_opts/0]). + +-include("cow_inline.hrl"). + +%% Cookie header. + +-spec parse_cookie(binary()) -> [{binary(), binary()}]. +parse_cookie(Cookie) -> + parse_cookie(Cookie, []). + +parse_cookie(<<>>, Acc) -> + lists:reverse(Acc); +parse_cookie(<< $\s, Rest/binary >>, Acc) -> + parse_cookie(Rest, Acc); +parse_cookie(<< $\t, Rest/binary >>, Acc) -> + parse_cookie(Rest, Acc); +parse_cookie(<< $,, Rest/binary >>, Acc) -> + parse_cookie(Rest, Acc); +parse_cookie(<< $;, Rest/binary >>, Acc) -> + parse_cookie(Rest, Acc); +parse_cookie(Cookie, Acc) -> + parse_cookie_name(Cookie, Acc, <<>>).
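A short usage sketch for parse_cookie/1 as defined above; the expected terms match the cases of parse_cookie_test_/0 further down. A pair without '=' comes back with an empty binary as the name:

    [{<<"name">>, <<"value">>}, {<<"name2">>, <<"value2">>}] =
        cow_cookie:parse_cookie(<<"name=value; name2=value2">>),
    [{<<>>, <<"foo">>}] =
        cow_cookie:parse_cookie(<<"foo">>).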
+ +parse_cookie_name(<<>>, Acc, Name) -> + lists:reverse([{<<>>, parse_cookie_trim(Name)}|Acc]); +parse_cookie_name(<< $=, _/binary >>, _, <<>>) -> + error(badarg); +parse_cookie_name(<< $=, Rest/binary >>, Acc, Name) -> + parse_cookie_value(Rest, Acc, Name, <<>>); +parse_cookie_name(<< $,, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< $;, Rest/binary >>, Acc, Name) -> + parse_cookie(Rest, [{<<>>, parse_cookie_trim(Name)}|Acc]); +parse_cookie_name(<< $\t, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< $\r, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< $\n, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< $\013, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< $\014, _/binary >>, _, _) -> + error(badarg); +parse_cookie_name(<< C, Rest/binary >>, Acc, Name) -> + parse_cookie_name(Rest, Acc, << Name/binary, C >>). + +parse_cookie_value(<<>>, Acc, Name, Value) -> + lists:reverse([{Name, parse_cookie_trim(Value)}|Acc]); +parse_cookie_value(<< $;, Rest/binary >>, Acc, Name, Value) -> + parse_cookie(Rest, [{Name, parse_cookie_trim(Value)}|Acc]); +parse_cookie_value(<< $\t, _/binary >>, _, _, _) -> + error(badarg); +parse_cookie_value(<< $\r, _/binary >>, _, _, _) -> + error(badarg); +parse_cookie_value(<< $\n, _/binary >>, _, _, _) -> + error(badarg); +parse_cookie_value(<< $\013, _/binary >>, _, _, _) -> + error(badarg); +parse_cookie_value(<< $\014, _/binary >>, _, _, _) -> + error(badarg); +parse_cookie_value(<< C, Rest/binary >>, Acc, Name, Value) -> + parse_cookie_value(Rest, Acc, Name, << Value/binary, C >>). + +parse_cookie_trim(Value = <<>>) -> + Value; +parse_cookie_trim(Value) -> + case binary:last(Value) of + $\s -> + Size = byte_size(Value) - 1, + << Value2:Size/binary, _ >> = Value, + parse_cookie_trim(Value2); + _ -> + Value + end. + +-ifdef(TEST). +parse_cookie_test_() -> + %% {Value, Result}. + Tests = [ + {<<"name=value; name2=value2">>, [ + {<<"name">>, <<"value">>}, + {<<"name2">>, <<"value2">>} + ]}, + %% Space in value. + {<<"foo=Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>, + [{<<"foo">>, <<"Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>}]}, + %% Comma in value. Google Analytics sets that kind of cookies. + {<<"refk=sOUZDzq2w2; sk=B602064E0139D842D620C7569640DBB4C81C45080651" + "9CC124EF794863E10E80; __utma=64249653.825741573.1380181332.1400" + "015657.1400019557.703; __utmb=64249653.1.10.1400019557; __utmc=" + "64249653; __utmz=64249653.1400019557.703.13.utmcsr=bluesky.chic" + "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin" + "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>, [ + {<<"refk">>, <<"sOUZDzq2w2">>}, + {<<"sk">>, <<"B602064E0139D842D620C7569640DBB4C81C45080651" + "9CC124EF794863E10E80">>}, + {<<"__utma">>, <<"64249653.825741573.1380181332.1400" + "015657.1400019557.703">>}, + {<<"__utmb">>, <<"64249653.1.10.1400019557">>}, + {<<"__utmc">>, <<"64249653">>}, + {<<"__utmz">>, <<"64249653.1400019557.703.13.utmcsr=bluesky.chic" + "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin" + "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>} + ]}, + %% Potential edge cases (initially from Mochiweb). + {<<"foo=\\x">>, [{<<"foo">>, <<"\\x">>}]}, + {<<"foo=;bar=">>, [{<<"foo">>, <<>>}, {<<"bar">>, <<>>}]}, + {<<"foo=\\\";;bar=good ">>, + [{<<"foo">>, <<"\\\"">>}, {<<"bar">>, <<"good">>}]}, + {<<"foo=\"\\\";bar=good">>, + [{<<"foo">>, <<"\"\\\"">>}, {<<"bar">>, <<"good">>}]}, + {<<>>, []}, %% Flash player. 
+ {<<"foo=bar , baz=wibble ">>, [{<<"foo">>, <<"bar , baz=wibble">>}]}, + %% Technically invalid, but seen in the wild + {<<"foo">>, [{<<>>, <<"foo">>}]}, + {<<"foo ">>, [{<<>>, <<"foo">>}]}, + {<<"foo;">>, [{<<>>, <<"foo">>}]}, + {<<"bar;foo=1">>, [{<<>>, <<"bar">>}, {<<"foo">>, <<"1">>}]} + ], + [{V, fun() -> R = parse_cookie(V) end} || {V, R} <- Tests]. + +parse_cookie_error_test_() -> + %% Value. + Tests = [ + <<"=">> + ], + [{V, fun() -> {'EXIT', {badarg, _}} = (catch parse_cookie(V)) end} || V <- Tests]. +-endif. + +%% Set-Cookie header. + +-spec parse_set_cookie(binary()) + -> {ok, binary(), binary(), cookie_attrs()} + | ignore. +parse_set_cookie(SetCookie) -> + {NameValuePair, UnparsedAttrs} = take_until_semicolon(SetCookie, <<>>), + {Name, Value} = case binary:split(NameValuePair, <<$=>>) of + [Value0] -> {<<>>, trim(Value0)}; + [Name0, Value0] -> {trim(Name0), trim(Value0)} + end, + case {Name, Value} of + {<<>>, <<>>} -> + ignore; + _ -> + Attrs = parse_set_cookie_attrs(UnparsedAttrs, #{}), + {ok, Name, Value, Attrs} + end. + +parse_set_cookie_attrs(<<>>, Attrs) -> + Attrs; +parse_set_cookie_attrs(<<$;,Rest0/bits>>, Attrs) -> + {Av, Rest} = take_until_semicolon(Rest0, <<>>), + {Name, Value} = case binary:split(Av, <<$=>>) of + [Name0] -> {trim(Name0), <<>>}; + [Name0, Value0] -> {trim(Name0), trim(Value0)} + end, + case parse_set_cookie_attr(?LOWER(Name), Value) of + {ok, AttrName, AttrValue} -> + parse_set_cookie_attrs(Rest, Attrs#{AttrName => AttrValue}); + {ignore, AttrName} -> + parse_set_cookie_attrs(Rest, maps:remove(AttrName, Attrs)); + ignore -> + parse_set_cookie_attrs(Rest, Attrs) + end. + +take_until_semicolon(Rest = <<$;,_/bits>>, Acc) -> {Acc, Rest}; +take_until_semicolon(<>, Acc) -> take_until_semicolon(R, <>); +take_until_semicolon(<<>>, Acc) -> {Acc, <<>>}. + +trim(String) -> + string:trim(String, both, [$\s, $\t]). + +parse_set_cookie_attr(<<"expires">>, Value) -> + try cow_date:parse_date(Value) of + DateTime -> + {ok, expires, DateTime} + catch _:_ -> + ignore + end; +parse_set_cookie_attr(<<"max-age">>, Value) -> + try binary_to_integer(Value) of + MaxAge when MaxAge =< 0 -> + %% Year 0 corresponds to 1 BC. + {ok, max_age, {{0, 1, 1}, {0, 0, 0}}}; + MaxAge -> + CurrentTime = erlang:universaltime(), + {ok, max_age, calendar:gregorian_seconds_to_datetime( + calendar:datetime_to_gregorian_seconds(CurrentTime) + MaxAge)} + catch _:_ -> + ignore + end; +parse_set_cookie_attr(<<"domain">>, Value) -> + case Value of + <<>> -> + ignore; + <<".",Rest/bits>> -> + {ok, domain, ?LOWER(Rest)}; + _ -> + {ok, domain, ?LOWER(Value)} + end; +parse_set_cookie_attr(<<"path">>, Value) -> + case Value of + <<"/",_/bits>> -> + {ok, path, Value}; + %% When the path is not absolute, or the path is empty, the default-path will be used. + %% Note that the default-path is also used when there are no path attributes, + %% so we are simply ignoring the attribute here. + _ -> + {ignore, path} + end; +parse_set_cookie_attr(<<"secure">>, _) -> + {ok, secure, true}; +parse_set_cookie_attr(<<"httponly">>, _) -> + {ok, http_only, true}; +parse_set_cookie_attr(<<"samesite">>, Value) -> + case ?LOWER(Value) of + <<"strict">> -> + {ok, same_site, strict}; + <<"lax">> -> + {ok, same_site, lax}; + %% Clients may have different defaults than "None". + <<"none">> -> + {ok, same_site, none}; + %% Unknown values and lack of value are equivalent. + _ -> + ignore + end; +parse_set_cookie_attr(_, _) -> + ignore. + +-ifdef(TEST). 
+parse_set_cookie_test_() -> + Tests = [ + {<<"a=b">>, {ok, <<"a">>, <<"b">>, #{}}}, + {<<"a=b; Secure">>, {ok, <<"a">>, <<"b">>, #{secure => true}}}, + {<<"a=b; HttpOnly">>, {ok, <<"a">>, <<"b">>, #{http_only => true}}}, + {<<"a=b; Expires=Wed, 21 Oct 2015 07:28:00 GMT; Expires=Wed, 21 Oct 2015 07:29:00 GMT">>, + {ok, <<"a">>, <<"b">>, #{expires => {{2015,10,21},{7,29,0}}}}}, + {<<"a=b; Max-Age=999; Max-Age=0">>, + {ok, <<"a">>, <<"b">>, #{max_age => {{0,1,1},{0,0,0}}}}}, + {<<"a=b; Domain=example.org; Domain=foo.example.org">>, + {ok, <<"a">>, <<"b">>, #{domain => <<"foo.example.org">>}}}, + {<<"a=b; Path=/path/to/resource; Path=/">>, + {ok, <<"a">>, <<"b">>, #{path => <<"/">>}}}, + {<<"a=b; SameSite=Lax; SameSite=Strict">>, + {ok, <<"a">>, <<"b">>, #{same_site => strict}}} + ], + [{SetCookie, fun() -> Res = parse_set_cookie(SetCookie) end} + || {SetCookie, Res} <- Tests]. +-endif. + +%% Build a cookie header. + +-spec cookie([{iodata(), iodata()}]) -> iolist(). +cookie([]) -> + []; +cookie([{<<>>, Value}]) -> + [Value]; +cookie([{Name, Value}]) -> + [Name, $=, Value]; +cookie([{<<>>, Value}|Tail]) -> + [Value, $;, $\s|cookie(Tail)]; +cookie([{Name, Value}|Tail]) -> + [Name, $=, Value, $;, $\s|cookie(Tail)]. + +-ifdef(TEST). +cookie_test_() -> + Tests = [ + {[], <<>>}, + {[{<<"a">>, <<"b">>}], <<"a=b">>}, + {[{<<"a">>, <<"b">>}, {<<"c">>, <<"d">>}], <<"a=b; c=d">>}, + {[{<<>>, <<"b">>}, {<<"c">>, <<"d">>}], <<"b; c=d">>}, + {[{<<"a">>, <<"b">>}, {<<>>, <<"d">>}], <<"a=b; d">>} + ], + [{Res, fun() -> Res = iolist_to_binary(cookie(Cookies)) end} + || {Cookies, Res} <- Tests]. +-endif. + +%% Convert a cookie name, value and options to its iodata form. +%% +%% Initially from Mochiweb: +%% * Copyright 2007 Mochi Media, Inc. +%% Initial binary implementation: +%% * Copyright 2011 Thomas Burdick +%% +%% @todo Rename the function to set_cookie eventually. + +-spec setcookie(iodata(), iodata(), cookie_opts()) -> iolist(). +setcookie(Name, Value, Opts) -> + nomatch = binary:match(iolist_to_binary(Name), [<<$=>>, <<$,>>, <<$;>>, + <<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]), + nomatch = binary:match(iolist_to_binary(Value), [<<$,>>, <<$;>>, + <<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]), + [Name, <<"=">>, Value, <<"; Version=1">>, attributes(maps:to_list(Opts))]. + +attributes([]) -> []; +attributes([{domain, Domain}|Tail]) -> [<<"; Domain=">>, Domain|attributes(Tail)]; +attributes([{http_only, false}|Tail]) -> attributes(Tail); +attributes([{http_only, true}|Tail]) -> [<<"; HttpOnly">>|attributes(Tail)]; +%% MSIE requires an Expires date in the past to delete a cookie. 
+attributes([{max_age, 0}|Tail]) -> + [<<"; Expires=Thu, 01-Jan-1970 00:00:01 GMT; Max-Age=0">>|attributes(Tail)]; +attributes([{max_age, MaxAge}|Tail]) when is_integer(MaxAge), MaxAge > 0 -> + Secs = calendar:datetime_to_gregorian_seconds(calendar:universal_time()), + Expires = cow_date:rfc2109(calendar:gregorian_seconds_to_datetime(Secs + MaxAge)), + [<<"; Expires=">>, Expires, <<"; Max-Age=">>, integer_to_list(MaxAge)|attributes(Tail)]; +attributes([Opt={max_age, _}|_]) -> + error({badarg, Opt}); +attributes([{path, Path}|Tail]) -> [<<"; Path=">>, Path|attributes(Tail)]; +attributes([{secure, false}|Tail]) -> attributes(Tail); +attributes([{secure, true}|Tail]) -> [<<"; Secure">>|attributes(Tail)]; +attributes([{same_site, lax}|Tail]) -> [<<"; SameSite=Lax">>|attributes(Tail)]; +attributes([{same_site, strict}|Tail]) -> [<<"; SameSite=Strict">>|attributes(Tail)]; +attributes([{same_site, none}|Tail]) -> [<<"; SameSite=None">>|attributes(Tail)]; +%% Skip unknown options. +attributes([_|Tail]) -> attributes(Tail). + +-ifdef(TEST). +setcookie_test_() -> + %% {Name, Value, Opts, Result} + Tests = [ + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{http_only => true, domain => <<"acme.com">>}, + <<"Customer=WILE_E_COYOTE; Version=1; " + "Domain=acme.com; HttpOnly">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{path => <<"/acme">>}, + <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{secure => true}, + <<"Customer=WILE_E_COYOTE; Version=1; Secure">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{secure => false, http_only => false}, + <<"Customer=WILE_E_COYOTE; Version=1">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{same_site => lax}, + <<"Customer=WILE_E_COYOTE; Version=1; SameSite=Lax">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{same_site => strict}, + <<"Customer=WILE_E_COYOTE; Version=1; SameSite=Strict">>}, + {<<"Customer">>, <<"WILE_E_COYOTE">>, + #{path => <<"/acme">>, badoption => <<"negatory">>}, + <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>} + ], + [{R, fun() -> R = iolist_to_binary(setcookie(N, V, O)) end} + || {N, V, O, R} <- Tests]. + +setcookie_max_age_test() -> + F = fun(N, V, O) -> + binary:split(iolist_to_binary( + setcookie(N, V, O)), <<";">>, [global]) + end, + [<<"Customer=WILE_E_COYOTE">>, + <<" Version=1">>, + <<" Expires=", _/binary>>, + <<" Max-Age=111">>, + <<" Secure">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>, + #{max_age => 111, secure => true}), + case catch F(<<"Customer">>, <<"WILE_E_COYOTE">>, #{max_age => -111}) of + {'EXIT', {{badarg, {max_age, -111}}, _}} -> ok + end, + [<<"Customer=WILE_E_COYOTE">>, + <<" Version=1">>, + <<" Expires=", _/binary>>, + <<" Max-Age=86417">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>, + #{max_age => 86417}), + ok. + +setcookie_failures_test_() -> + F = fun(N, V) -> + try setcookie(N, V, #{}) of + _ -> + false + catch _:_ -> + true + end + end, + Tests = [ + {<<"Na=me">>, <<"Value">>}, + {<<"Name;">>, <<"Value">>}, + {<<"\r\name">>, <<"Value">>}, + {<<"Name">>, <<"Value;">>}, + {<<"Name">>, <<"\value">>} + ], + [{iolist_to_binary(io_lib:format("{~p, ~p} failure", [N, V])), + fun() -> true = F(N, V) end} + || {N, V} <- Tests]. +-endif. 
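A brief usage sketch for the Set-Cookie side of the module; the expected binaries are taken from the unit tests above (note that setcookie/3 always appends Version=1):

    %% Building a Set-Cookie value (iodata) for a response:
    <<"Customer=WILE_E_COYOTE; Version=1; Domain=acme.com; HttpOnly">> =
        iolist_to_binary(cow_cookie:setcookie(<<"Customer">>, <<"WILE_E_COYOTE">>,
            #{http_only => true, domain => <<"acme.com">>})),

    %% Parsing a Set-Cookie header received from a server:
    {ok, <<"a">>, <<"b">>, #{secure := true}} =
        cow_cookie:parse_set_cookie(<<"a=b; Secure">>).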
diff --git a/src/wsLib/cow_date.erl b/src/wsLib/cow_date.erl new file mode 100644 index 0000000..36ce861 --- /dev/null +++ b/src/wsLib/cow_date.erl @@ -0,0 +1,434 @@ +%% Copyright (c) 2013-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_date). + +-export([parse_date/1]). +-export([rfc1123/1]). +-export([rfc2109/1]). +-export([rfc7231/1]). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). +-endif. + +%% @doc Parse the HTTP date (IMF-fixdate, rfc850, asctime). + +-define(DIGITS(A, B), ((A - $0) * 10 + (B - $0))). +-define(DIGITS(A, B, C, D), ((A - $0) * 1000 + (B - $0) * 100 + (C - $0) * 10 + (D - $0))). + +-spec parse_date(binary()) -> calendar:datetime(). +parse_date(DateBin) -> + Date = {{_, _, D}, {H, M, S}} = http_date(DateBin), + true = D >= 0 andalso D =< 31, + true = H >= 0 andalso H =< 23, + true = M >= 0 andalso M =< 59, + true = S >= 0 andalso S =< 60, %% Leap second. + Date. + +http_date(<<"Mon, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Tue, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Wed, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Thu, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Fri, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Sat, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Sun, ", D1, D2, " ", R/bits >>) -> fixdate(R, ?DIGITS(D1, D2)); +http_date(<<"Monday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Tuesday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Wednesday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Thursday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Friday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Saturday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Sunday, ", D1, D2, "-", R/bits >>) -> rfc850_date(R, ?DIGITS(D1, D2)); +http_date(<<"Mon ", R/bits >>) -> asctime_date(R); +http_date(<<"Tue ", R/bits >>) -> asctime_date(R); +http_date(<<"Wed ", R/bits >>) -> asctime_date(R); +http_date(<<"Thu ", R/bits >>) -> asctime_date(R); +http_date(<<"Fri ", R/bits >>) -> asctime_date(R); +http_date(<<"Sat ", R/bits >>) -> asctime_date(R); +http_date(<<"Sun ", R/bits >>) -> asctime_date(R). 
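The http_date/1 clauses above dispatch on the day-of-week prefix to pick one of the three formats named in the @doc comment; the asctime form is handled by asctime_date/1 further down in the file. A small sketch using the usual RFC example date:

    %% IMF-fixdate and the obsolete RFC 850 form parse to the same datetime:
    {{1994,11,6}, {8,49,37}} = cow_date:parse_date(<<"Sun, 06 Nov 1994 08:49:37 GMT">>),
    {{1994,11,6}, {8,49,37}} = cow_date:parse_date(<<"Sunday, 06-Nov-94 08:49:37 GMT">>).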
+ +fixdate(<<"Jan ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 1, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Feb ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 2, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Mar ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 3, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Apr ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 4, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"May ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 5, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Jun ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 6, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Jul ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 7, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Aug ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 8, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Sep ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 9, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Oct ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 10, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Nov ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 11, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +fixdate(<<"Dec ", Y1, Y2, Y3, Y4, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 12, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}. 
+ +rfc850_date(<<"Jan-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 1, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Feb-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 2, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Mar-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 3, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Apr-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 4, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"May-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 5, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Jun-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 6, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Jul-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 7, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Aug-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 8, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Sep-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 9, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Oct-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 10, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Nov-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 11, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +rfc850_date(<<"Dec-", Y1, Y2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " GMT">>, Day) -> + {{rfc850_year(?DIGITS(Y1, Y2)), 12, Day}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}. + +rfc850_year(Y) when Y > 50 -> Y + 1900; +rfc850_year(Y) -> Y + 2000. 
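+
+-ifdef(TEST).
+%% Editor's sketch, added for illustration: the two-digit rfc850 year
+%% pivots at 50, giving a 1951-2050 window consistent with the year()
+%% generator used by the property test further down.
+rfc850_year_pivot_test() ->
+    1994 = rfc850_year(94),
+    1951 = rfc850_year(51),
+    2050 = rfc850_year(50),
+    2022 = rfc850_year(22),
+    ok.
+-endif.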
+ +asctime_date(<<"Jan ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 1, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Feb ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 2, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Mar ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 3, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Apr ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 4, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"May ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 5, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Jun ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 6, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Jul ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 7, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Aug ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 8, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Sep ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 9, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Oct ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 10, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Nov ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 11, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}; +asctime_date(<<"Dec ", D1, D2, " ", H1, H2, ":", M1, M2, ":", S1, S2, " ", Y1, Y2, Y3, Y4 >>) -> + {{?DIGITS(Y1, Y2, Y3, Y4), 12, asctime_day(D1, D2)}, {?DIGITS(H1, H2), ?DIGITS(M1, M2), ?DIGITS(S1, S2)}}. + +asctime_day($\s, D2) -> (D2 - $0); +asctime_day(D1, D2) -> (D1 - $0) * 10 + (D2 - $0). + +-ifdef(TEST). +day_name() -> oneof(["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]). +day_name_l() -> oneof(["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]). +year() -> integer(1951, 2050). +month() -> integer(1, 12). +day() -> integer(1, 31). +hour() -> integer(0, 23). +minute() -> integer(0, 59). +second() -> integer(0, 60). + +fixdate_gen() -> + ?LET({DayName, Y, Mo, D, H, Mi, S}, + {day_name(), year(), month(), day(), hour(), minute(), second()}, + {{{Y, Mo, D}, {H, Mi, S}}, + list_to_binary([DayName, ", ", pad_int(D), " ", month(Mo), " ", integer_to_binary(Y), + " ", pad_int(H), ":", pad_int(Mi), ":", pad_int(S), " GMT"])}). + +rfc850_gen() -> + ?LET({DayName, Y, Mo, D, H, Mi, S}, + {day_name_l(), year(), month(), day(), hour(), minute(), second()}, + {{{Y, Mo, D}, {H, Mi, S}}, + list_to_binary([DayName, ", ", pad_int(D), "-", month(Mo), "-", pad_int(Y rem 100), + " ", pad_int(H), ":", pad_int(Mi), ":", pad_int(S), " GMT"])}). 
+ +asctime_gen() -> + ?LET({DayName, Y, Mo, D, H, Mi, S}, + {day_name(), year(), month(), day(), hour(), minute(), second()}, + {{{Y, Mo, D}, {H, Mi, S}}, + list_to_binary([DayName, " ", month(Mo), " ", + if D < 10 -> << $\s, (D + $0) >>; true -> integer_to_binary(D) end, + " ", pad_int(H), ":", pad_int(Mi), ":", pad_int(S), " ", integer_to_binary(Y)])}). + +prop_http_date() -> + ?FORALL({Date, DateBin}, + oneof([fixdate_gen(), rfc850_gen(), asctime_gen()]), + Date =:= parse_date(DateBin)). + +http_date_test_() -> + Tests = [ + {<<"Sun, 06 Nov 1994 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}, + {<<"Sunday, 06-Nov-94 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}, + {<<"Sun Nov 6 08:49:37 1994">>, {{1994, 11, 6}, {8, 49, 37}}} + ], + [{V, fun() -> R = http_date(V) end} || {V, R} <- Tests]. + +horse_http_date_fixdate() -> + horse:repeat(200000, + http_date(<<"Sun, 06 Nov 1994 08:49:37 GMT">>) + ). + +horse_http_date_rfc850() -> + horse:repeat(200000, + http_date(<<"Sunday, 06-Nov-94 08:49:37 GMT">>) + ). + +horse_http_date_asctime() -> + horse:repeat(200000, + http_date(<<"Sun Nov 6 08:49:37 1994">>) + ). +-endif. + +%% @doc Return the date formatted according to RFC1123. + +-spec rfc1123(calendar:datetime()) -> binary(). +rfc1123(DateTime) -> + rfc7231(DateTime). + +%% @doc Return the date formatted according to RFC2109. + +-spec rfc2109(calendar:datetime()) -> binary(). +rfc2109({Date = {Y, Mo, D}, {H, Mi, S}}) -> + Wday = calendar:day_of_the_week(Date), + << (weekday(Wday))/binary, ", ", + (pad_int(D))/binary, "-", + (month(Mo))/binary, "-", + (year(Y))/binary, " ", + (pad_int(H))/binary, ":", + (pad_int(Mi))/binary, ":", + (pad_int(S))/binary, " GMT" >>. + +-ifdef(TEST). +rfc2109_test_() -> + Tests = [ + {<<"Sat, 14-May-2011 14:25:33 GMT">>, {{2011, 5, 14}, {14, 25, 33}}}, + {<<"Sun, 01-Jan-2012 00:00:00 GMT">>, {{2012, 1, 1}, { 0, 0, 0}}} + ], + [{R, fun() -> R = rfc2109(D) end} || {R, D} <- Tests]. + +horse_rfc2109_20130101_000000() -> + horse:repeat(100000, + rfc2109({{2013, 1, 1}, {0, 0, 0}}) + ). + +horse_rfc2109_20131231_235959() -> + horse:repeat(100000, + rfc2109({{2013, 12, 31}, {23, 59, 59}}) + ). + +horse_rfc2109_12340506_070809() -> + horse:repeat(100000, + rfc2109({{1234, 5, 6}, {7, 8, 9}}) + ). +-endif. + +%% @doc Return the date formatted according to RFC7231. + +-spec rfc7231(calendar:datetime()) -> binary(). +rfc7231({Date = {Y, Mo, D}, {H, Mi, S}}) -> + Wday = calendar:day_of_the_week(Date), + << (weekday(Wday))/binary, ", ", + (pad_int(D))/binary, " ", + (month(Mo))/binary, " ", + (year(Y))/binary, " ", + (pad_int(H))/binary, ":", + (pad_int(Mi))/binary, ":", + (pad_int(S))/binary, " GMT" >>. + +-ifdef(TEST). +rfc7231_test_() -> + Tests = [ + {<<"Sat, 14 May 2011 14:25:33 GMT">>, {{2011, 5, 14}, {14, 25, 33}}}, + {<<"Sun, 01 Jan 2012 00:00:00 GMT">>, {{2012, 1, 1}, { 0, 0, 0}}} + ], + [{R, fun() -> R = rfc7231(D) end} || {R, D} <- Tests]. + +horse_rfc7231_20130101_000000() -> + horse:repeat(100000, + rfc7231({{2013, 1, 1}, {0, 0, 0}}) + ). + +horse_rfc7231_20131231_235959() -> + horse:repeat(100000, + rfc7231({{2013, 12, 31}, {23, 59, 59}}) + ). + +horse_rfc7231_12340506_070809() -> + horse:repeat(100000, + rfc7231({{1234, 5, 6}, {7, 8, 9}}) + ). +-endif. + +%% Internal. + +-spec pad_int(0..59) -> <<_:16>>. 
+pad_int( 0) -> <<"00">>; +pad_int( 1) -> <<"01">>; +pad_int( 2) -> <<"02">>; +pad_int( 3) -> <<"03">>; +pad_int( 4) -> <<"04">>; +pad_int( 5) -> <<"05">>; +pad_int( 6) -> <<"06">>; +pad_int( 7) -> <<"07">>; +pad_int( 8) -> <<"08">>; +pad_int( 9) -> <<"09">>; +pad_int(10) -> <<"10">>; +pad_int(11) -> <<"11">>; +pad_int(12) -> <<"12">>; +pad_int(13) -> <<"13">>; +pad_int(14) -> <<"14">>; +pad_int(15) -> <<"15">>; +pad_int(16) -> <<"16">>; +pad_int(17) -> <<"17">>; +pad_int(18) -> <<"18">>; +pad_int(19) -> <<"19">>; +pad_int(20) -> <<"20">>; +pad_int(21) -> <<"21">>; +pad_int(22) -> <<"22">>; +pad_int(23) -> <<"23">>; +pad_int(24) -> <<"24">>; +pad_int(25) -> <<"25">>; +pad_int(26) -> <<"26">>; +pad_int(27) -> <<"27">>; +pad_int(28) -> <<"28">>; +pad_int(29) -> <<"29">>; +pad_int(30) -> <<"30">>; +pad_int(31) -> <<"31">>; +pad_int(32) -> <<"32">>; +pad_int(33) -> <<"33">>; +pad_int(34) -> <<"34">>; +pad_int(35) -> <<"35">>; +pad_int(36) -> <<"36">>; +pad_int(37) -> <<"37">>; +pad_int(38) -> <<"38">>; +pad_int(39) -> <<"39">>; +pad_int(40) -> <<"40">>; +pad_int(41) -> <<"41">>; +pad_int(42) -> <<"42">>; +pad_int(43) -> <<"43">>; +pad_int(44) -> <<"44">>; +pad_int(45) -> <<"45">>; +pad_int(46) -> <<"46">>; +pad_int(47) -> <<"47">>; +pad_int(48) -> <<"48">>; +pad_int(49) -> <<"49">>; +pad_int(50) -> <<"50">>; +pad_int(51) -> <<"51">>; +pad_int(52) -> <<"52">>; +pad_int(53) -> <<"53">>; +pad_int(54) -> <<"54">>; +pad_int(55) -> <<"55">>; +pad_int(56) -> <<"56">>; +pad_int(57) -> <<"57">>; +pad_int(58) -> <<"58">>; +pad_int(59) -> <<"59">>; +pad_int(60) -> <<"60">>; +pad_int(Int) -> integer_to_binary(Int). + +-spec weekday(1..7) -> <<_:24>>. +weekday(1) -> <<"Mon">>; +weekday(2) -> <<"Tue">>; +weekday(3) -> <<"Wed">>; +weekday(4) -> <<"Thu">>; +weekday(5) -> <<"Fri">>; +weekday(6) -> <<"Sat">>; +weekday(7) -> <<"Sun">>. + +-spec month(1..12) -> <<_:24>>. +month( 1) -> <<"Jan">>; +month( 2) -> <<"Feb">>; +month( 3) -> <<"Mar">>; +month( 4) -> <<"Apr">>; +month( 5) -> <<"May">>; +month( 6) -> <<"Jun">>; +month( 7) -> <<"Jul">>; +month( 8) -> <<"Aug">>; +month( 9) -> <<"Sep">>; +month(10) -> <<"Oct">>; +month(11) -> <<"Nov">>; +month(12) -> <<"Dec">>. + +-spec year(pos_integer()) -> <<_:32>>. 
+year(1970) -> <<"1970">>; +year(1971) -> <<"1971">>; +year(1972) -> <<"1972">>; +year(1973) -> <<"1973">>; +year(1974) -> <<"1974">>; +year(1975) -> <<"1975">>; +year(1976) -> <<"1976">>; +year(1977) -> <<"1977">>; +year(1978) -> <<"1978">>; +year(1979) -> <<"1979">>; +year(1980) -> <<"1980">>; +year(1981) -> <<"1981">>; +year(1982) -> <<"1982">>; +year(1983) -> <<"1983">>; +year(1984) -> <<"1984">>; +year(1985) -> <<"1985">>; +year(1986) -> <<"1986">>; +year(1987) -> <<"1987">>; +year(1988) -> <<"1988">>; +year(1989) -> <<"1989">>; +year(1990) -> <<"1990">>; +year(1991) -> <<"1991">>; +year(1992) -> <<"1992">>; +year(1993) -> <<"1993">>; +year(1994) -> <<"1994">>; +year(1995) -> <<"1995">>; +year(1996) -> <<"1996">>; +year(1997) -> <<"1997">>; +year(1998) -> <<"1998">>; +year(1999) -> <<"1999">>; +year(2000) -> <<"2000">>; +year(2001) -> <<"2001">>; +year(2002) -> <<"2002">>; +year(2003) -> <<"2003">>; +year(2004) -> <<"2004">>; +year(2005) -> <<"2005">>; +year(2006) -> <<"2006">>; +year(2007) -> <<"2007">>; +year(2008) -> <<"2008">>; +year(2009) -> <<"2009">>; +year(2010) -> <<"2010">>; +year(2011) -> <<"2011">>; +year(2012) -> <<"2012">>; +year(2013) -> <<"2013">>; +year(2014) -> <<"2014">>; +year(2015) -> <<"2015">>; +year(2016) -> <<"2016">>; +year(2017) -> <<"2017">>; +year(2018) -> <<"2018">>; +year(2019) -> <<"2019">>; +year(2020) -> <<"2020">>; +year(2021) -> <<"2021">>; +year(2022) -> <<"2022">>; +year(2023) -> <<"2023">>; +year(2024) -> <<"2024">>; +year(2025) -> <<"2025">>; +year(2026) -> <<"2026">>; +year(2027) -> <<"2027">>; +year(2028) -> <<"2028">>; +year(2029) -> <<"2029">>; +year(Year) -> integer_to_binary(Year). diff --git a/src/wsLib/cow_hpack.erl b/src/wsLib/cow_hpack.erl new file mode 100644 index 0000000..4a02d79 --- /dev/null +++ b/src/wsLib/cow_hpack.erl @@ -0,0 +1,1449 @@ +%% Copyright (c) 2015-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% The current implementation is not suitable for use in +%% intermediaries as the information about headers that +%% should never be indexed is currently lost. + +-module(cow_hpack). +-dialyzer(no_improper_lists). + +-export([init/0]). +-export([init/1]). +-export([set_max_size/2]). + +-export([decode/1]). +-export([decode/2]). + +-export([encode/1]). +-export([encode/2]). +-export([encode/3]). + +-record(state, { + size = 0 :: non_neg_integer(), + max_size = 4096 :: non_neg_integer(), + configured_max_size = 4096 :: non_neg_integer(), + dyn_table = [] :: [{pos_integer(), {binary(), binary()}}] +}). + +-opaque state() :: #state{}. +-export_type([state/0]). + +-type opts() :: map(). +-export_type([opts/0]). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). +-endif. + +%% State initialization. + +-spec init() -> state(). +init() -> + #state{}. + +-spec init(non_neg_integer()) -> state(). 
+init(MaxSize) -> + #state{max_size=MaxSize, configured_max_size=MaxSize}. + +%% Update the configured max size. +%% +%% When decoding, the local endpoint also needs to send a SETTINGS +%% frame with this value and it is then up to the remote endpoint +%% to decide what actual limit it will use. The actual limit is +%% signaled via dynamic table size updates in the encoded data. +%% +%% When encoding, the local endpoint will call this function after +%% receiving a SETTINGS frame with this value. The encoder will +%% then use this value as the new max after signaling via a dynamic +%% table size update. The value given as argument may be lower +%% than the one received in the SETTINGS. + +-spec set_max_size(non_neg_integer(), State) -> State when State::state(). +set_max_size(MaxSize, State) -> + State#state{configured_max_size=MaxSize}. + +%% Decoding. + +-spec decode(binary()) -> {cow_http:headers(), state()}. +decode(Data) -> + decode(Data, init()). + +-spec decode(binary(), State) -> {cow_http:headers(), State} when State::state(). +%% Dynamic table size update is only allowed at the beginning of a HEADERS block. +decode(<< 0:2, 1:1, Rest/bits >>, State=#state{configured_max_size=ConfigMaxSize}) -> + {MaxSize, Rest2} = dec_int5(Rest), + if + MaxSize =< ConfigMaxSize -> + State2 = table_update_size(MaxSize, State), + decode(Rest2, State2) + end; +decode(Data, State) -> + decode(Data, State, []). + +decode(<<>>, State, Acc) -> + {lists:reverse(Acc), State}; +%% Indexed header field representation. +decode(<< 1:1, Rest/bits >>, State, Acc) -> + dec_indexed(Rest, State, Acc); +%% Literal header field with incremental indexing: new name. +decode(<< 0:1, 1:1, 0:6, Rest/bits >>, State, Acc) -> + dec_lit_index_new_name(Rest, State, Acc); +%% Literal header field with incremental indexing: indexed name. +decode(<< 0:1, 1:1, Rest/bits >>, State, Acc) -> + dec_lit_index_indexed_name(Rest, State, Acc); +%% Literal header field without indexing: new name. +decode(<< 0:8, Rest/bits >>, State, Acc) -> + dec_lit_no_index_new_name(Rest, State, Acc); +%% Literal header field without indexing: indexed name. +decode(<< 0:4, Rest/bits >>, State, Acc) -> + dec_lit_no_index_indexed_name(Rest, State, Acc); +%% Literal header field never indexed: new name. +%% @todo Keep track of "never indexed" headers. +decode(<< 0:3, 1:1, 0:4, Rest/bits >>, State, Acc) -> + dec_lit_no_index_new_name(Rest, State, Acc); +%% Literal header field never indexed: indexed name. +%% @todo Keep track of "never indexed" headers. +decode(<< 0:3, 1:1, Rest/bits >>, State, Acc) -> + dec_lit_no_index_indexed_name(Rest, State, Acc). + +%% Indexed header field representation. + +%% We do the integer decoding inline where appropriate, falling +%% back to dec_big_int for larger values. +dec_indexed(<<2#1111111:7, 0:1, Int:7, Rest/bits>>, State, Acc) -> + {Name, Value} = table_get(127 + Int, State), + decode(Rest, State, [{Name, Value}|Acc]); +dec_indexed(<<2#1111111:7, Rest0/bits>>, State, Acc) -> + {Index, Rest} = dec_big_int(Rest0, 127, 0), + {Name, Value} = table_get(Index, State), + decode(Rest, State, [{Name, Value}|Acc]); +dec_indexed(<<Index:7, Rest/bits>>, State, Acc) -> + {Name, Value} = table_get(Index, State), + decode(Rest, State, [{Name, Value}|Acc]). + +%% Literal header field with incremental indexing. + +dec_lit_index_new_name(Rest, State, Acc) -> + {Name, Rest2} = dec_str(Rest), + dec_lit_index(Rest2, State, Acc, Name). + +%% We do the integer decoding inline where appropriate, falling +%% back to dec_big_int for larger values.
+dec_lit_index_indexed_name(<<2#111111:6, 0:1, Int:7, Rest/bits>>, State, Acc) -> + Name = table_get_name(63 + Int, State), + dec_lit_index(Rest, State, Acc, Name); +dec_lit_index_indexed_name(<<2#111111:6, Rest0/bits>>, State, Acc) -> + {Index, Rest} = dec_big_int(Rest0, 63, 0), + Name = table_get_name(Index, State), + dec_lit_index(Rest, State, Acc, Name); +dec_lit_index_indexed_name(<<Index:6, Rest/bits>>, State, Acc) -> + Name = table_get_name(Index, State), + dec_lit_index(Rest, State, Acc, Name). + +dec_lit_index(Rest, State, Acc, Name) -> + {Value, Rest2} = dec_str(Rest), + State2 = table_insert({Name, Value}, State), + decode(Rest2, State2, [{Name, Value}|Acc]). + +%% Literal header field without indexing. + +dec_lit_no_index_new_name(Rest, State, Acc) -> + {Name, Rest2} = dec_str(Rest), + dec_lit_no_index(Rest2, State, Acc, Name). + +%% We do the integer decoding inline where appropriate, falling +%% back to dec_big_int for larger values. +dec_lit_no_index_indexed_name(<<2#1111:4, 0:1, Int:7, Rest/bits>>, State, Acc) -> + Name = table_get_name(15 + Int, State), + dec_lit_no_index(Rest, State, Acc, Name); +dec_lit_no_index_indexed_name(<<2#1111:4, Rest0/bits>>, State, Acc) -> + {Index, Rest} = dec_big_int(Rest0, 15, 0), + Name = table_get_name(Index, State), + dec_lit_no_index(Rest, State, Acc, Name); +dec_lit_no_index_indexed_name(<<Index:4, Rest/bits>>, State, Acc) -> + Name = table_get_name(Index, State), + dec_lit_no_index(Rest, State, Acc, Name). + +dec_lit_no_index(Rest, State, Acc, Name) -> + {Value, Rest2} = dec_str(Rest), + decode(Rest2, State, [{Name, Value}|Acc]). + +%% @todo Literal header field never indexed. + +%% Decode an integer. + +%% The HPACK format has 4 different integer prefix lengths (from 4 to 7) +%% and each can be used to create an indefinite length integer if all bits +%% of the prefix are set to 1. + +dec_int5(<< 2#11111:5, Rest/bits >>) -> + dec_big_int(Rest, 31, 0); +dec_int5(<< Int:5, Rest/bits >>) -> + {Int, Rest}. + +dec_big_int(<< 0:1, Value:7, Rest/bits >>, Int, M) -> + {Int + (Value bsl M), Rest}; +dec_big_int(<< 1:1, Value:7, Rest/bits >>, Int, M) -> + dec_big_int(Rest, Int + (Value bsl M), M + 7). + +%% Decode a string. + +dec_str(<<0:1, 2#1111111:7, Rest0/bits>>) -> + {Length, Rest1} = dec_big_int(Rest0, 127, 0), + <<Str:Length/binary, Rest/bits>> = Rest1, + {Str, Rest}; +dec_str(<<0:1, Length:7, Rest0/bits>>) -> + <<Str:Length/binary, Rest/bits>> = Rest0, + {Str, Rest}; +dec_str(<<1:1, 2#1111111:7, Rest0/bits>>) -> + {Length, Rest} = dec_big_int(Rest0, 127, 0), + dec_huffman(Rest, Length, 0, <<>>); +dec_str(<<1:1, Length:7, Rest/bits>>) -> + dec_huffman(Rest, Length, 0, <<>>). + +%% We use a lookup table that allows us to benefit from +%% the binary match context optimization. A more naive +%% implementation using bit pattern matching cannot reuse +%% a match context because it wouldn't always match on +%% byte boundaries. +%% +%% See cow_hpack_dec_huffman_lookup.hrl for more details. + +dec_huffman(<<A:4, B:4, R/bits>>, Len, Huff0, Acc) when Len > 1 -> + {_, CharA, Huff1} = dec_huffman_lookup(Huff0, A), + {_, CharB, Huff} = dec_huffman_lookup(Huff1, B), + case {CharA, CharB} of + {undefined, undefined} -> dec_huffman(R, Len - 1, Huff, Acc); + {CharA, undefined} -> dec_huffman(R, Len - 1, Huff, <<Acc/bits, CharA>>); + {undefined, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/bits, CharB>>); + {CharA, CharB} -> dec_huffman(R, Len - 1, Huff, <<Acc/bits, CharA, CharB>>) + end; +dec_huffman(<<A:4, B:4, Rest/bits>>, 1, Huff0, Acc) -> + {_, CharA, Huff} = dec_huffman_lookup(Huff0, A), + {ok, CharB, _} = dec_huffman_lookup(Huff, B), + case {CharA, CharB} of + %% {undefined, undefined} (> 7-bit final padding) is rejected with a crash.
+ {CharA, undefined} -> + {<<Acc/bits, CharA>>, Rest}; + {undefined, CharB} -> + {<<Acc/bits, CharB>>, Rest}; + _ -> + {<<Acc/bits, CharA, CharB>>, Rest} + end; +%% Can only be reached when the string length to decode is 0. +dec_huffman(Rest, 0, _, <<>>) -> + {<<>>, Rest}. + +-include("cow_hpack_dec_huffman_lookup.hrl"). + +-ifdef(TEST). +%% Test case extracted from h2spec. +decode_reject_eos_test() -> + {'EXIT', _} = (catch decode(<<16#0085f2b24a84ff874951fffffffa7f:120>>)), + ok. + +req_decode_test() -> + %% First request (raw then huffman). + {Headers1, State1} = decode(<< 16#828684410f7777772e6578616d706c652e636f6d:160 >>), + {Headers1, State1} = decode(<< 16#828684418cf1e3c2e5f23a6ba0ab90f4ff:136 >>), + Headers1 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>} + ], + #state{size=57, dyn_table=[{57,{<<":authority">>, <<"www.example.com">>}}]} = State1, + %% Second request (raw then huffman). + {Headers2, State2} = decode(<< 16#828684be58086e6f2d6361636865:112 >>, State1), + {Headers2, State2} = decode(<< 16#828684be5886a8eb10649cbf:96 >>, State1), + Headers2 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"cache-control">>, <<"no-cache">>} + ], + #state{size=110, dyn_table=[ + {53,{<<"cache-control">>, <<"no-cache">>}}, + {57,{<<":authority">>, <<"www.example.com">>}}]} = State2, + %% Third request (raw then huffman). + {Headers3, State3} = decode(<< 16#828785bf400a637573746f6d2d6b65790c637573746f6d2d76616c7565:232 >>, State2), + {Headers3, State3} = decode(<< 16#828785bf408825a849e95ba97d7f8925a849e95bb8e8b4bf:192 >>, State2), + Headers3 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/index.html">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"custom-key">>, <<"custom-value">>} + ], + #state{size=164, dyn_table=[ + {54,{<<"custom-key">>, <<"custom-value">>}}, + {53,{<<"cache-control">>, <<"no-cache">>}}, + {57,{<<":authority">>, <<"www.example.com">>}}]} = State3, + ok. + +resp_decode_test() -> + %% Use a max_size of 256 to trigger header evictions. + State0 = init(256), + %% First response (raw then huffman). + {Headers1, State1} = decode(<< 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >>, State0), + {Headers1, State1} = decode(<< 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >>, State0), + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Second response (raw then huffman).
+ {Headers2, State2} = decode(<< 16#4803333037c1c0bf:64 >>, State1), + {Headers2, State2} = decode(<< 16#4883640effc1c0bf:64 >>, State1), + Headers2 = [ + {<<":status">>, <<"307">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}]} = State2, + %% Third response (raw then huffman). + {Headers3, State3} = decode(<< 16#88c1611d4d6f6e2c203231204f637420323031332032303a31333a323220474d54c05a04677a69707738666f6f3d4153444a4b48514b425a584f5157454f50495541585157454f49553b206d61782d6167653d333630303b2076657273696f6e3d31:784 >>, State2), + {Headers3, State3} = decode(<< 16#88c16196d07abe941054d444a8200595040b8166e084a62d1bffc05a839bd9ab77ad94e7821dd7f2e6c7b335dfdfcd5b3960d5af27087f3672c1ab270fb5291f9587316065c003ed4ee5b1063d5007:632 >>, State2), + Headers3 = [ + {<<":status">>, <<"200">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:22 GMT">>}, + {<<"location">>, <<"https://www.example.com">>}, + {<<"content-encoding">>, <<"gzip">>}, + {<<"set-cookie">>, <<"foo=ASDJKHQKBZXOQWEOPIUAXQWEOIU; max-age=3600; version=1">>} + ], + #state{size=215, dyn_table=[ + {98,{<<"set-cookie">>, <<"foo=ASDJKHQKBZXOQWEOPIUAXQWEOIU; max-age=3600; version=1">>}}, + {52,{<<"content-encoding">>, <<"gzip">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:22 GMT">>}}]} = State3, + ok. + +table_update_decode_test() -> + %% Use a max_size of 256 to trigger header evictions + %% when the code is not updating the max size. + State0 = init(256), + %% First response (raw then huffman). + {Headers1, State1} = decode(<< 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >>, State0), + {Headers1, State1} = decode(<< 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >>, State0), + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Set a new configured max_size to avoid header evictions. + State2 = set_max_size(512, State1), + %% Second response with the table size update (raw then huffman). 
+ MaxSize = enc_big_int(512 - 31, <<>>), + {Headers2, State3} = decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4803333037c1c0bf:64>>]), + State2), + {Headers2, State3} = decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4883640effc1c0bf:64>>]), + State2), + Headers2 = [ + {<<":status">>, <<"307">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=264, configured_max_size=512, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State3, + ok. + +table_update_decode_smaller_test() -> + %% Use a max_size of 256 to trigger header evictions + %% when the code is not updating the max size. + State0 = init(256), + %% First response (raw then huffman). + {Headers1, State1} = decode(<< 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >>, State0), + {Headers1, State1} = decode(<< 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >>, State0), + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Set a new configured max_size to avoid header evictions. + State2 = set_max_size(512, State1), + %% Second response with the table size update smaller than the limit (raw then huffman). + MaxSize = enc_big_int(400 - 31, <<>>), + {Headers2, State3} = decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4803333037c1c0bf:64>>]), + State2), + {Headers2, State3} = decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4883640effc1c0bf:64>>]), + State2), + Headers2 = [ + {<<":status">>, <<"307">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=264, configured_max_size=512, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State3, + ok. + +table_update_decode_too_large_test() -> + %% Use a max_size of 256 to trigger header evictions + %% when the code is not updating the max size. + State0 = init(256), + %% First response (raw then huffman). 
+ {Headers1, State1} = decode(<< 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >>, State0), + {Headers1, State1} = decode(<< 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >>, State0), + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Set a new configured max_size to avoid header evictions. + State2 = set_max_size(512, State1), + %% Second response with the table size update (raw then huffman). + MaxSize = enc_big_int(1024 - 31, <<>>), + {'EXIT', _} = (catch decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4803333037c1c0bf:64>>]), + State2)), + {'EXIT', _} = (catch decode( + iolist_to_binary([<< 2#00111111>>, MaxSize, <<16#4883640effc1c0bf:64>>]), + State2)), + ok. + +table_update_decode_zero_test() -> + State0 = init(256), + %% First response (raw then huffman). + {Headers1, State1} = decode(<< 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >>, State0), + {Headers1, State1} = decode(<< 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >>, State0), + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Set a new configured max_size to avoid header evictions. + State2 = set_max_size(512, State1), + %% Second response with the table size update (raw then huffman). + %% We set the table size to 0 to evict all values before setting + %% it to 512 so we only get the second request indexed. + MaxSize = enc_big_int(512 - 31, <<>>), + {Headers1, State3} = decode(iolist_to_binary([ + <<2#00100000, 2#00111111>>, MaxSize, + <<16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560>>]), + State2), + {Headers1, State3} = decode(iolist_to_binary([ + <<2#00100000, 2#00111111>>, MaxSize, + <<16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432>>]), + State2), + #state{size=222, configured_max_size=512, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State3, + ok. + +horse_decode_raw() -> + horse:repeat(20000, + do_horse_decode_raw() + ). 
+ +do_horse_decode_raw() -> + {_, State1} = decode(<<16#828684410f7777772e6578616d706c652e636f6d:160>>), + {_, State2} = decode(<<16#828684be58086e6f2d6361636865:112>>, State1), + {_, _} = decode(<<16#828785bf400a637573746f6d2d6b65790c637573746f6d2d76616c7565:232>>, State2), + ok. + +horse_decode_huffman() -> + horse:repeat(20000, + do_horse_decode_huffman() + ). + +do_horse_decode_huffman() -> + {_, State1} = decode(<<16#828684418cf1e3c2e5f23a6ba0ab90f4ff:136>>), + {_, State2} = decode(<<16#828684be5886a8eb10649cbf:96>>, State1), + {_, _} = decode(<<16#828785bf408825a849e95ba97d7f8925a849e95bb8e8b4bf:192>>, State2), + ok. +-endif. + +%% Encoding. + +-spec encode(cow_http:headers()) -> {iodata(), state()}. +encode(Headers) -> + encode(Headers, init(), huffman, []). + +-spec encode(cow_http:headers(), State) -> {iodata(), State} when State::state(). +encode(Headers, State=#state{max_size=MaxSize, configured_max_size=MaxSize}) -> + encode(Headers, State, huffman, []); +encode(Headers, State0=#state{configured_max_size=MaxSize}) -> + State1 = table_update_size(MaxSize, State0), + {Data, State} = encode(Headers, State1, huffman, []), + {[enc_int5(MaxSize, 2#001)|Data], State}. + +-spec encode(cow_http:headers(), State, opts()) -> {iodata(), State} when State::state(). +encode(Headers, State=#state{max_size=MaxSize, configured_max_size=MaxSize}, Opts) -> + encode(Headers, State, huffman_opt(Opts), []); +encode(Headers, State0=#state{configured_max_size=MaxSize}, Opts) -> + State1 = table_update_size(MaxSize, State0), + {Data, State} = encode(Headers, State1, huffman_opt(Opts), []), + {[enc_int5(MaxSize, 2#001)|Data], State}. + +huffman_opt(#{huffman := false}) -> no_huffman; +huffman_opt(_) -> huffman. + +%% @todo Handle cases where no/never indexing is expected. +encode([], State, _, Acc) -> + {lists:reverse(Acc), State}; +encode([{Name, Value0}|Tail], State, HuffmanOpt, Acc) -> + %% We conditionally call iolist_to_binary/1 because a small + %% but noticeable speed improvement happens when we do this. + Value = if + is_binary(Value0) -> Value0; + true -> iolist_to_binary(Value0) + end, + Header = {Name, Value}, + case table_find(Header, State) of + %% Indexed header field representation. + {field, Index} -> + encode(Tail, State, HuffmanOpt, + [enc_int7(Index, 2#1)|Acc]); + %% Literal header field representation: indexed name. + {name, Index} -> + State2 = table_insert(Header, State), + encode(Tail, State2, HuffmanOpt, + [[enc_int6(Index, 2#01)|enc_str(Value, HuffmanOpt)]|Acc]); + %% Literal header field representation: new name. + not_found -> + State2 = table_insert(Header, State), + encode(Tail, State2, HuffmanOpt, + [[<< 0:1, 1:1, 0:6 >>|[enc_str(Name, HuffmanOpt)|enc_str(Value, HuffmanOpt)]]|Acc]) + end. + +%% Encode an integer. + +enc_int5(Int, Prefix) when Int < 31 -> + << Prefix:3, Int:5 >>; +enc_int5(Int, Prefix) -> + enc_big_int(Int - 31, << Prefix:3, 2#11111:5 >>). + +enc_int6(Int, Prefix) when Int < 63 -> + << Prefix:2, Int:6 >>; +enc_int6(Int, Prefix) -> + enc_big_int(Int - 63, << Prefix:2, 2#111111:6 >>). + +enc_int7(Int, Prefix) when Int < 127 -> + << Prefix:1, Int:7 >>; +enc_int7(Int, Prefix) -> + enc_big_int(Int - 127, << Prefix:1, 2#1111111:7 >>). + +enc_big_int(Int, Acc) when Int < 128 -> + <<Acc/binary, Int:8>>; +enc_big_int(Int, Acc) -> + enc_big_int(Int bsr 7, <<Acc/binary, 1:1, Int:7>>). + +%% Encode a string. + +enc_str(Str, huffman) -> + Str2 = enc_huffman(Str, <<>>), + [enc_int7(byte_size(Str2), 2#1)|Str2]; +enc_str(Str, no_huffman) -> + [enc_int7(byte_size(Str), 2#0)|Str].
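+
+-ifdef(TEST).
+%% Editor's sketch, added for illustration: a worked example of the prefix
+%% integer coding above, mirroring RFC 7541 Appendix C.1. A value of 10
+%% fits in the 5-bit prefix; 1337 overflows into continuation bytes
+%% (31 + 26 + 10*128) and round-trips through dec_int5/1.
+prefix_int_example_test() ->
+    <<2#001:3, 10:5>> = enc_int5(10, 2#001),
+    <<63, 154, 10>> = enc_int5(1337, 2#001),
+    <<_:3, Rest/bits>> = enc_int5(1337, 2#001),
+    {1337, <<>>} = dec_int5(Rest),
+    ok.
+-endif.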
+ +enc_huffman(<<>>, Acc) -> + case bit_size(Acc) rem 8 of + 1 -> << Acc/bits, 2#1111111:7 >>; + 2 -> << Acc/bits, 2#111111:6 >>; + 3 -> << Acc/bits, 2#11111:5 >>; + 4 -> << Acc/bits, 2#1111:4 >>; + 5 -> << Acc/bits, 2#111:3 >>; + 6 -> << Acc/bits, 2#11:2 >>; + 7 -> << Acc/bits, 2#1:1 >>; + 0 -> Acc + end; +enc_huffman(<< 0, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111000:13 >>); +enc_huffman(<< 1, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011000:23 >>); +enc_huffman(<< 2, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100010:28 >>); +enc_huffman(<< 3, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100011:28 >>); +enc_huffman(<< 4, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100100:28 >>); +enc_huffman(<< 5, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100101:28 >>); +enc_huffman(<< 6, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100110:28 >>); +enc_huffman(<< 7, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111100111:28 >>); +enc_huffman(<< 8, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101000:28 >>); +enc_huffman(<< 9, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101010:24 >>); +enc_huffman(<< 10, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111100:30 >>); +enc_huffman(<< 11, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101001:28 >>); +enc_huffman(<< 12, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101010:28 >>); +enc_huffman(<< 13, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111101:30 >>); +enc_huffman(<< 14, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101011:28 >>); +enc_huffman(<< 15, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101100:28 >>); +enc_huffman(<< 16, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101101:28 >>); +enc_huffman(<< 17, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101110:28 >>); +enc_huffman(<< 18, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111101111:28 >>); +enc_huffman(<< 19, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110000:28 >>); +enc_huffman(<< 20, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110001:28 >>); +enc_huffman(<< 21, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110010:28 >>); +enc_huffman(<< 22, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111111111110:30 >>); +enc_huffman(<< 23, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110011:28 >>); +enc_huffman(<< 24, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110100:28 >>); +enc_huffman(<< 25, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110101:28 >>); +enc_huffman(<< 26, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110110:28 >>); +enc_huffman(<< 27, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111110111:28 >>); +enc_huffman(<< 28, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111000:28 >>); +enc_huffman(<< 29, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111001:28 >>); +enc_huffman(<< 30, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111010:28 >>); +enc_huffman(<< 31, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111011:28 >>); 
+enc_huffman(<< 32, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010100:6 >>); +enc_huffman(<< 33, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111000:10 >>); +enc_huffman(<< 34, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111001:10 >>); +enc_huffman(<< 35, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111010:12 >>); +enc_huffman(<< 36, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111001:13 >>); +enc_huffman(<< 37, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010101:6 >>); +enc_huffman(<< 38, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111000:8 >>); +enc_huffman(<< 39, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111010:11 >>); +enc_huffman(<< 40, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111010:10 >>); +enc_huffman(<< 41, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111011:10 >>); +enc_huffman(<< 42, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111001:8 >>); +enc_huffman(<< 43, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111011:11 >>); +enc_huffman(<< 44, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111010:8 >>); +enc_huffman(<< 45, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010110:6 >>); +enc_huffman(<< 46, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#010111:6 >>); +enc_huffman(<< 47, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011000:6 >>); +enc_huffman(<< 48, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00000:5 >>); +enc_huffman(<< 49, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00001:5 >>); +enc_huffman(<< 50, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00010:5 >>); +enc_huffman(<< 51, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011001:6 >>); +enc_huffman(<< 52, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011010:6 >>); +enc_huffman(<< 53, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011011:6 >>); +enc_huffman(<< 54, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011100:6 >>); +enc_huffman(<< 55, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011101:6 >>); +enc_huffman(<< 56, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011110:6 >>); +enc_huffman(<< 57, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#011111:6 >>); +enc_huffman(<< 58, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011100:7 >>); +enc_huffman(<< 59, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111011:8 >>); +enc_huffman(<< 60, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111100:15 >>); +enc_huffman(<< 61, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100000:6 >>); +enc_huffman(<< 62, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111011:12 >>); +enc_huffman(<< 63, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111100:10 >>); +enc_huffman(<< 64, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111010:13 >>); +enc_huffman(<< 65, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100001:6 >>); +enc_huffman(<< 66, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011101:7 >>); +enc_huffman(<< 67, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011110:7 >>); +enc_huffman(<< 68, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1011111:7 >>); +enc_huffman(<< 69, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100000:7 >>); +enc_huffman(<< 70, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100001:7 >>); +enc_huffman(<< 71, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100010:7 >>); +enc_huffman(<< 72, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100011:7 >>); +enc_huffman(<< 73, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100100:7 >>); +enc_huffman(<< 74, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100101:7 >>); +enc_huffman(<< 75, R/bits >>, A) -> enc_huffman(R, << A/bits, 
2#1100110:7 >>); +enc_huffman(<< 76, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1100111:7 >>); +enc_huffman(<< 77, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101000:7 >>); +enc_huffman(<< 78, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101001:7 >>); +enc_huffman(<< 79, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101010:7 >>); +enc_huffman(<< 80, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101011:7 >>); +enc_huffman(<< 81, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101100:7 >>); +enc_huffman(<< 82, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101101:7 >>); +enc_huffman(<< 83, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101110:7 >>); +enc_huffman(<< 84, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1101111:7 >>); +enc_huffman(<< 85, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110000:7 >>); +enc_huffman(<< 86, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110001:7 >>); +enc_huffman(<< 87, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110010:7 >>); +enc_huffman(<< 88, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111100:8 >>); +enc_huffman(<< 89, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110011:7 >>); +enc_huffman(<< 90, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111101:8 >>); +enc_huffman(<< 91, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111011:13 >>); +enc_huffman(<< 92, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110000:19 >>); +enc_huffman(<< 93, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111100:13 >>); +enc_huffman(<< 94, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111100:14 >>); +enc_huffman(<< 95, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100010:6 >>); +enc_huffman(<< 96, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111101:15 >>); +enc_huffman(<< 97, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00011:5 >>); +enc_huffman(<< 98, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100011:6 >>); +enc_huffman(<< 99, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00100:5 >>); +enc_huffman(<< 100, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100100:6 >>); +enc_huffman(<< 101, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00101:5 >>); +enc_huffman(<< 102, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100101:6 >>); +enc_huffman(<< 103, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100110:6 >>); +enc_huffman(<< 104, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#100111:6 >>); +enc_huffman(<< 105, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00110:5 >>); +enc_huffman(<< 106, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110100:7 >>); +enc_huffman(<< 107, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110101:7 >>); +enc_huffman(<< 108, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101000:6 >>); +enc_huffman(<< 109, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101001:6 >>); +enc_huffman(<< 110, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101010:6 >>); +enc_huffman(<< 111, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#00111:5 >>); +enc_huffman(<< 112, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101011:6 >>); +enc_huffman(<< 113, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110110:7 >>); +enc_huffman(<< 114, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101100:6 >>); +enc_huffman(<< 115, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01000:5 >>); +enc_huffman(<< 116, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#01001:5 >>); +enc_huffman(<< 117, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#101101:6 >>); +enc_huffman(<< 118, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1110111:7 >>); +enc_huffman(<< 119, R/bits >>, A) -> enc_huffman(R, << 
A/bits, 2#1111000:7 >>); +enc_huffman(<< 120, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111001:7 >>); +enc_huffman(<< 121, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111010:7 >>); +enc_huffman(<< 122, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111011:7 >>); +enc_huffman(<< 123, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111110:15 >>); +enc_huffman(<< 124, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111100:11 >>); +enc_huffman(<< 125, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111101:14 >>); +enc_huffman(<< 126, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111101:13 >>); +enc_huffman(<< 127, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111100:28 >>); +enc_huffman(<< 128, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100110:20 >>); +enc_huffman(<< 129, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010010:22 >>); +enc_huffman(<< 130, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111100111:20 >>); +enc_huffman(<< 131, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101000:20 >>); +enc_huffman(<< 132, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010011:22 >>); +enc_huffman(<< 133, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010100:22 >>); +enc_huffman(<< 134, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010101:22 >>); +enc_huffman(<< 135, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011001:23 >>); +enc_huffman(<< 136, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010110:22 >>); +enc_huffman(<< 137, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011010:23 >>); +enc_huffman(<< 138, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011011:23 >>); +enc_huffman(<< 139, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011100:23 >>); +enc_huffman(<< 140, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011101:23 >>); +enc_huffman(<< 141, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011110:23 >>); +enc_huffman(<< 142, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101011:24 >>); +enc_huffman(<< 143, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111011111:23 >>); +enc_huffman(<< 144, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101100:24 >>); +enc_huffman(<< 145, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101101:24 >>); +enc_huffman(<< 146, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111010111:22 >>); +enc_huffman(<< 147, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100000:23 >>); +enc_huffman(<< 148, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101110:24 >>); +enc_huffman(<< 149, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100001:23 >>); +enc_huffman(<< 150, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100010:23 >>); +enc_huffman(<< 151, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100011:23 >>); +enc_huffman(<< 152, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100100:23 >>); +enc_huffman(<< 153, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011100:21 >>); +enc_huffman(<< 154, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011000:22 >>); +enc_huffman(<< 155, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100101:23 >>); +enc_huffman(<< 156, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011001:22 >>); 
+enc_huffman(<< 157, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100110:23 >>); +enc_huffman(<< 158, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111100111:23 >>); +enc_huffman(<< 159, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111101111:24 >>); +enc_huffman(<< 160, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011010:22 >>); +enc_huffman(<< 161, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011101:21 >>); +enc_huffman(<< 162, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101001:20 >>); +enc_huffman(<< 163, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011011:22 >>); +enc_huffman(<< 164, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011100:22 >>); +enc_huffman(<< 165, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101000:23 >>); +enc_huffman(<< 166, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101001:23 >>); +enc_huffman(<< 167, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011110:21 >>); +enc_huffman(<< 168, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101010:23 >>); +enc_huffman(<< 169, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011101:22 >>); +enc_huffman(<< 170, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011110:22 >>); +enc_huffman(<< 171, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110000:24 >>); +enc_huffman(<< 172, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111011111:21 >>); +enc_huffman(<< 173, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111011111:22 >>); +enc_huffman(<< 174, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101011:23 >>); +enc_huffman(<< 175, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101100:23 >>); +enc_huffman(<< 176, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100000:21 >>); +enc_huffman(<< 177, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100001:21 >>); +enc_huffman(<< 178, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100000:22 >>); +enc_huffman(<< 179, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100010:21 >>); +enc_huffman(<< 180, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101101:23 >>); +enc_huffman(<< 181, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100001:22 >>); +enc_huffman(<< 182, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101110:23 >>); +enc_huffman(<< 183, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111101111:23 >>); +enc_huffman(<< 184, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101010:20 >>); +enc_huffman(<< 185, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100010:22 >>); +enc_huffman(<< 186, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100011:22 >>); +enc_huffman(<< 187, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100100:22 >>); +enc_huffman(<< 188, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110000:23 >>); +enc_huffman(<< 189, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100101:22 >>); +enc_huffman(<< 190, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100110:22 >>); +enc_huffman(<< 191, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110001:23 >>); +enc_huffman(<< 192, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100000:26 >>); +enc_huffman(<< 193, R/bits >>, A) -> enc_huffman(R, 
<< A/bits, 2#11111111111111111111100001:26 >>); +enc_huffman(<< 194, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101011:20 >>); +enc_huffman(<< 195, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110001:19 >>); +enc_huffman(<< 196, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111100111:22 >>); +enc_huffman(<< 197, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110010:23 >>); +enc_huffman(<< 198, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101000:22 >>); +enc_huffman(<< 199, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101100:25 >>); +enc_huffman(<< 200, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100010:26 >>); +enc_huffman(<< 201, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100011:26 >>); +enc_huffman(<< 202, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100100:26 >>); +enc_huffman(<< 203, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011110:27 >>); +enc_huffman(<< 204, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111011111:27 >>); +enc_huffman(<< 205, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100101:26 >>); +enc_huffman(<< 206, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110001:24 >>); +enc_huffman(<< 207, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101101:25 >>); +enc_huffman(<< 208, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111110010:19 >>); +enc_huffman(<< 209, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100011:21 >>); +enc_huffman(<< 210, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100110:26 >>); +enc_huffman(<< 211, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100000:27 >>); +enc_huffman(<< 212, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100001:27 >>); +enc_huffman(<< 213, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111100111:26 >>); +enc_huffman(<< 214, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100010:27 >>); +enc_huffman(<< 215, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110010:24 >>); +enc_huffman(<< 216, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100100:21 >>); +enc_huffman(<< 217, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100101:21 >>); +enc_huffman(<< 218, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101000:26 >>); +enc_huffman(<< 219, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101001:26 >>); +enc_huffman(<< 220, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111101:28 >>); +enc_huffman(<< 221, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100011:27 >>); +enc_huffman(<< 222, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100100:27 >>); +enc_huffman(<< 223, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100101:27 >>); +enc_huffman(<< 224, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101100:20 >>); +enc_huffman(<< 225, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110011:24 >>); +enc_huffman(<< 226, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111101101:20 >>); +enc_huffman(<< 227, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111100110:21 >>); +enc_huffman(<< 228, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101001:22 >>); +enc_huffman(<< 229, R/bits >>, A) 
-> enc_huffman(R, << A/bits, 2#111111111111111100111:21 >>); +enc_huffman(<< 230, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111101000:21 >>); +enc_huffman(<< 231, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110011:23 >>); +enc_huffman(<< 232, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101010:22 >>); +enc_huffman(<< 233, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111101011:22 >>); +enc_huffman(<< 234, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101110:25 >>); +enc_huffman(<< 235, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111101111:25 >>); +enc_huffman(<< 236, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110100:24 >>); +enc_huffman(<< 237, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111110101:24 >>); +enc_huffman(<< 238, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101010:26 >>); +enc_huffman(<< 239, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111110100:23 >>); +enc_huffman(<< 240, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101011:26 >>); +enc_huffman(<< 241, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100110:27 >>); +enc_huffman(<< 242, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101100:26 >>); +enc_huffman(<< 243, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101101:26 >>); +enc_huffman(<< 244, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111100111:27 >>); +enc_huffman(<< 245, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101000:27 >>); +enc_huffman(<< 246, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101001:27 >>); +enc_huffman(<< 247, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101010:27 >>); +enc_huffman(<< 248, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101011:27 >>); +enc_huffman(<< 249, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#1111111111111111111111111110:28 >>); +enc_huffman(<< 250, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101100:27 >>); +enc_huffman(<< 251, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101101:27 >>); +enc_huffman(<< 252, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101110:27 >>); +enc_huffman(<< 253, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111101111:27 >>); +enc_huffman(<< 254, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#111111111111111111111110000:27 >>); +enc_huffman(<< 255, R/bits >>, A) -> enc_huffman(R, << A/bits, 2#11111111111111111111101110:26 >>). + +-ifdef(TEST). +req_encode_test() -> + %% First request (raw then huffman). + Headers1 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>} + ], + {Raw1, State1} = encode(Headers1, init(), #{huffman => false}), + << 16#828684410f7777772e6578616d706c652e636f6d:160 >> = iolist_to_binary(Raw1), + {Huff1, State1} = encode(Headers1), + << 16#828684418cf1e3c2e5f23a6ba0ab90f4ff:136 >> = iolist_to_binary(Huff1), + #state{size=57, dyn_table=[{57,{<<":authority">>, <<"www.example.com">>}}]} = State1, + %% Second request (raw then huffman). 
+ Headers2 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"cache-control">>, <<"no-cache">>} + ], + {Raw2, State2} = encode(Headers2, State1, #{huffman => false}), + << 16#828684be58086e6f2d6361636865:112 >> = iolist_to_binary(Raw2), + {Huff2, State2} = encode(Headers2, State1), + << 16#828684be5886a8eb10649cbf:96 >> = iolist_to_binary(Huff2), + #state{size=110, dyn_table=[ + {53,{<<"cache-control">>, <<"no-cache">>}}, + {57,{<<":authority">>, <<"www.example.com">>}}]} = State2, + %% Third request (raw then huffman). + Headers3 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/index.html">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"custom-key">>, <<"custom-value">>} + ], + {Raw3, State3} = encode(Headers3, State2, #{huffman => false}), + << 16#828785bf400a637573746f6d2d6b65790c637573746f6d2d76616c7565:232 >> = iolist_to_binary(Raw3), + {Huff3, State3} = encode(Headers3, State2), + << 16#828785bf408825a849e95ba97d7f8925a849e95bb8e8b4bf:192 >> = iolist_to_binary(Huff3), + #state{size=164, dyn_table=[ + {54,{<<"custom-key">>, <<"custom-value">>}}, + {53,{<<"cache-control">>, <<"no-cache">>}}, + {57,{<<":authority">>, <<"www.example.com">>}}]} = State3, + ok. + +resp_encode_test() -> + %% Use a max_size of 256 to trigger header evictions. + State0 = init(256), + %% First response (raw then huffman). + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + {Raw1, State1} = encode(Headers1, State0, #{huffman => false}), + << 16#4803333032580770726976617465611d4d6f6e2c203231204f637420323031332032303a31333a323120474d546e1768747470733a2f2f7777772e6578616d706c652e636f6d:560 >> = iolist_to_binary(Raw1), + {Huff1, State1} = encode(Headers1, State0), + << 16#488264025885aec3771a4b6196d07abe941054d444a8200595040b8166e082a62d1bff6e919d29ad171863c78f0b97c8e9ae82ae43d3:432 >> = iolist_to_binary(Huff1), + #state{size=222, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = State1, + %% Second response (raw then huffman). + Headers2 = [ + {<<":status">>, <<"307">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + {Raw2, State2} = encode(Headers2, State1, #{huffman => false}), + << 16#4803333037c1c0bf:64 >> = iolist_to_binary(Raw2), + {Huff2, State2} = encode(Headers2, State1), + << 16#4883640effc1c0bf:64 >> = iolist_to_binary(Huff2), + #state{size=222, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}]} = State2, + %% Third response (raw then huffman). 
+ Headers3 = [ + {<<":status">>, <<"200">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:22 GMT">>}, + {<<"location">>, <<"https://www.example.com">>}, + {<<"content-encoding">>, <<"gzip">>}, + {<<"set-cookie">>, <<"foo=ASDJKHQKBZXOQWEOPIUAXQWEOIU; max-age=3600; version=1">>} + ], + {Raw3, State3} = encode(Headers3, State2, #{huffman => false}), + << 16#88c1611d4d6f6e2c203231204f637420323031332032303a31333a323220474d54c05a04677a69707738666f6f3d4153444a4b48514b425a584f5157454f50495541585157454f49553b206d61782d6167653d333630303b2076657273696f6e3d31:784 >> = iolist_to_binary(Raw3), + {Huff3, State3} = encode(Headers3, State2), + << 16#88c16196d07abe941054d444a8200595040b8166e084a62d1bffc05a839bd9ab77ad94e7821dd7f2e6c7b335dfdfcd5b3960d5af27087f3672c1ab270fb5291f9587316065c003ed4ee5b1063d5007:632 >> = iolist_to_binary(Huff3), + #state{size=215, dyn_table=[ + {98,{<<"set-cookie">>, <<"foo=ASDJKHQKBZXOQWEOPIUAXQWEOIU; max-age=3600; version=1">>}}, + {52,{<<"content-encoding">>, <<"gzip">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:22 GMT">>}}]} = State3, + ok. + +%% This test assumes that table updates work correctly when decoding. +table_update_encode_test() -> + %% Use a max_size of 256 to trigger header evictions + %% when the code is not updating the max size. + DecState0 = EncState0 = init(256), + %% First response. + Headers1 = [ + {<<":status">>, <<"302">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + {Encoded1, EncState1} = encode(Headers1, EncState0), + {Headers1, DecState1} = decode(iolist_to_binary(Encoded1), DecState0), + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = DecState1, + #state{size=222, configured_max_size=256, dyn_table=[ + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = EncState1, + %% Set a new configured max_size to avoid header evictions. + DecState2 = set_max_size(512, DecState1), + EncState2 = set_max_size(512, EncState1), + %% Second response. + Headers2 = [ + {<<":status">>, <<"307">>}, + {<<"cache-control">>, <<"private">>}, + {<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}, + {<<"location">>, <<"https://www.example.com">>} + ], + {Encoded2, EncState3} = encode(Headers2, EncState2), + {Headers2, DecState3} = decode(iolist_to_binary(Encoded2), DecState2), + #state{size=264, max_size=512, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = DecState3, + #state{size=264, max_size=512, dyn_table=[ + {42,{<<":status">>, <<"307">>}}, + {63,{<<"location">>, <<"https://www.example.com">>}}, + {65,{<<"date">>, <<"Mon, 21 Oct 2013 20:13:21 GMT">>}}, + {52,{<<"cache-control">>, <<"private">>}}, + {42,{<<":status">>, <<"302">>}}]} = EncState3, + ok. + +%% Check that encode/2 is using the new table size after calling +%% set_max_size/1 and that adding entries larger than the max size +%% results in an empty table. 
+table_update_encode_max_size_0_test() -> + %% Encoding starts with default max size + EncState0 = init(), + %% Decoding starts with max size of 0 + DecState0 = init(0), + %% First request. + Headers1 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>} + ], + {Encoded1, EncState1} = encode(Headers1, EncState0), + {Headers1, DecState1} = decode(iolist_to_binary(Encoded1), DecState0), + #state{size=57, dyn_table=[{57,{<<":authority">>, <<"www.example.com">>}}]} = EncState1, + #state{size=0, dyn_table=[]} = DecState1, + %% Settings received after the first request. + EncState2 = set_max_size(0, EncState1), + #state{configured_max_size=0, max_size=4096, + size=57, dyn_table=[{57,{<<":authority">>, <<"www.example.com">>}}]} = EncState2, + %% Second request. + Headers2 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"cache-control">>, <<"no-cache">>} + ], + {Encoded2, EncState3} = encode(Headers2, EncState2), + {Headers2, DecState2} = decode(iolist_to_binary(Encoded2), DecState1), + #state{configured_max_size=0, max_size=0, size=0, dyn_table=[]} = EncState3, + #state{size=0, dyn_table=[]} = DecState2, + ok. + +encode_iolist_test() -> + Headers = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"content-type">>, [<<"image">>,<<"/">>,<<"png">>,<<>>]} + ], + {_, _} = encode(Headers), + ok. + +horse_encode_raw() -> + horse:repeat(20000, + do_horse_encode_raw() + ). + +do_horse_encode_raw() -> + Headers1 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>} + ], + {_, State1} = encode(Headers1, init(), #{huffman => false}), + Headers2 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"cache-control">>, <<"no-cache">>} + ], + {_, State2} = encode(Headers2, State1, #{huffman => false}), + Headers3 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/index.html">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"custom-key">>, <<"custom-value">>} + ], + {_, _} = encode(Headers3, State2, #{huffman => false}), + ok. + +horse_encode_huffman() -> + horse:repeat(20000, + do_horse_encode_huffman() + ). + +do_horse_encode_huffman() -> + Headers1 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>} + ], + {_, State1} = encode(Headers1), + Headers2 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"http">>}, + {<<":path">>, <<"/">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"cache-control">>, <<"no-cache">>} + ], + {_, State2} = encode(Headers2, State1), + Headers3 = [ + {<<":method">>, <<"GET">>}, + {<<":scheme">>, <<"https">>}, + {<<":path">>, <<"/index.html">>}, + {<<":authority">>, <<"www.example.com">>}, + {<<"custom-key">>, <<"custom-value">>} + ], + {_, _} = encode(Headers3, State2), + ok. +-endif. + +%% Static and dynamic tables. + +%% @todo There must be a more efficient way. +table_find(Header = {Name, _}, State) -> + case table_find_field(Header, State) of + not_found -> + case table_find_name(Name, State) of + NotFound = not_found -> + NotFound; + Found -> + {name, Found} + end; + Found -> + {field, Found} + end. 
+ +table_find_field({<<":authority">>, <<>>}, _) -> 1; +table_find_field({<<":method">>, <<"GET">>}, _) -> 2; +table_find_field({<<":method">>, <<"POST">>}, _) -> 3; +table_find_field({<<":path">>, <<"/">>}, _) -> 4; +table_find_field({<<":path">>, <<"/index.html">>}, _) -> 5; +table_find_field({<<":scheme">>, <<"http">>}, _) -> 6; +table_find_field({<<":scheme">>, <<"https">>}, _) -> 7; +table_find_field({<<":status">>, <<"200">>}, _) -> 8; +table_find_field({<<":status">>, <<"204">>}, _) -> 9; +table_find_field({<<":status">>, <<"206">>}, _) -> 10; +table_find_field({<<":status">>, <<"304">>}, _) -> 11; +table_find_field({<<":status">>, <<"400">>}, _) -> 12; +table_find_field({<<":status">>, <<"404">>}, _) -> 13; +table_find_field({<<":status">>, <<"500">>}, _) -> 14; +table_find_field({<<"accept-charset">>, <<>>}, _) -> 15; +table_find_field({<<"accept-encoding">>, <<"gzip, deflate">>}, _) -> 16; +table_find_field({<<"accept-language">>, <<>>}, _) -> 17; +table_find_field({<<"accept-ranges">>, <<>>}, _) -> 18; +table_find_field({<<"accept">>, <<>>}, _) -> 19; +table_find_field({<<"access-control-allow-origin">>, <<>>}, _) -> 20; +table_find_field({<<"age">>, <<>>}, _) -> 21; +table_find_field({<<"allow">>, <<>>}, _) -> 22; +table_find_field({<<"authorization">>, <<>>}, _) -> 23; +table_find_field({<<"cache-control">>, <<>>}, _) -> 24; +table_find_field({<<"content-disposition">>, <<>>}, _) -> 25; +table_find_field({<<"content-encoding">>, <<>>}, _) -> 26; +table_find_field({<<"content-language">>, <<>>}, _) -> 27; +table_find_field({<<"content-length">>, <<>>}, _) -> 28; +table_find_field({<<"content-location">>, <<>>}, _) -> 29; +table_find_field({<<"content-range">>, <<>>}, _) -> 30; +table_find_field({<<"content-type">>, <<>>}, _) -> 31; +table_find_field({<<"cookie">>, <<>>}, _) -> 32; +table_find_field({<<"date">>, <<>>}, _) -> 33; +table_find_field({<<"etag">>, <<>>}, _) -> 34; +table_find_field({<<"expect">>, <<>>}, _) -> 35; +table_find_field({<<"expires">>, <<>>}, _) -> 36; +table_find_field({<<"from">>, <<>>}, _) -> 37; +table_find_field({<<"host">>, <<>>}, _) -> 38; +table_find_field({<<"if-match">>, <<>>}, _) -> 39; +table_find_field({<<"if-modified-since">>, <<>>}, _) -> 40; +table_find_field({<<"if-none-match">>, <<>>}, _) -> 41; +table_find_field({<<"if-range">>, <<>>}, _) -> 42; +table_find_field({<<"if-unmodified-since">>, <<>>}, _) -> 43; +table_find_field({<<"last-modified">>, <<>>}, _) -> 44; +table_find_field({<<"link">>, <<>>}, _) -> 45; +table_find_field({<<"location">>, <<>>}, _) -> 46; +table_find_field({<<"max-forwards">>, <<>>}, _) -> 47; +table_find_field({<<"proxy-authenticate">>, <<>>}, _) -> 48; +table_find_field({<<"proxy-authorization">>, <<>>}, _) -> 49; +table_find_field({<<"range">>, <<>>}, _) -> 50; +table_find_field({<<"referer">>, <<>>}, _) -> 51; +table_find_field({<<"refresh">>, <<>>}, _) -> 52; +table_find_field({<<"retry-after">>, <<>>}, _) -> 53; +table_find_field({<<"server">>, <<>>}, _) -> 54; +table_find_field({<<"set-cookie">>, <<>>}, _) -> 55; +table_find_field({<<"strict-transport-security">>, <<>>}, _) -> 56; +table_find_field({<<"transfer-encoding">>, <<>>}, _) -> 57; +table_find_field({<<"user-agent">>, <<>>}, _) -> 58; +table_find_field({<<"vary">>, <<>>}, _) -> 59; +table_find_field({<<"via">>, <<>>}, _) -> 60; +table_find_field({<<"www-authenticate">>, <<>>}, _) -> 61; +table_find_field(Header, #state{dyn_table=DynamicTable}) -> + table_find_field_dyn(Header, DynamicTable, 62). 
+ +table_find_field_dyn(_, [], _) -> not_found; +table_find_field_dyn(Header, [{_, Header}|_], Index) -> Index; +table_find_field_dyn(Header, [_|Tail], Index) -> table_find_field_dyn(Header, Tail, Index + 1). + +table_find_name(<<":authority">>, _) -> 1; +table_find_name(<<":method">>, _) -> 2; +table_find_name(<<":path">>, _) -> 4; +table_find_name(<<":scheme">>, _) -> 6; +table_find_name(<<":status">>, _) -> 8; +table_find_name(<<"accept-charset">>, _) -> 15; +table_find_name(<<"accept-encoding">>, _) -> 16; +table_find_name(<<"accept-language">>, _) -> 17; +table_find_name(<<"accept-ranges">>, _) -> 18; +table_find_name(<<"accept">>, _) -> 19; +table_find_name(<<"access-control-allow-origin">>, _) -> 20; +table_find_name(<<"age">>, _) -> 21; +table_find_name(<<"allow">>, _) -> 22; +table_find_name(<<"authorization">>, _) -> 23; +table_find_name(<<"cache-control">>, _) -> 24; +table_find_name(<<"content-disposition">>, _) -> 25; +table_find_name(<<"content-encoding">>, _) -> 26; +table_find_name(<<"content-language">>, _) -> 27; +table_find_name(<<"content-length">>, _) -> 28; +table_find_name(<<"content-location">>, _) -> 29; +table_find_name(<<"content-range">>, _) -> 30; +table_find_name(<<"content-type">>, _) -> 31; +table_find_name(<<"cookie">>, _) -> 32; +table_find_name(<<"date">>, _) -> 33; +table_find_name(<<"etag">>, _) -> 34; +table_find_name(<<"expect">>, _) -> 35; +table_find_name(<<"expires">>, _) -> 36; +table_find_name(<<"from">>, _) -> 37; +table_find_name(<<"host">>, _) -> 38; +table_find_name(<<"if-match">>, _) -> 39; +table_find_name(<<"if-modified-since">>, _) -> 40; +table_find_name(<<"if-none-match">>, _) -> 41; +table_find_name(<<"if-range">>, _) -> 42; +table_find_name(<<"if-unmodified-since">>, _) -> 43; +table_find_name(<<"last-modified">>, _) -> 44; +table_find_name(<<"link">>, _) -> 45; +table_find_name(<<"location">>, _) -> 46; +table_find_name(<<"max-forwards">>, _) -> 47; +table_find_name(<<"proxy-authenticate">>, _) -> 48; +table_find_name(<<"proxy-authorization">>, _) -> 49; +table_find_name(<<"range">>, _) -> 50; +table_find_name(<<"referer">>, _) -> 51; +table_find_name(<<"refresh">>, _) -> 52; +table_find_name(<<"retry-after">>, _) -> 53; +table_find_name(<<"server">>, _) -> 54; +table_find_name(<<"set-cookie">>, _) -> 55; +table_find_name(<<"strict-transport-security">>, _) -> 56; +table_find_name(<<"transfer-encoding">>, _) -> 57; +table_find_name(<<"user-agent">>, _) -> 58; +table_find_name(<<"vary">>, _) -> 59; +table_find_name(<<"via">>, _) -> 60; +table_find_name(<<"www-authenticate">>, _) -> 61; +table_find_name(Name, #state{dyn_table=DynamicTable}) -> + table_find_name_dyn(Name, DynamicTable, 62). + +table_find_name_dyn(_, [], _) -> not_found; +table_find_name_dyn(Name, [{Name, _}|_], Index) -> Index; +table_find_name_dyn(Name, [_|Tail], Index) -> table_find_name_dyn(Name, Tail, Index + 1). 
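+
+-ifdef(TEST).
+%% Illustrative example, not part of upstream cowlib: table_find/2 above
+%% returns {field, Index} when name and value both match an indexed
+%% entry, {name, Index} when only the name is indexed, and not_found
+%% otherwise. Indices 1..61 are the RFC 7541 static table.
+table_find_usage_test() ->
+    State = init(),
+    {field, 2} = table_find({<<":method">>, <<"GET">>}, State),
+    {name, 24} = table_find({<<"cache-control">>, <<"no-cache">>}, State),
+    not_found = table_find({<<"x-custom-header">>, <<"1">>}, State),
+    ok.
+-endif.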
+ +table_get(1, _) -> {<<":authority">>, <<>>}; +table_get(2, _) -> {<<":method">>, <<"GET">>}; +table_get(3, _) -> {<<":method">>, <<"POST">>}; +table_get(4, _) -> {<<":path">>, <<"/">>}; +table_get(5, _) -> {<<":path">>, <<"/index.html">>}; +table_get(6, _) -> {<<":scheme">>, <<"http">>}; +table_get(7, _) -> {<<":scheme">>, <<"https">>}; +table_get(8, _) -> {<<":status">>, <<"200">>}; +table_get(9, _) -> {<<":status">>, <<"204">>}; +table_get(10, _) -> {<<":status">>, <<"206">>}; +table_get(11, _) -> {<<":status">>, <<"304">>}; +table_get(12, _) -> {<<":status">>, <<"400">>}; +table_get(13, _) -> {<<":status">>, <<"404">>}; +table_get(14, _) -> {<<":status">>, <<"500">>}; +table_get(15, _) -> {<<"accept-charset">>, <<>>}; +table_get(16, _) -> {<<"accept-encoding">>, <<"gzip, deflate">>}; +table_get(17, _) -> {<<"accept-language">>, <<>>}; +table_get(18, _) -> {<<"accept-ranges">>, <<>>}; +table_get(19, _) -> {<<"accept">>, <<>>}; +table_get(20, _) -> {<<"access-control-allow-origin">>, <<>>}; +table_get(21, _) -> {<<"age">>, <<>>}; +table_get(22, _) -> {<<"allow">>, <<>>}; +table_get(23, _) -> {<<"authorization">>, <<>>}; +table_get(24, _) -> {<<"cache-control">>, <<>>}; +table_get(25, _) -> {<<"content-disposition">>, <<>>}; +table_get(26, _) -> {<<"content-encoding">>, <<>>}; +table_get(27, _) -> {<<"content-language">>, <<>>}; +table_get(28, _) -> {<<"content-length">>, <<>>}; +table_get(29, _) -> {<<"content-location">>, <<>>}; +table_get(30, _) -> {<<"content-range">>, <<>>}; +table_get(31, _) -> {<<"content-type">>, <<>>}; +table_get(32, _) -> {<<"cookie">>, <<>>}; +table_get(33, _) -> {<<"date">>, <<>>}; +table_get(34, _) -> {<<"etag">>, <<>>}; +table_get(35, _) -> {<<"expect">>, <<>>}; +table_get(36, _) -> {<<"expires">>, <<>>}; +table_get(37, _) -> {<<"from">>, <<>>}; +table_get(38, _) -> {<<"host">>, <<>>}; +table_get(39, _) -> {<<"if-match">>, <<>>}; +table_get(40, _) -> {<<"if-modified-since">>, <<>>}; +table_get(41, _) -> {<<"if-none-match">>, <<>>}; +table_get(42, _) -> {<<"if-range">>, <<>>}; +table_get(43, _) -> {<<"if-unmodified-since">>, <<>>}; +table_get(44, _) -> {<<"last-modified">>, <<>>}; +table_get(45, _) -> {<<"link">>, <<>>}; +table_get(46, _) -> {<<"location">>, <<>>}; +table_get(47, _) -> {<<"max-forwards">>, <<>>}; +table_get(48, _) -> {<<"proxy-authenticate">>, <<>>}; +table_get(49, _) -> {<<"proxy-authorization">>, <<>>}; +table_get(50, _) -> {<<"range">>, <<>>}; +table_get(51, _) -> {<<"referer">>, <<>>}; +table_get(52, _) -> {<<"refresh">>, <<>>}; +table_get(53, _) -> {<<"retry-after">>, <<>>}; +table_get(54, _) -> {<<"server">>, <<>>}; +table_get(55, _) -> {<<"set-cookie">>, <<>>}; +table_get(56, _) -> {<<"strict-transport-security">>, <<>>}; +table_get(57, _) -> {<<"transfer-encoding">>, <<>>}; +table_get(58, _) -> {<<"user-agent">>, <<>>}; +table_get(59, _) -> {<<"vary">>, <<>>}; +table_get(60, _) -> {<<"via">>, <<>>}; +table_get(61, _) -> {<<"www-authenticate">>, <<>>}; +table_get(Index, #state{dyn_table=DynamicTable}) -> + {_, Header} = lists:nth(Index - 61, DynamicTable), + Header. 
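+
+-ifdef(TEST).
+%% Illustrative example, not part of upstream cowlib: table_get/2 above
+%% resolves indices 1..61 from the static table and indices 62 and up
+%% from the dynamic table, most recently inserted entry first.
+table_get_dyn_index_test() ->
+    {<<":method">>, <<"GET">>} = table_get(2, init()),
+    State = table_insert({<<"custom-key">>, <<"custom-value">>}, init()),
+    {<<"custom-key">>, <<"custom-value">>} = table_get(62, State),
+    ok.
+-endif.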
+ +table_get_name(1, _) -> <<":authority">>; +table_get_name(2, _) -> <<":method">>; +table_get_name(3, _) -> <<":method">>; +table_get_name(4, _) -> <<":path">>; +table_get_name(5, _) -> <<":path">>; +table_get_name(6, _) -> <<":scheme">>; +table_get_name(7, _) -> <<":scheme">>; +table_get_name(8, _) -> <<":status">>; +table_get_name(9, _) -> <<":status">>; +table_get_name(10, _) -> <<":status">>; +table_get_name(11, _) -> <<":status">>; +table_get_name(12, _) -> <<":status">>; +table_get_name(13, _) -> <<":status">>; +table_get_name(14, _) -> <<":status">>; +table_get_name(15, _) -> <<"accept-charset">>; +table_get_name(16, _) -> <<"accept-encoding">>; +table_get_name(17, _) -> <<"accept-language">>; +table_get_name(18, _) -> <<"accept-ranges">>; +table_get_name(19, _) -> <<"accept">>; +table_get_name(20, _) -> <<"access-control-allow-origin">>; +table_get_name(21, _) -> <<"age">>; +table_get_name(22, _) -> <<"allow">>; +table_get_name(23, _) -> <<"authorization">>; +table_get_name(24, _) -> <<"cache-control">>; +table_get_name(25, _) -> <<"content-disposition">>; +table_get_name(26, _) -> <<"content-encoding">>; +table_get_name(27, _) -> <<"content-language">>; +table_get_name(28, _) -> <<"content-length">>; +table_get_name(29, _) -> <<"content-location">>; +table_get_name(30, _) -> <<"content-range">>; +table_get_name(31, _) -> <<"content-type">>; +table_get_name(32, _) -> <<"cookie">>; +table_get_name(33, _) -> <<"date">>; +table_get_name(34, _) -> <<"etag">>; +table_get_name(35, _) -> <<"expect">>; +table_get_name(36, _) -> <<"expires">>; +table_get_name(37, _) -> <<"from">>; +table_get_name(38, _) -> <<"host">>; +table_get_name(39, _) -> <<"if-match">>; +table_get_name(40, _) -> <<"if-modified-since">>; +table_get_name(41, _) -> <<"if-none-match">>; +table_get_name(42, _) -> <<"if-range">>; +table_get_name(43, _) -> <<"if-unmodified-since">>; +table_get_name(44, _) -> <<"last-modified">>; +table_get_name(45, _) -> <<"link">>; +table_get_name(46, _) -> <<"location">>; +table_get_name(47, _) -> <<"max-forwards">>; +table_get_name(48, _) -> <<"proxy-authenticate">>; +table_get_name(49, _) -> <<"proxy-authorization">>; +table_get_name(50, _) -> <<"range">>; +table_get_name(51, _) -> <<"referer">>; +table_get_name(52, _) -> <<"refresh">>; +table_get_name(53, _) -> <<"retry-after">>; +table_get_name(54, _) -> <<"server">>; +table_get_name(55, _) -> <<"set-cookie">>; +table_get_name(56, _) -> <<"strict-transport-security">>; +table_get_name(57, _) -> <<"transfer-encoding">>; +table_get_name(58, _) -> <<"user-agent">>; +table_get_name(59, _) -> <<"vary">>; +table_get_name(60, _) -> <<"via">>; +table_get_name(61, _) -> <<"www-authenticate">>; +table_get_name(Index, #state{dyn_table=DynamicTable}) -> + {_, {Name, _}} = lists:nth(Index - 61, DynamicTable), + Name. 
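+
+-ifdef(TEST).
+%% Illustrative example, not part of upstream cowlib: table_insert/2,
+%% defined just below, charges each entry byte_size(Name) +
+%% byte_size(Value) + 32 octets (RFC 7541, Section 4.1), so
+%% {<<":authority">>, <<"www.example.com">>} costs 10 + 15 + 32 = 57,
+%% matching the size=57 assertions in the request tests above, and an
+%% entry larger than max_size empties the table (Section 4.4).
+table_insert_size_test() ->
+    State1 = table_insert({<<":authority">>, <<"www.example.com">>}, init()),
+    #state{size=57, dyn_table=[{57,{<<":authority">>, <<"www.example.com">>}}]} = State1,
+    #state{size=0, dyn_table=[]} =
+        table_insert({<<"big">>, binary:copy(<<$a>>, 8192)}, State1),
+    ok.
+-endif.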
+ +table_insert(Entry = {Name, Value}, State=#state{size=Size, max_size=MaxSize, dyn_table=DynamicTable}) -> + EntrySize = byte_size(Name) + byte_size(Value) + 32, + if + EntrySize + Size =< MaxSize -> + %% Add entry without eviction + State#state{size=Size + EntrySize, dyn_table=[{EntrySize, Entry}|DynamicTable]}; + EntrySize =< MaxSize -> + %% Evict, then add entry + {DynamicTable2, Size2} = table_resize(DynamicTable, MaxSize - EntrySize, 0, []), + State#state{size=Size2 + EntrySize, dyn_table=[{EntrySize, Entry}|DynamicTable2]}; + EntrySize > MaxSize -> + %% "an attempt to add an entry larger than the + %% maximum size causes the table to be emptied + %% of all existing entries and results in an + %% empty table" (RFC 7541, 4.4) + State#state{size=0, dyn_table=[]} + end. + +table_resize([], _, Size, Acc) -> + {lists:reverse(Acc), Size}; +table_resize([{EntrySize, _}|_], MaxSize, Size, Acc) when Size + EntrySize > MaxSize -> + {lists:reverse(Acc), Size}; +table_resize([Entry = {EntrySize, _}|Tail], MaxSize, Size, Acc) -> + table_resize(Tail, MaxSize, Size + EntrySize, [Entry|Acc]). + +table_update_size(0, State) -> + State#state{size=0, max_size=0, dyn_table=[]}; +table_update_size(MaxSize, State=#state{size=CurrentSize}) + when CurrentSize =< MaxSize -> + State#state{max_size=MaxSize}; +table_update_size(MaxSize, State=#state{dyn_table=DynTable}) -> + {DynTable2, Size} = table_resize(DynTable, MaxSize, 0, []), + State#state{size=Size, max_size=MaxSize, dyn_table=DynTable2}. + +-ifdef(TEST). +prop_str_raw() -> + ?FORALL(Str, binary(), begin + {Str, <<>>} =:= dec_str(iolist_to_binary(enc_str(Str, no_huffman))) + end). + +prop_str_huffman() -> + ?FORALL(Str, binary(), begin + {Str, <<>>} =:= dec_str(iolist_to_binary(enc_str(Str, huffman))) + end). +-endif. diff --git a/src/wsLib/cow_hpack_dec_huffman_lookup.hrl b/src/wsLib/cow_hpack_dec_huffman_lookup.hrl new file mode 100644 index 0000000..5ed4d39 --- /dev/null +++ b/src/wsLib/cow_hpack_dec_huffman_lookup.hrl @@ -0,0 +1,4132 @@ +%% Copyright (c) 2019, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% This lookup function was created by converting the +%% table from Nginx[1] into a form better suitable for +%% Erlang/OTP. This particular table takes a byte-sized +%% state and 4 bits to determine whether to emit a +%% character and what the next state is. It is most +%% appropriate for Erlang/OTP because we can benefit +%% from binary pattern matching optimizations by +%% matching the binary one byte at a time, calling +%% this lookup function twice. This and similar +%% algorithms are discussed here[2] and there[3]. +%% +%% It is possible to write a lookup table taking +%% a full byte instead of just 4 bits, but this +%% would make this function take 65536 clauses instead +%% of the current 4096. 
This could be done later +%% as a further optimization but might not yield +%% significant improvements. +%% +%% [1] https://hg.nginx.org/nginx/file/tip/src/http/v2/ngx_http_v2_huff_decode.c +%% [2] http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.9.4248&rep=rep1&type=pdf +%% [3] https://commandlinefanatic.com/cgi-bin/showarticle.cgi?article=art007 + +dec_huffman_lookup(16#00, 16#0) -> {more, undefined, 16#04}; +dec_huffman_lookup(16#00, 16#1) -> {more, undefined, 16#05}; +dec_huffman_lookup(16#00, 16#2) -> {more, undefined, 16#07}; +dec_huffman_lookup(16#00, 16#3) -> {more, undefined, 16#08}; +dec_huffman_lookup(16#00, 16#4) -> {more, undefined, 16#0b}; +dec_huffman_lookup(16#00, 16#5) -> {more, undefined, 16#0c}; +dec_huffman_lookup(16#00, 16#6) -> {more, undefined, 16#10}; +dec_huffman_lookup(16#00, 16#7) -> {more, undefined, 16#13}; +dec_huffman_lookup(16#00, 16#8) -> {more, undefined, 16#19}; +dec_huffman_lookup(16#00, 16#9) -> {more, undefined, 16#1c}; +dec_huffman_lookup(16#00, 16#a) -> {more, undefined, 16#20}; +dec_huffman_lookup(16#00, 16#b) -> {more, undefined, 16#23}; +dec_huffman_lookup(16#00, 16#c) -> {more, undefined, 16#2a}; +dec_huffman_lookup(16#00, 16#d) -> {more, undefined, 16#31}; +dec_huffman_lookup(16#00, 16#e) -> {more, undefined, 16#39}; +dec_huffman_lookup(16#00, 16#f) -> {ok, undefined, 16#40}; +dec_huffman_lookup(16#01, 16#0) -> {ok, 16#30, 16#00}; +dec_huffman_lookup(16#01, 16#1) -> {ok, 16#31, 16#00}; +dec_huffman_lookup(16#01, 16#2) -> {ok, 16#32, 16#00}; +dec_huffman_lookup(16#01, 16#3) -> {ok, 16#61, 16#00}; +dec_huffman_lookup(16#01, 16#4) -> {ok, 16#63, 16#00}; +dec_huffman_lookup(16#01, 16#5) -> {ok, 16#65, 16#00}; +dec_huffman_lookup(16#01, 16#6) -> {ok, 16#69, 16#00}; +dec_huffman_lookup(16#01, 16#7) -> {ok, 16#6f, 16#00}; +dec_huffman_lookup(16#01, 16#8) -> {ok, 16#73, 16#00}; +dec_huffman_lookup(16#01, 16#9) -> {ok, 16#74, 16#00}; +dec_huffman_lookup(16#01, 16#a) -> {more, undefined, 16#0d}; +dec_huffman_lookup(16#01, 16#b) -> {more, undefined, 16#0e}; +dec_huffman_lookup(16#01, 16#c) -> {more, undefined, 16#11}; +dec_huffman_lookup(16#01, 16#d) -> {more, undefined, 16#12}; +dec_huffman_lookup(16#01, 16#e) -> {more, undefined, 16#14}; +dec_huffman_lookup(16#01, 16#f) -> {more, undefined, 16#15}; +dec_huffman_lookup(16#02, 16#0) -> {more, 16#30, 16#01}; +dec_huffman_lookup(16#02, 16#1) -> {ok, 16#30, 16#16}; +dec_huffman_lookup(16#02, 16#2) -> {more, 16#31, 16#01}; +dec_huffman_lookup(16#02, 16#3) -> {ok, 16#31, 16#16}; +dec_huffman_lookup(16#02, 16#4) -> {more, 16#32, 16#01}; +dec_huffman_lookup(16#02, 16#5) -> {ok, 16#32, 16#16}; +dec_huffman_lookup(16#02, 16#6) -> {more, 16#61, 16#01}; +dec_huffman_lookup(16#02, 16#7) -> {ok, 16#61, 16#16}; +dec_huffman_lookup(16#02, 16#8) -> {more, 16#63, 16#01}; +dec_huffman_lookup(16#02, 16#9) -> {ok, 16#63, 16#16}; +dec_huffman_lookup(16#02, 16#a) -> {more, 16#65, 16#01}; +dec_huffman_lookup(16#02, 16#b) -> {ok, 16#65, 16#16}; +dec_huffman_lookup(16#02, 16#c) -> {more, 16#69, 16#01}; +dec_huffman_lookup(16#02, 16#d) -> {ok, 16#69, 16#16}; +dec_huffman_lookup(16#02, 16#e) -> {more, 16#6f, 16#01}; +dec_huffman_lookup(16#02, 16#f) -> {ok, 16#6f, 16#16}; +dec_huffman_lookup(16#03, 16#0) -> {more, 16#30, 16#02}; +dec_huffman_lookup(16#03, 16#1) -> {more, 16#30, 16#09}; +dec_huffman_lookup(16#03, 16#2) -> {more, 16#30, 16#17}; +dec_huffman_lookup(16#03, 16#3) -> {ok, 16#30, 16#28}; +dec_huffman_lookup(16#03, 16#4) -> {more, 16#31, 16#02}; +dec_huffman_lookup(16#03, 16#5) -> {more, 16#31, 16#09}; 
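+
+%% A minimal sketch, not part of this file, of how the including module
+%% is expected to drive this table (assuming a dec_huffman/3 helper):
+%% each input byte is split into two 4-bit groups, each group is looked
+%% up against the current state, and a character is emitted whenever a
+%% clause returns one rather than undefined.
+%%
+%%   dec_huffman(<<A:4, B:4, R/bits>>, State0, Acc) ->
+%%       {_, CharA, State1} = dec_huffman_lookup(State0, A),
+%%       {_, CharB, State2} = dec_huffman_lookup(State1, B),
+%%       %% append CharA/CharB to Acc when not undefined,
+%%       %% then continue with R and State2.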
+dec_huffman_lookup(16#03, 16#6) -> {more, 16#31, 16#17}; +dec_huffman_lookup(16#03, 16#7) -> {ok, 16#31, 16#28}; +dec_huffman_lookup(16#03, 16#8) -> {more, 16#32, 16#02}; +dec_huffman_lookup(16#03, 16#9) -> {more, 16#32, 16#09}; +dec_huffman_lookup(16#03, 16#a) -> {more, 16#32, 16#17}; +dec_huffman_lookup(16#03, 16#b) -> {ok, 16#32, 16#28}; +dec_huffman_lookup(16#03, 16#c) -> {more, 16#61, 16#02}; +dec_huffman_lookup(16#03, 16#d) -> {more, 16#61, 16#09}; +dec_huffman_lookup(16#03, 16#e) -> {more, 16#61, 16#17}; +dec_huffman_lookup(16#03, 16#f) -> {ok, 16#61, 16#28}; +dec_huffman_lookup(16#04, 16#0) -> {more, 16#30, 16#03}; +dec_huffman_lookup(16#04, 16#1) -> {more, 16#30, 16#06}; +dec_huffman_lookup(16#04, 16#2) -> {more, 16#30, 16#0a}; +dec_huffman_lookup(16#04, 16#3) -> {more, 16#30, 16#0f}; +dec_huffman_lookup(16#04, 16#4) -> {more, 16#30, 16#18}; +dec_huffman_lookup(16#04, 16#5) -> {more, 16#30, 16#1f}; +dec_huffman_lookup(16#04, 16#6) -> {more, 16#30, 16#29}; +dec_huffman_lookup(16#04, 16#7) -> {ok, 16#30, 16#38}; +dec_huffman_lookup(16#04, 16#8) -> {more, 16#31, 16#03}; +dec_huffman_lookup(16#04, 16#9) -> {more, 16#31, 16#06}; +dec_huffman_lookup(16#04, 16#a) -> {more, 16#31, 16#0a}; +dec_huffman_lookup(16#04, 16#b) -> {more, 16#31, 16#0f}; +dec_huffman_lookup(16#04, 16#c) -> {more, 16#31, 16#18}; +dec_huffman_lookup(16#04, 16#d) -> {more, 16#31, 16#1f}; +dec_huffman_lookup(16#04, 16#e) -> {more, 16#31, 16#29}; +dec_huffman_lookup(16#04, 16#f) -> {ok, 16#31, 16#38}; +dec_huffman_lookup(16#05, 16#0) -> {more, 16#32, 16#03}; +dec_huffman_lookup(16#05, 16#1) -> {more, 16#32, 16#06}; +dec_huffman_lookup(16#05, 16#2) -> {more, 16#32, 16#0a}; +dec_huffman_lookup(16#05, 16#3) -> {more, 16#32, 16#0f}; +dec_huffman_lookup(16#05, 16#4) -> {more, 16#32, 16#18}; +dec_huffman_lookup(16#05, 16#5) -> {more, 16#32, 16#1f}; +dec_huffman_lookup(16#05, 16#6) -> {more, 16#32, 16#29}; +dec_huffman_lookup(16#05, 16#7) -> {ok, 16#32, 16#38}; +dec_huffman_lookup(16#05, 16#8) -> {more, 16#61, 16#03}; +dec_huffman_lookup(16#05, 16#9) -> {more, 16#61, 16#06}; +dec_huffman_lookup(16#05, 16#a) -> {more, 16#61, 16#0a}; +dec_huffman_lookup(16#05, 16#b) -> {more, 16#61, 16#0f}; +dec_huffman_lookup(16#05, 16#c) -> {more, 16#61, 16#18}; +dec_huffman_lookup(16#05, 16#d) -> {more, 16#61, 16#1f}; +dec_huffman_lookup(16#05, 16#e) -> {more, 16#61, 16#29}; +dec_huffman_lookup(16#05, 16#f) -> {ok, 16#61, 16#38}; +dec_huffman_lookup(16#06, 16#0) -> {more, 16#63, 16#02}; +dec_huffman_lookup(16#06, 16#1) -> {more, 16#63, 16#09}; +dec_huffman_lookup(16#06, 16#2) -> {more, 16#63, 16#17}; +dec_huffman_lookup(16#06, 16#3) -> {ok, 16#63, 16#28}; +dec_huffman_lookup(16#06, 16#4) -> {more, 16#65, 16#02}; +dec_huffman_lookup(16#06, 16#5) -> {more, 16#65, 16#09}; +dec_huffman_lookup(16#06, 16#6) -> {more, 16#65, 16#17}; +dec_huffman_lookup(16#06, 16#7) -> {ok, 16#65, 16#28}; +dec_huffman_lookup(16#06, 16#8) -> {more, 16#69, 16#02}; +dec_huffman_lookup(16#06, 16#9) -> {more, 16#69, 16#09}; +dec_huffman_lookup(16#06, 16#a) -> {more, 16#69, 16#17}; +dec_huffman_lookup(16#06, 16#b) -> {ok, 16#69, 16#28}; +dec_huffman_lookup(16#06, 16#c) -> {more, 16#6f, 16#02}; +dec_huffman_lookup(16#06, 16#d) -> {more, 16#6f, 16#09}; +dec_huffman_lookup(16#06, 16#e) -> {more, 16#6f, 16#17}; +dec_huffman_lookup(16#06, 16#f) -> {ok, 16#6f, 16#28}; +dec_huffman_lookup(16#07, 16#0) -> {more, 16#63, 16#03}; +dec_huffman_lookup(16#07, 16#1) -> {more, 16#63, 16#06}; +dec_huffman_lookup(16#07, 16#2) -> {more, 16#63, 16#0a}; +dec_huffman_lookup(16#07, 16#3) -> 
{more, 16#63, 16#0f}; +dec_huffman_lookup(16#07, 16#4) -> {more, 16#63, 16#18}; +dec_huffman_lookup(16#07, 16#5) -> {more, 16#63, 16#1f}; +dec_huffman_lookup(16#07, 16#6) -> {more, 16#63, 16#29}; +dec_huffman_lookup(16#07, 16#7) -> {ok, 16#63, 16#38}; +dec_huffman_lookup(16#07, 16#8) -> {more, 16#65, 16#03}; +dec_huffman_lookup(16#07, 16#9) -> {more, 16#65, 16#06}; +dec_huffman_lookup(16#07, 16#a) -> {more, 16#65, 16#0a}; +dec_huffman_lookup(16#07, 16#b) -> {more, 16#65, 16#0f}; +dec_huffman_lookup(16#07, 16#c) -> {more, 16#65, 16#18}; +dec_huffman_lookup(16#07, 16#d) -> {more, 16#65, 16#1f}; +dec_huffman_lookup(16#07, 16#e) -> {more, 16#65, 16#29}; +dec_huffman_lookup(16#07, 16#f) -> {ok, 16#65, 16#38}; +dec_huffman_lookup(16#08, 16#0) -> {more, 16#69, 16#03}; +dec_huffman_lookup(16#08, 16#1) -> {more, 16#69, 16#06}; +dec_huffman_lookup(16#08, 16#2) -> {more, 16#69, 16#0a}; +dec_huffman_lookup(16#08, 16#3) -> {more, 16#69, 16#0f}; +dec_huffman_lookup(16#08, 16#4) -> {more, 16#69, 16#18}; +dec_huffman_lookup(16#08, 16#5) -> {more, 16#69, 16#1f}; +dec_huffman_lookup(16#08, 16#6) -> {more, 16#69, 16#29}; +dec_huffman_lookup(16#08, 16#7) -> {ok, 16#69, 16#38}; +dec_huffman_lookup(16#08, 16#8) -> {more, 16#6f, 16#03}; +dec_huffman_lookup(16#08, 16#9) -> {more, 16#6f, 16#06}; +dec_huffman_lookup(16#08, 16#a) -> {more, 16#6f, 16#0a}; +dec_huffman_lookup(16#08, 16#b) -> {more, 16#6f, 16#0f}; +dec_huffman_lookup(16#08, 16#c) -> {more, 16#6f, 16#18}; +dec_huffman_lookup(16#08, 16#d) -> {more, 16#6f, 16#1f}; +dec_huffman_lookup(16#08, 16#e) -> {more, 16#6f, 16#29}; +dec_huffman_lookup(16#08, 16#f) -> {ok, 16#6f, 16#38}; +dec_huffman_lookup(16#09, 16#0) -> {more, 16#73, 16#01}; +dec_huffman_lookup(16#09, 16#1) -> {ok, 16#73, 16#16}; +dec_huffman_lookup(16#09, 16#2) -> {more, 16#74, 16#01}; +dec_huffman_lookup(16#09, 16#3) -> {ok, 16#74, 16#16}; +dec_huffman_lookup(16#09, 16#4) -> {ok, 16#20, 16#00}; +dec_huffman_lookup(16#09, 16#5) -> {ok, 16#25, 16#00}; +dec_huffman_lookup(16#09, 16#6) -> {ok, 16#2d, 16#00}; +dec_huffman_lookup(16#09, 16#7) -> {ok, 16#2e, 16#00}; +dec_huffman_lookup(16#09, 16#8) -> {ok, 16#2f, 16#00}; +dec_huffman_lookup(16#09, 16#9) -> {ok, 16#33, 16#00}; +dec_huffman_lookup(16#09, 16#a) -> {ok, 16#34, 16#00}; +dec_huffman_lookup(16#09, 16#b) -> {ok, 16#35, 16#00}; +dec_huffman_lookup(16#09, 16#c) -> {ok, 16#36, 16#00}; +dec_huffman_lookup(16#09, 16#d) -> {ok, 16#37, 16#00}; +dec_huffman_lookup(16#09, 16#e) -> {ok, 16#38, 16#00}; +dec_huffman_lookup(16#09, 16#f) -> {ok, 16#39, 16#00}; +dec_huffman_lookup(16#0a, 16#0) -> {more, 16#73, 16#02}; +dec_huffman_lookup(16#0a, 16#1) -> {more, 16#73, 16#09}; +dec_huffman_lookup(16#0a, 16#2) -> {more, 16#73, 16#17}; +dec_huffman_lookup(16#0a, 16#3) -> {ok, 16#73, 16#28}; +dec_huffman_lookup(16#0a, 16#4) -> {more, 16#74, 16#02}; +dec_huffman_lookup(16#0a, 16#5) -> {more, 16#74, 16#09}; +dec_huffman_lookup(16#0a, 16#6) -> {more, 16#74, 16#17}; +dec_huffman_lookup(16#0a, 16#7) -> {ok, 16#74, 16#28}; +dec_huffman_lookup(16#0a, 16#8) -> {more, 16#20, 16#01}; +dec_huffman_lookup(16#0a, 16#9) -> {ok, 16#20, 16#16}; +dec_huffman_lookup(16#0a, 16#a) -> {more, 16#25, 16#01}; +dec_huffman_lookup(16#0a, 16#b) -> {ok, 16#25, 16#16}; +dec_huffman_lookup(16#0a, 16#c) -> {more, 16#2d, 16#01}; +dec_huffman_lookup(16#0a, 16#d) -> {ok, 16#2d, 16#16}; +dec_huffman_lookup(16#0a, 16#e) -> {more, 16#2e, 16#01}; +dec_huffman_lookup(16#0a, 16#f) -> {ok, 16#2e, 16#16}; +dec_huffman_lookup(16#0b, 16#0) -> {more, 16#73, 16#03}; +dec_huffman_lookup(16#0b, 16#1) -> {more, 
16#73, 16#06}; +dec_huffman_lookup(16#0b, 16#2) -> {more, 16#73, 16#0a}; +dec_huffman_lookup(16#0b, 16#3) -> {more, 16#73, 16#0f}; +dec_huffman_lookup(16#0b, 16#4) -> {more, 16#73, 16#18}; +dec_huffman_lookup(16#0b, 16#5) -> {more, 16#73, 16#1f}; +dec_huffman_lookup(16#0b, 16#6) -> {more, 16#73, 16#29}; +dec_huffman_lookup(16#0b, 16#7) -> {ok, 16#73, 16#38}; +dec_huffman_lookup(16#0b, 16#8) -> {more, 16#74, 16#03}; +dec_huffman_lookup(16#0b, 16#9) -> {more, 16#74, 16#06}; +dec_huffman_lookup(16#0b, 16#a) -> {more, 16#74, 16#0a}; +dec_huffman_lookup(16#0b, 16#b) -> {more, 16#74, 16#0f}; +dec_huffman_lookup(16#0b, 16#c) -> {more, 16#74, 16#18}; +dec_huffman_lookup(16#0b, 16#d) -> {more, 16#74, 16#1f}; +dec_huffman_lookup(16#0b, 16#e) -> {more, 16#74, 16#29}; +dec_huffman_lookup(16#0b, 16#f) -> {ok, 16#74, 16#38}; +dec_huffman_lookup(16#0c, 16#0) -> {more, 16#20, 16#02}; +dec_huffman_lookup(16#0c, 16#1) -> {more, 16#20, 16#09}; +dec_huffman_lookup(16#0c, 16#2) -> {more, 16#20, 16#17}; +dec_huffman_lookup(16#0c, 16#3) -> {ok, 16#20, 16#28}; +dec_huffman_lookup(16#0c, 16#4) -> {more, 16#25, 16#02}; +dec_huffman_lookup(16#0c, 16#5) -> {more, 16#25, 16#09}; +dec_huffman_lookup(16#0c, 16#6) -> {more, 16#25, 16#17}; +dec_huffman_lookup(16#0c, 16#7) -> {ok, 16#25, 16#28}; +dec_huffman_lookup(16#0c, 16#8) -> {more, 16#2d, 16#02}; +dec_huffman_lookup(16#0c, 16#9) -> {more, 16#2d, 16#09}; +dec_huffman_lookup(16#0c, 16#a) -> {more, 16#2d, 16#17}; +dec_huffman_lookup(16#0c, 16#b) -> {ok, 16#2d, 16#28}; +dec_huffman_lookup(16#0c, 16#c) -> {more, 16#2e, 16#02}; +dec_huffman_lookup(16#0c, 16#d) -> {more, 16#2e, 16#09}; +dec_huffman_lookup(16#0c, 16#e) -> {more, 16#2e, 16#17}; +dec_huffman_lookup(16#0c, 16#f) -> {ok, 16#2e, 16#28}; +dec_huffman_lookup(16#0d, 16#0) -> {more, 16#20, 16#03}; +dec_huffman_lookup(16#0d, 16#1) -> {more, 16#20, 16#06}; +dec_huffman_lookup(16#0d, 16#2) -> {more, 16#20, 16#0a}; +dec_huffman_lookup(16#0d, 16#3) -> {more, 16#20, 16#0f}; +dec_huffman_lookup(16#0d, 16#4) -> {more, 16#20, 16#18}; +dec_huffman_lookup(16#0d, 16#5) -> {more, 16#20, 16#1f}; +dec_huffman_lookup(16#0d, 16#6) -> {more, 16#20, 16#29}; +dec_huffman_lookup(16#0d, 16#7) -> {ok, 16#20, 16#38}; +dec_huffman_lookup(16#0d, 16#8) -> {more, 16#25, 16#03}; +dec_huffman_lookup(16#0d, 16#9) -> {more, 16#25, 16#06}; +dec_huffman_lookup(16#0d, 16#a) -> {more, 16#25, 16#0a}; +dec_huffman_lookup(16#0d, 16#b) -> {more, 16#25, 16#0f}; +dec_huffman_lookup(16#0d, 16#c) -> {more, 16#25, 16#18}; +dec_huffman_lookup(16#0d, 16#d) -> {more, 16#25, 16#1f}; +dec_huffman_lookup(16#0d, 16#e) -> {more, 16#25, 16#29}; +dec_huffman_lookup(16#0d, 16#f) -> {ok, 16#25, 16#38}; +dec_huffman_lookup(16#0e, 16#0) -> {more, 16#2d, 16#03}; +dec_huffman_lookup(16#0e, 16#1) -> {more, 16#2d, 16#06}; +dec_huffman_lookup(16#0e, 16#2) -> {more, 16#2d, 16#0a}; +dec_huffman_lookup(16#0e, 16#3) -> {more, 16#2d, 16#0f}; +dec_huffman_lookup(16#0e, 16#4) -> {more, 16#2d, 16#18}; +dec_huffman_lookup(16#0e, 16#5) -> {more, 16#2d, 16#1f}; +dec_huffman_lookup(16#0e, 16#6) -> {more, 16#2d, 16#29}; +dec_huffman_lookup(16#0e, 16#7) -> {ok, 16#2d, 16#38}; +dec_huffman_lookup(16#0e, 16#8) -> {more, 16#2e, 16#03}; +dec_huffman_lookup(16#0e, 16#9) -> {more, 16#2e, 16#06}; +dec_huffman_lookup(16#0e, 16#a) -> {more, 16#2e, 16#0a}; +dec_huffman_lookup(16#0e, 16#b) -> {more, 16#2e, 16#0f}; +dec_huffman_lookup(16#0e, 16#c) -> {more, 16#2e, 16#18}; +dec_huffman_lookup(16#0e, 16#d) -> {more, 16#2e, 16#1f}; +dec_huffman_lookup(16#0e, 16#e) -> {more, 16#2e, 16#29}; 
+dec_huffman_lookup(16#0e, 16#f) -> {ok, 16#2e, 16#38}; +dec_huffman_lookup(16#0f, 16#0) -> {more, 16#2f, 16#01}; +dec_huffman_lookup(16#0f, 16#1) -> {ok, 16#2f, 16#16}; +dec_huffman_lookup(16#0f, 16#2) -> {more, 16#33, 16#01}; +dec_huffman_lookup(16#0f, 16#3) -> {ok, 16#33, 16#16}; +dec_huffman_lookup(16#0f, 16#4) -> {more, 16#34, 16#01}; +dec_huffman_lookup(16#0f, 16#5) -> {ok, 16#34, 16#16}; +dec_huffman_lookup(16#0f, 16#6) -> {more, 16#35, 16#01}; +dec_huffman_lookup(16#0f, 16#7) -> {ok, 16#35, 16#16}; +dec_huffman_lookup(16#0f, 16#8) -> {more, 16#36, 16#01}; +dec_huffman_lookup(16#0f, 16#9) -> {ok, 16#36, 16#16}; +dec_huffman_lookup(16#0f, 16#a) -> {more, 16#37, 16#01}; +dec_huffman_lookup(16#0f, 16#b) -> {ok, 16#37, 16#16}; +dec_huffman_lookup(16#0f, 16#c) -> {more, 16#38, 16#01}; +dec_huffman_lookup(16#0f, 16#d) -> {ok, 16#38, 16#16}; +dec_huffman_lookup(16#0f, 16#e) -> {more, 16#39, 16#01}; +dec_huffman_lookup(16#0f, 16#f) -> {ok, 16#39, 16#16}; +dec_huffman_lookup(16#10, 16#0) -> {more, 16#2f, 16#02}; +dec_huffman_lookup(16#10, 16#1) -> {more, 16#2f, 16#09}; +dec_huffman_lookup(16#10, 16#2) -> {more, 16#2f, 16#17}; +dec_huffman_lookup(16#10, 16#3) -> {ok, 16#2f, 16#28}; +dec_huffman_lookup(16#10, 16#4) -> {more, 16#33, 16#02}; +dec_huffman_lookup(16#10, 16#5) -> {more, 16#33, 16#09}; +dec_huffman_lookup(16#10, 16#6) -> {more, 16#33, 16#17}; +dec_huffman_lookup(16#10, 16#7) -> {ok, 16#33, 16#28}; +dec_huffman_lookup(16#10, 16#8) -> {more, 16#34, 16#02}; +dec_huffman_lookup(16#10, 16#9) -> {more, 16#34, 16#09}; +dec_huffman_lookup(16#10, 16#a) -> {more, 16#34, 16#17}; +dec_huffman_lookup(16#10, 16#b) -> {ok, 16#34, 16#28}; +dec_huffman_lookup(16#10, 16#c) -> {more, 16#35, 16#02}; +dec_huffman_lookup(16#10, 16#d) -> {more, 16#35, 16#09}; +dec_huffman_lookup(16#10, 16#e) -> {more, 16#35, 16#17}; +dec_huffman_lookup(16#10, 16#f) -> {ok, 16#35, 16#28}; +dec_huffman_lookup(16#11, 16#0) -> {more, 16#2f, 16#03}; +dec_huffman_lookup(16#11, 16#1) -> {more, 16#2f, 16#06}; +dec_huffman_lookup(16#11, 16#2) -> {more, 16#2f, 16#0a}; +dec_huffman_lookup(16#11, 16#3) -> {more, 16#2f, 16#0f}; +dec_huffman_lookup(16#11, 16#4) -> {more, 16#2f, 16#18}; +dec_huffman_lookup(16#11, 16#5) -> {more, 16#2f, 16#1f}; +dec_huffman_lookup(16#11, 16#6) -> {more, 16#2f, 16#29}; +dec_huffman_lookup(16#11, 16#7) -> {ok, 16#2f, 16#38}; +dec_huffman_lookup(16#11, 16#8) -> {more, 16#33, 16#03}; +dec_huffman_lookup(16#11, 16#9) -> {more, 16#33, 16#06}; +dec_huffman_lookup(16#11, 16#a) -> {more, 16#33, 16#0a}; +dec_huffman_lookup(16#11, 16#b) -> {more, 16#33, 16#0f}; +dec_huffman_lookup(16#11, 16#c) -> {more, 16#33, 16#18}; +dec_huffman_lookup(16#11, 16#d) -> {more, 16#33, 16#1f}; +dec_huffman_lookup(16#11, 16#e) -> {more, 16#33, 16#29}; +dec_huffman_lookup(16#11, 16#f) -> {ok, 16#33, 16#38}; +dec_huffman_lookup(16#12, 16#0) -> {more, 16#34, 16#03}; +dec_huffman_lookup(16#12, 16#1) -> {more, 16#34, 16#06}; +dec_huffman_lookup(16#12, 16#2) -> {more, 16#34, 16#0a}; +dec_huffman_lookup(16#12, 16#3) -> {more, 16#34, 16#0f}; +dec_huffman_lookup(16#12, 16#4) -> {more, 16#34, 16#18}; +dec_huffman_lookup(16#12, 16#5) -> {more, 16#34, 16#1f}; +dec_huffman_lookup(16#12, 16#6) -> {more, 16#34, 16#29}; +dec_huffman_lookup(16#12, 16#7) -> {ok, 16#34, 16#38}; +dec_huffman_lookup(16#12, 16#8) -> {more, 16#35, 16#03}; +dec_huffman_lookup(16#12, 16#9) -> {more, 16#35, 16#06}; +dec_huffman_lookup(16#12, 16#a) -> {more, 16#35, 16#0a}; +dec_huffman_lookup(16#12, 16#b) -> {more, 16#35, 16#0f}; +dec_huffman_lookup(16#12, 16#c) -> {more, 
16#35, 16#18}; +dec_huffman_lookup(16#12, 16#d) -> {more, 16#35, 16#1f}; +dec_huffman_lookup(16#12, 16#e) -> {more, 16#35, 16#29}; +dec_huffman_lookup(16#12, 16#f) -> {ok, 16#35, 16#38}; +dec_huffman_lookup(16#13, 16#0) -> {more, 16#36, 16#02}; +dec_huffman_lookup(16#13, 16#1) -> {more, 16#36, 16#09}; +dec_huffman_lookup(16#13, 16#2) -> {more, 16#36, 16#17}; +dec_huffman_lookup(16#13, 16#3) -> {ok, 16#36, 16#28}; +dec_huffman_lookup(16#13, 16#4) -> {more, 16#37, 16#02}; +dec_huffman_lookup(16#13, 16#5) -> {more, 16#37, 16#09}; +dec_huffman_lookup(16#13, 16#6) -> {more, 16#37, 16#17}; +dec_huffman_lookup(16#13, 16#7) -> {ok, 16#37, 16#28}; +dec_huffman_lookup(16#13, 16#8) -> {more, 16#38, 16#02}; +dec_huffman_lookup(16#13, 16#9) -> {more, 16#38, 16#09}; +dec_huffman_lookup(16#13, 16#a) -> {more, 16#38, 16#17}; +dec_huffman_lookup(16#13, 16#b) -> {ok, 16#38, 16#28}; +dec_huffman_lookup(16#13, 16#c) -> {more, 16#39, 16#02}; +dec_huffman_lookup(16#13, 16#d) -> {more, 16#39, 16#09}; +dec_huffman_lookup(16#13, 16#e) -> {more, 16#39, 16#17}; +dec_huffman_lookup(16#13, 16#f) -> {ok, 16#39, 16#28}; +dec_huffman_lookup(16#14, 16#0) -> {more, 16#36, 16#03}; +dec_huffman_lookup(16#14, 16#1) -> {more, 16#36, 16#06}; +dec_huffman_lookup(16#14, 16#2) -> {more, 16#36, 16#0a}; +dec_huffman_lookup(16#14, 16#3) -> {more, 16#36, 16#0f}; +dec_huffman_lookup(16#14, 16#4) -> {more, 16#36, 16#18}; +dec_huffman_lookup(16#14, 16#5) -> {more, 16#36, 16#1f}; +dec_huffman_lookup(16#14, 16#6) -> {more, 16#36, 16#29}; +dec_huffman_lookup(16#14, 16#7) -> {ok, 16#36, 16#38}; +dec_huffman_lookup(16#14, 16#8) -> {more, 16#37, 16#03}; +dec_huffman_lookup(16#14, 16#9) -> {more, 16#37, 16#06}; +dec_huffman_lookup(16#14, 16#a) -> {more, 16#37, 16#0a}; +dec_huffman_lookup(16#14, 16#b) -> {more, 16#37, 16#0f}; +dec_huffman_lookup(16#14, 16#c) -> {more, 16#37, 16#18}; +dec_huffman_lookup(16#14, 16#d) -> {more, 16#37, 16#1f}; +dec_huffman_lookup(16#14, 16#e) -> {more, 16#37, 16#29}; +dec_huffman_lookup(16#14, 16#f) -> {ok, 16#37, 16#38}; +dec_huffman_lookup(16#15, 16#0) -> {more, 16#38, 16#03}; +dec_huffman_lookup(16#15, 16#1) -> {more, 16#38, 16#06}; +dec_huffman_lookup(16#15, 16#2) -> {more, 16#38, 16#0a}; +dec_huffman_lookup(16#15, 16#3) -> {more, 16#38, 16#0f}; +dec_huffman_lookup(16#15, 16#4) -> {more, 16#38, 16#18}; +dec_huffman_lookup(16#15, 16#5) -> {more, 16#38, 16#1f}; +dec_huffman_lookup(16#15, 16#6) -> {more, 16#38, 16#29}; +dec_huffman_lookup(16#15, 16#7) -> {ok, 16#38, 16#38}; +dec_huffman_lookup(16#15, 16#8) -> {more, 16#39, 16#03}; +dec_huffman_lookup(16#15, 16#9) -> {more, 16#39, 16#06}; +dec_huffman_lookup(16#15, 16#a) -> {more, 16#39, 16#0a}; +dec_huffman_lookup(16#15, 16#b) -> {more, 16#39, 16#0f}; +dec_huffman_lookup(16#15, 16#c) -> {more, 16#39, 16#18}; +dec_huffman_lookup(16#15, 16#d) -> {more, 16#39, 16#1f}; +dec_huffman_lookup(16#15, 16#e) -> {more, 16#39, 16#29}; +dec_huffman_lookup(16#15, 16#f) -> {ok, 16#39, 16#38}; +dec_huffman_lookup(16#16, 16#0) -> {more, undefined, 16#1a}; +dec_huffman_lookup(16#16, 16#1) -> {more, undefined, 16#1b}; +dec_huffman_lookup(16#16, 16#2) -> {more, undefined, 16#1d}; +dec_huffman_lookup(16#16, 16#3) -> {more, undefined, 16#1e}; +dec_huffman_lookup(16#16, 16#4) -> {more, undefined, 16#21}; +dec_huffman_lookup(16#16, 16#5) -> {more, undefined, 16#22}; +dec_huffman_lookup(16#16, 16#6) -> {more, undefined, 16#24}; +dec_huffman_lookup(16#16, 16#7) -> {more, undefined, 16#25}; +dec_huffman_lookup(16#16, 16#8) -> {more, undefined, 16#2b}; +dec_huffman_lookup(16#16, 16#9) -> 
{more, undefined, 16#2e}; +dec_huffman_lookup(16#16, 16#a) -> {more, undefined, 16#32}; +dec_huffman_lookup(16#16, 16#b) -> {more, undefined, 16#35}; +dec_huffman_lookup(16#16, 16#c) -> {more, undefined, 16#3a}; +dec_huffman_lookup(16#16, 16#d) -> {more, undefined, 16#3d}; +dec_huffman_lookup(16#16, 16#e) -> {more, undefined, 16#41}; +dec_huffman_lookup(16#16, 16#f) -> {ok, undefined, 16#44}; +dec_huffman_lookup(16#17, 16#0) -> {ok, 16#3d, 16#00}; +dec_huffman_lookup(16#17, 16#1) -> {ok, 16#41, 16#00}; +dec_huffman_lookup(16#17, 16#2) -> {ok, 16#5f, 16#00}; +dec_huffman_lookup(16#17, 16#3) -> {ok, 16#62, 16#00}; +dec_huffman_lookup(16#17, 16#4) -> {ok, 16#64, 16#00}; +dec_huffman_lookup(16#17, 16#5) -> {ok, 16#66, 16#00}; +dec_huffman_lookup(16#17, 16#6) -> {ok, 16#67, 16#00}; +dec_huffman_lookup(16#17, 16#7) -> {ok, 16#68, 16#00}; +dec_huffman_lookup(16#17, 16#8) -> {ok, 16#6c, 16#00}; +dec_huffman_lookup(16#17, 16#9) -> {ok, 16#6d, 16#00}; +dec_huffman_lookup(16#17, 16#a) -> {ok, 16#6e, 16#00}; +dec_huffman_lookup(16#17, 16#b) -> {ok, 16#70, 16#00}; +dec_huffman_lookup(16#17, 16#c) -> {ok, 16#72, 16#00}; +dec_huffman_lookup(16#17, 16#d) -> {ok, 16#75, 16#00}; +dec_huffman_lookup(16#17, 16#e) -> {more, undefined, 16#26}; +dec_huffman_lookup(16#17, 16#f) -> {more, undefined, 16#27}; +dec_huffman_lookup(16#18, 16#0) -> {more, 16#3d, 16#01}; +dec_huffman_lookup(16#18, 16#1) -> {ok, 16#3d, 16#16}; +dec_huffman_lookup(16#18, 16#2) -> {more, 16#41, 16#01}; +dec_huffman_lookup(16#18, 16#3) -> {ok, 16#41, 16#16}; +dec_huffman_lookup(16#18, 16#4) -> {more, 16#5f, 16#01}; +dec_huffman_lookup(16#18, 16#5) -> {ok, 16#5f, 16#16}; +dec_huffman_lookup(16#18, 16#6) -> {more, 16#62, 16#01}; +dec_huffman_lookup(16#18, 16#7) -> {ok, 16#62, 16#16}; +dec_huffman_lookup(16#18, 16#8) -> {more, 16#64, 16#01}; +dec_huffman_lookup(16#18, 16#9) -> {ok, 16#64, 16#16}; +dec_huffman_lookup(16#18, 16#a) -> {more, 16#66, 16#01}; +dec_huffman_lookup(16#18, 16#b) -> {ok, 16#66, 16#16}; +dec_huffman_lookup(16#18, 16#c) -> {more, 16#67, 16#01}; +dec_huffman_lookup(16#18, 16#d) -> {ok, 16#67, 16#16}; +dec_huffman_lookup(16#18, 16#e) -> {more, 16#68, 16#01}; +dec_huffman_lookup(16#18, 16#f) -> {ok, 16#68, 16#16}; +dec_huffman_lookup(16#19, 16#0) -> {more, 16#3d, 16#02}; +dec_huffman_lookup(16#19, 16#1) -> {more, 16#3d, 16#09}; +dec_huffman_lookup(16#19, 16#2) -> {more, 16#3d, 16#17}; +dec_huffman_lookup(16#19, 16#3) -> {ok, 16#3d, 16#28}; +dec_huffman_lookup(16#19, 16#4) -> {more, 16#41, 16#02}; +dec_huffman_lookup(16#19, 16#5) -> {more, 16#41, 16#09}; +dec_huffman_lookup(16#19, 16#6) -> {more, 16#41, 16#17}; +dec_huffman_lookup(16#19, 16#7) -> {ok, 16#41, 16#28}; +dec_huffman_lookup(16#19, 16#8) -> {more, 16#5f, 16#02}; +dec_huffman_lookup(16#19, 16#9) -> {more, 16#5f, 16#09}; +dec_huffman_lookup(16#19, 16#a) -> {more, 16#5f, 16#17}; +dec_huffman_lookup(16#19, 16#b) -> {ok, 16#5f, 16#28}; +dec_huffman_lookup(16#19, 16#c) -> {more, 16#62, 16#02}; +dec_huffman_lookup(16#19, 16#d) -> {more, 16#62, 16#09}; +dec_huffman_lookup(16#19, 16#e) -> {more, 16#62, 16#17}; +dec_huffman_lookup(16#19, 16#f) -> {ok, 16#62, 16#28}; +dec_huffman_lookup(16#1a, 16#0) -> {more, 16#3d, 16#03}; +dec_huffman_lookup(16#1a, 16#1) -> {more, 16#3d, 16#06}; +dec_huffman_lookup(16#1a, 16#2) -> {more, 16#3d, 16#0a}; +dec_huffman_lookup(16#1a, 16#3) -> {more, 16#3d, 16#0f}; +dec_huffman_lookup(16#1a, 16#4) -> {more, 16#3d, 16#18}; +dec_huffman_lookup(16#1a, 16#5) -> {more, 16#3d, 16#1f}; +dec_huffman_lookup(16#1a, 16#6) -> {more, 16#3d, 16#29}; 
+dec_huffman_lookup(16#1a, 16#7) -> {ok, 16#3d, 16#38}; +dec_huffman_lookup(16#1a, 16#8) -> {more, 16#41, 16#03}; +dec_huffman_lookup(16#1a, 16#9) -> {more, 16#41, 16#06}; +dec_huffman_lookup(16#1a, 16#a) -> {more, 16#41, 16#0a}; +dec_huffman_lookup(16#1a, 16#b) -> {more, 16#41, 16#0f}; +dec_huffman_lookup(16#1a, 16#c) -> {more, 16#41, 16#18}; +dec_huffman_lookup(16#1a, 16#d) -> {more, 16#41, 16#1f}; +dec_huffman_lookup(16#1a, 16#e) -> {more, 16#41, 16#29}; +dec_huffman_lookup(16#1a, 16#f) -> {ok, 16#41, 16#38}; +dec_huffman_lookup(16#1b, 16#0) -> {more, 16#5f, 16#03}; +dec_huffman_lookup(16#1b, 16#1) -> {more, 16#5f, 16#06}; +dec_huffman_lookup(16#1b, 16#2) -> {more, 16#5f, 16#0a}; +dec_huffman_lookup(16#1b, 16#3) -> {more, 16#5f, 16#0f}; +dec_huffman_lookup(16#1b, 16#4) -> {more, 16#5f, 16#18}; +dec_huffman_lookup(16#1b, 16#5) -> {more, 16#5f, 16#1f}; +dec_huffman_lookup(16#1b, 16#6) -> {more, 16#5f, 16#29}; +dec_huffman_lookup(16#1b, 16#7) -> {ok, 16#5f, 16#38}; +dec_huffman_lookup(16#1b, 16#8) -> {more, 16#62, 16#03}; +dec_huffman_lookup(16#1b, 16#9) -> {more, 16#62, 16#06}; +dec_huffman_lookup(16#1b, 16#a) -> {more, 16#62, 16#0a}; +dec_huffman_lookup(16#1b, 16#b) -> {more, 16#62, 16#0f}; +dec_huffman_lookup(16#1b, 16#c) -> {more, 16#62, 16#18}; +dec_huffman_lookup(16#1b, 16#d) -> {more, 16#62, 16#1f}; +dec_huffman_lookup(16#1b, 16#e) -> {more, 16#62, 16#29}; +dec_huffman_lookup(16#1b, 16#f) -> {ok, 16#62, 16#38}; +dec_huffman_lookup(16#1c, 16#0) -> {more, 16#64, 16#02}; +dec_huffman_lookup(16#1c, 16#1) -> {more, 16#64, 16#09}; +dec_huffman_lookup(16#1c, 16#2) -> {more, 16#64, 16#17}; +dec_huffman_lookup(16#1c, 16#3) -> {ok, 16#64, 16#28}; +dec_huffman_lookup(16#1c, 16#4) -> {more, 16#66, 16#02}; +dec_huffman_lookup(16#1c, 16#5) -> {more, 16#66, 16#09}; +dec_huffman_lookup(16#1c, 16#6) -> {more, 16#66, 16#17}; +dec_huffman_lookup(16#1c, 16#7) -> {ok, 16#66, 16#28}; +dec_huffman_lookup(16#1c, 16#8) -> {more, 16#67, 16#02}; +dec_huffman_lookup(16#1c, 16#9) -> {more, 16#67, 16#09}; +dec_huffman_lookup(16#1c, 16#a) -> {more, 16#67, 16#17}; +dec_huffman_lookup(16#1c, 16#b) -> {ok, 16#67, 16#28}; +dec_huffman_lookup(16#1c, 16#c) -> {more, 16#68, 16#02}; +dec_huffman_lookup(16#1c, 16#d) -> {more, 16#68, 16#09}; +dec_huffman_lookup(16#1c, 16#e) -> {more, 16#68, 16#17}; +dec_huffman_lookup(16#1c, 16#f) -> {ok, 16#68, 16#28}; +dec_huffman_lookup(16#1d, 16#0) -> {more, 16#64, 16#03}; +dec_huffman_lookup(16#1d, 16#1) -> {more, 16#64, 16#06}; +dec_huffman_lookup(16#1d, 16#2) -> {more, 16#64, 16#0a}; +dec_huffman_lookup(16#1d, 16#3) -> {more, 16#64, 16#0f}; +dec_huffman_lookup(16#1d, 16#4) -> {more, 16#64, 16#18}; +dec_huffman_lookup(16#1d, 16#5) -> {more, 16#64, 16#1f}; +dec_huffman_lookup(16#1d, 16#6) -> {more, 16#64, 16#29}; +dec_huffman_lookup(16#1d, 16#7) -> {ok, 16#64, 16#38}; +dec_huffman_lookup(16#1d, 16#8) -> {more, 16#66, 16#03}; +dec_huffman_lookup(16#1d, 16#9) -> {more, 16#66, 16#06}; +dec_huffman_lookup(16#1d, 16#a) -> {more, 16#66, 16#0a}; +dec_huffman_lookup(16#1d, 16#b) -> {more, 16#66, 16#0f}; +dec_huffman_lookup(16#1d, 16#c) -> {more, 16#66, 16#18}; +dec_huffman_lookup(16#1d, 16#d) -> {more, 16#66, 16#1f}; +dec_huffman_lookup(16#1d, 16#e) -> {more, 16#66, 16#29}; +dec_huffman_lookup(16#1d, 16#f) -> {ok, 16#66, 16#38}; +dec_huffman_lookup(16#1e, 16#0) -> {more, 16#67, 16#03}; +dec_huffman_lookup(16#1e, 16#1) -> {more, 16#67, 16#06}; +dec_huffman_lookup(16#1e, 16#2) -> {more, 16#67, 16#0a}; +dec_huffman_lookup(16#1e, 16#3) -> {more, 16#67, 16#0f}; +dec_huffman_lookup(16#1e, 16#4) -> 
{more, 16#67, 16#18}; +dec_huffman_lookup(16#1e, 16#5) -> {more, 16#67, 16#1f}; +dec_huffman_lookup(16#1e, 16#6) -> {more, 16#67, 16#29}; +dec_huffman_lookup(16#1e, 16#7) -> {ok, 16#67, 16#38}; +dec_huffman_lookup(16#1e, 16#8) -> {more, 16#68, 16#03}; +dec_huffman_lookup(16#1e, 16#9) -> {more, 16#68, 16#06}; +dec_huffman_lookup(16#1e, 16#a) -> {more, 16#68, 16#0a}; +dec_huffman_lookup(16#1e, 16#b) -> {more, 16#68, 16#0f}; +dec_huffman_lookup(16#1e, 16#c) -> {more, 16#68, 16#18}; +dec_huffman_lookup(16#1e, 16#d) -> {more, 16#68, 16#1f}; +dec_huffman_lookup(16#1e, 16#e) -> {more, 16#68, 16#29}; +dec_huffman_lookup(16#1e, 16#f) -> {ok, 16#68, 16#38}; +dec_huffman_lookup(16#1f, 16#0) -> {more, 16#6c, 16#01}; +dec_huffman_lookup(16#1f, 16#1) -> {ok, 16#6c, 16#16}; +dec_huffman_lookup(16#1f, 16#2) -> {more, 16#6d, 16#01}; +dec_huffman_lookup(16#1f, 16#3) -> {ok, 16#6d, 16#16}; +dec_huffman_lookup(16#1f, 16#4) -> {more, 16#6e, 16#01}; +dec_huffman_lookup(16#1f, 16#5) -> {ok, 16#6e, 16#16}; +dec_huffman_lookup(16#1f, 16#6) -> {more, 16#70, 16#01}; +dec_huffman_lookup(16#1f, 16#7) -> {ok, 16#70, 16#16}; +dec_huffman_lookup(16#1f, 16#8) -> {more, 16#72, 16#01}; +dec_huffman_lookup(16#1f, 16#9) -> {ok, 16#72, 16#16}; +dec_huffman_lookup(16#1f, 16#a) -> {more, 16#75, 16#01}; +dec_huffman_lookup(16#1f, 16#b) -> {ok, 16#75, 16#16}; +dec_huffman_lookup(16#1f, 16#c) -> {ok, 16#3a, 16#00}; +dec_huffman_lookup(16#1f, 16#d) -> {ok, 16#42, 16#00}; +dec_huffman_lookup(16#1f, 16#e) -> {ok, 16#43, 16#00}; +dec_huffman_lookup(16#1f, 16#f) -> {ok, 16#44, 16#00}; +dec_huffman_lookup(16#20, 16#0) -> {more, 16#6c, 16#02}; +dec_huffman_lookup(16#20, 16#1) -> {more, 16#6c, 16#09}; +dec_huffman_lookup(16#20, 16#2) -> {more, 16#6c, 16#17}; +dec_huffman_lookup(16#20, 16#3) -> {ok, 16#6c, 16#28}; +dec_huffman_lookup(16#20, 16#4) -> {more, 16#6d, 16#02}; +dec_huffman_lookup(16#20, 16#5) -> {more, 16#6d, 16#09}; +dec_huffman_lookup(16#20, 16#6) -> {more, 16#6d, 16#17}; +dec_huffman_lookup(16#20, 16#7) -> {ok, 16#6d, 16#28}; +dec_huffman_lookup(16#20, 16#8) -> {more, 16#6e, 16#02}; +dec_huffman_lookup(16#20, 16#9) -> {more, 16#6e, 16#09}; +dec_huffman_lookup(16#20, 16#a) -> {more, 16#6e, 16#17}; +dec_huffman_lookup(16#20, 16#b) -> {ok, 16#6e, 16#28}; +dec_huffman_lookup(16#20, 16#c) -> {more, 16#70, 16#02}; +dec_huffman_lookup(16#20, 16#d) -> {more, 16#70, 16#09}; +dec_huffman_lookup(16#20, 16#e) -> {more, 16#70, 16#17}; +dec_huffman_lookup(16#20, 16#f) -> {ok, 16#70, 16#28}; +dec_huffman_lookup(16#21, 16#0) -> {more, 16#6c, 16#03}; +dec_huffman_lookup(16#21, 16#1) -> {more, 16#6c, 16#06}; +dec_huffman_lookup(16#21, 16#2) -> {more, 16#6c, 16#0a}; +dec_huffman_lookup(16#21, 16#3) -> {more, 16#6c, 16#0f}; +dec_huffman_lookup(16#21, 16#4) -> {more, 16#6c, 16#18}; +dec_huffman_lookup(16#21, 16#5) -> {more, 16#6c, 16#1f}; +dec_huffman_lookup(16#21, 16#6) -> {more, 16#6c, 16#29}; +dec_huffman_lookup(16#21, 16#7) -> {ok, 16#6c, 16#38}; +dec_huffman_lookup(16#21, 16#8) -> {more, 16#6d, 16#03}; +dec_huffman_lookup(16#21, 16#9) -> {more, 16#6d, 16#06}; +dec_huffman_lookup(16#21, 16#a) -> {more, 16#6d, 16#0a}; +dec_huffman_lookup(16#21, 16#b) -> {more, 16#6d, 16#0f}; +dec_huffman_lookup(16#21, 16#c) -> {more, 16#6d, 16#18}; +dec_huffman_lookup(16#21, 16#d) -> {more, 16#6d, 16#1f}; +dec_huffman_lookup(16#21, 16#e) -> {more, 16#6d, 16#29}; +dec_huffman_lookup(16#21, 16#f) -> {ok, 16#6d, 16#38}; +dec_huffman_lookup(16#22, 16#0) -> {more, 16#6e, 16#03}; +dec_huffman_lookup(16#22, 16#1) -> {more, 16#6e, 16#06}; +dec_huffman_lookup(16#22, 
16#2) -> {more, 16#6e, 16#0a}; +dec_huffman_lookup(16#22, 16#3) -> {more, 16#6e, 16#0f}; +dec_huffman_lookup(16#22, 16#4) -> {more, 16#6e, 16#18}; +dec_huffman_lookup(16#22, 16#5) -> {more, 16#6e, 16#1f}; +dec_huffman_lookup(16#22, 16#6) -> {more, 16#6e, 16#29}; +dec_huffman_lookup(16#22, 16#7) -> {ok, 16#6e, 16#38}; +dec_huffman_lookup(16#22, 16#8) -> {more, 16#70, 16#03}; +dec_huffman_lookup(16#22, 16#9) -> {more, 16#70, 16#06}; +dec_huffman_lookup(16#22, 16#a) -> {more, 16#70, 16#0a}; +dec_huffman_lookup(16#22, 16#b) -> {more, 16#70, 16#0f}; +dec_huffman_lookup(16#22, 16#c) -> {more, 16#70, 16#18}; +dec_huffman_lookup(16#22, 16#d) -> {more, 16#70, 16#1f}; +dec_huffman_lookup(16#22, 16#e) -> {more, 16#70, 16#29}; +dec_huffman_lookup(16#22, 16#f) -> {ok, 16#70, 16#38}; +dec_huffman_lookup(16#23, 16#0) -> {more, 16#72, 16#02}; +dec_huffman_lookup(16#23, 16#1) -> {more, 16#72, 16#09}; +dec_huffman_lookup(16#23, 16#2) -> {more, 16#72, 16#17}; +dec_huffman_lookup(16#23, 16#3) -> {ok, 16#72, 16#28}; +dec_huffman_lookup(16#23, 16#4) -> {more, 16#75, 16#02}; +dec_huffman_lookup(16#23, 16#5) -> {more, 16#75, 16#09}; +dec_huffman_lookup(16#23, 16#6) -> {more, 16#75, 16#17}; +dec_huffman_lookup(16#23, 16#7) -> {ok, 16#75, 16#28}; +dec_huffman_lookup(16#23, 16#8) -> {more, 16#3a, 16#01}; +dec_huffman_lookup(16#23, 16#9) -> {ok, 16#3a, 16#16}; +dec_huffman_lookup(16#23, 16#a) -> {more, 16#42, 16#01}; +dec_huffman_lookup(16#23, 16#b) -> {ok, 16#42, 16#16}; +dec_huffman_lookup(16#23, 16#c) -> {more, 16#43, 16#01}; +dec_huffman_lookup(16#23, 16#d) -> {ok, 16#43, 16#16}; +dec_huffman_lookup(16#23, 16#e) -> {more, 16#44, 16#01}; +dec_huffman_lookup(16#23, 16#f) -> {ok, 16#44, 16#16}; +dec_huffman_lookup(16#24, 16#0) -> {more, 16#72, 16#03}; +dec_huffman_lookup(16#24, 16#1) -> {more, 16#72, 16#06}; +dec_huffman_lookup(16#24, 16#2) -> {more, 16#72, 16#0a}; +dec_huffman_lookup(16#24, 16#3) -> {more, 16#72, 16#0f}; +dec_huffman_lookup(16#24, 16#4) -> {more, 16#72, 16#18}; +dec_huffman_lookup(16#24, 16#5) -> {more, 16#72, 16#1f}; +dec_huffman_lookup(16#24, 16#6) -> {more, 16#72, 16#29}; +dec_huffman_lookup(16#24, 16#7) -> {ok, 16#72, 16#38}; +dec_huffman_lookup(16#24, 16#8) -> {more, 16#75, 16#03}; +dec_huffman_lookup(16#24, 16#9) -> {more, 16#75, 16#06}; +dec_huffman_lookup(16#24, 16#a) -> {more, 16#75, 16#0a}; +dec_huffman_lookup(16#24, 16#b) -> {more, 16#75, 16#0f}; +dec_huffman_lookup(16#24, 16#c) -> {more, 16#75, 16#18}; +dec_huffman_lookup(16#24, 16#d) -> {more, 16#75, 16#1f}; +dec_huffman_lookup(16#24, 16#e) -> {more, 16#75, 16#29}; +dec_huffman_lookup(16#24, 16#f) -> {ok, 16#75, 16#38}; +dec_huffman_lookup(16#25, 16#0) -> {more, 16#3a, 16#02}; +dec_huffman_lookup(16#25, 16#1) -> {more, 16#3a, 16#09}; +dec_huffman_lookup(16#25, 16#2) -> {more, 16#3a, 16#17}; +dec_huffman_lookup(16#25, 16#3) -> {ok, 16#3a, 16#28}; +dec_huffman_lookup(16#25, 16#4) -> {more, 16#42, 16#02}; +dec_huffman_lookup(16#25, 16#5) -> {more, 16#42, 16#09}; +dec_huffman_lookup(16#25, 16#6) -> {more, 16#42, 16#17}; +dec_huffman_lookup(16#25, 16#7) -> {ok, 16#42, 16#28}; +dec_huffman_lookup(16#25, 16#8) -> {more, 16#43, 16#02}; +dec_huffman_lookup(16#25, 16#9) -> {more, 16#43, 16#09}; +dec_huffman_lookup(16#25, 16#a) -> {more, 16#43, 16#17}; +dec_huffman_lookup(16#25, 16#b) -> {ok, 16#43, 16#28}; +dec_huffman_lookup(16#25, 16#c) -> {more, 16#44, 16#02}; +dec_huffman_lookup(16#25, 16#d) -> {more, 16#44, 16#09}; +dec_huffman_lookup(16#25, 16#e) -> {more, 16#44, 16#17}; +dec_huffman_lookup(16#25, 16#f) -> {ok, 16#44, 16#28}; 
+dec_huffman_lookup(16#26, 16#0) -> {more, 16#3a, 16#03}; +dec_huffman_lookup(16#26, 16#1) -> {more, 16#3a, 16#06}; +dec_huffman_lookup(16#26, 16#2) -> {more, 16#3a, 16#0a}; +dec_huffman_lookup(16#26, 16#3) -> {more, 16#3a, 16#0f}; +dec_huffman_lookup(16#26, 16#4) -> {more, 16#3a, 16#18}; +dec_huffman_lookup(16#26, 16#5) -> {more, 16#3a, 16#1f}; +dec_huffman_lookup(16#26, 16#6) -> {more, 16#3a, 16#29}; +dec_huffman_lookup(16#26, 16#7) -> {ok, 16#3a, 16#38}; +dec_huffman_lookup(16#26, 16#8) -> {more, 16#42, 16#03}; +dec_huffman_lookup(16#26, 16#9) -> {more, 16#42, 16#06}; +dec_huffman_lookup(16#26, 16#a) -> {more, 16#42, 16#0a}; +dec_huffman_lookup(16#26, 16#b) -> {more, 16#42, 16#0f}; +dec_huffman_lookup(16#26, 16#c) -> {more, 16#42, 16#18}; +dec_huffman_lookup(16#26, 16#d) -> {more, 16#42, 16#1f}; +dec_huffman_lookup(16#26, 16#e) -> {more, 16#42, 16#29}; +dec_huffman_lookup(16#26, 16#f) -> {ok, 16#42, 16#38}; +dec_huffman_lookup(16#27, 16#0) -> {more, 16#43, 16#03}; +dec_huffman_lookup(16#27, 16#1) -> {more, 16#43, 16#06}; +dec_huffman_lookup(16#27, 16#2) -> {more, 16#43, 16#0a}; +dec_huffman_lookup(16#27, 16#3) -> {more, 16#43, 16#0f}; +dec_huffman_lookup(16#27, 16#4) -> {more, 16#43, 16#18}; +dec_huffman_lookup(16#27, 16#5) -> {more, 16#43, 16#1f}; +dec_huffman_lookup(16#27, 16#6) -> {more, 16#43, 16#29}; +dec_huffman_lookup(16#27, 16#7) -> {ok, 16#43, 16#38}; +dec_huffman_lookup(16#27, 16#8) -> {more, 16#44, 16#03}; +dec_huffman_lookup(16#27, 16#9) -> {more, 16#44, 16#06}; +dec_huffman_lookup(16#27, 16#a) -> {more, 16#44, 16#0a}; +dec_huffman_lookup(16#27, 16#b) -> {more, 16#44, 16#0f}; +dec_huffman_lookup(16#27, 16#c) -> {more, 16#44, 16#18}; +dec_huffman_lookup(16#27, 16#d) -> {more, 16#44, 16#1f}; +dec_huffman_lookup(16#27, 16#e) -> {more, 16#44, 16#29}; +dec_huffman_lookup(16#27, 16#f) -> {ok, 16#44, 16#38}; +dec_huffman_lookup(16#28, 16#0) -> {more, undefined, 16#2c}; +dec_huffman_lookup(16#28, 16#1) -> {more, undefined, 16#2d}; +dec_huffman_lookup(16#28, 16#2) -> {more, undefined, 16#2f}; +dec_huffman_lookup(16#28, 16#3) -> {more, undefined, 16#30}; +dec_huffman_lookup(16#28, 16#4) -> {more, undefined, 16#33}; +dec_huffman_lookup(16#28, 16#5) -> {more, undefined, 16#34}; +dec_huffman_lookup(16#28, 16#6) -> {more, undefined, 16#36}; +dec_huffman_lookup(16#28, 16#7) -> {more, undefined, 16#37}; +dec_huffman_lookup(16#28, 16#8) -> {more, undefined, 16#3b}; +dec_huffman_lookup(16#28, 16#9) -> {more, undefined, 16#3c}; +dec_huffman_lookup(16#28, 16#a) -> {more, undefined, 16#3e}; +dec_huffman_lookup(16#28, 16#b) -> {more, undefined, 16#3f}; +dec_huffman_lookup(16#28, 16#c) -> {more, undefined, 16#42}; +dec_huffman_lookup(16#28, 16#d) -> {more, undefined, 16#43}; +dec_huffman_lookup(16#28, 16#e) -> {more, undefined, 16#45}; +dec_huffman_lookup(16#28, 16#f) -> {ok, undefined, 16#48}; +dec_huffman_lookup(16#29, 16#0) -> {ok, 16#45, 16#00}; +dec_huffman_lookup(16#29, 16#1) -> {ok, 16#46, 16#00}; +dec_huffman_lookup(16#29, 16#2) -> {ok, 16#47, 16#00}; +dec_huffman_lookup(16#29, 16#3) -> {ok, 16#48, 16#00}; +dec_huffman_lookup(16#29, 16#4) -> {ok, 16#49, 16#00}; +dec_huffman_lookup(16#29, 16#5) -> {ok, 16#4a, 16#00}; +dec_huffman_lookup(16#29, 16#6) -> {ok, 16#4b, 16#00}; +dec_huffman_lookup(16#29, 16#7) -> {ok, 16#4c, 16#00}; +dec_huffman_lookup(16#29, 16#8) -> {ok, 16#4d, 16#00}; +dec_huffman_lookup(16#29, 16#9) -> {ok, 16#4e, 16#00}; +dec_huffman_lookup(16#29, 16#a) -> {ok, 16#4f, 16#00}; +dec_huffman_lookup(16#29, 16#b) -> {ok, 16#50, 16#00}; +dec_huffman_lookup(16#29, 16#c) -> {ok, 
16#51, 16#00}; +dec_huffman_lookup(16#29, 16#d) -> {ok, 16#52, 16#00}; +dec_huffman_lookup(16#29, 16#e) -> {ok, 16#53, 16#00}; +dec_huffman_lookup(16#29, 16#f) -> {ok, 16#54, 16#00}; +dec_huffman_lookup(16#2a, 16#0) -> {more, 16#45, 16#01}; +dec_huffman_lookup(16#2a, 16#1) -> {ok, 16#45, 16#16}; +dec_huffman_lookup(16#2a, 16#2) -> {more, 16#46, 16#01}; +dec_huffman_lookup(16#2a, 16#3) -> {ok, 16#46, 16#16}; +dec_huffman_lookup(16#2a, 16#4) -> {more, 16#47, 16#01}; +dec_huffman_lookup(16#2a, 16#5) -> {ok, 16#47, 16#16}; +dec_huffman_lookup(16#2a, 16#6) -> {more, 16#48, 16#01}; +dec_huffman_lookup(16#2a, 16#7) -> {ok, 16#48, 16#16}; +dec_huffman_lookup(16#2a, 16#8) -> {more, 16#49, 16#01}; +dec_huffman_lookup(16#2a, 16#9) -> {ok, 16#49, 16#16}; +dec_huffman_lookup(16#2a, 16#a) -> {more, 16#4a, 16#01}; +dec_huffman_lookup(16#2a, 16#b) -> {ok, 16#4a, 16#16}; +dec_huffman_lookup(16#2a, 16#c) -> {more, 16#4b, 16#01}; +dec_huffman_lookup(16#2a, 16#d) -> {ok, 16#4b, 16#16}; +dec_huffman_lookup(16#2a, 16#e) -> {more, 16#4c, 16#01}; +dec_huffman_lookup(16#2a, 16#f) -> {ok, 16#4c, 16#16}; +dec_huffman_lookup(16#2b, 16#0) -> {more, 16#45, 16#02}; +dec_huffman_lookup(16#2b, 16#1) -> {more, 16#45, 16#09}; +dec_huffman_lookup(16#2b, 16#2) -> {more, 16#45, 16#17}; +dec_huffman_lookup(16#2b, 16#3) -> {ok, 16#45, 16#28}; +dec_huffman_lookup(16#2b, 16#4) -> {more, 16#46, 16#02}; +dec_huffman_lookup(16#2b, 16#5) -> {more, 16#46, 16#09}; +dec_huffman_lookup(16#2b, 16#6) -> {more, 16#46, 16#17}; +dec_huffman_lookup(16#2b, 16#7) -> {ok, 16#46, 16#28}; +dec_huffman_lookup(16#2b, 16#8) -> {more, 16#47, 16#02}; +dec_huffman_lookup(16#2b, 16#9) -> {more, 16#47, 16#09}; +dec_huffman_lookup(16#2b, 16#a) -> {more, 16#47, 16#17}; +dec_huffman_lookup(16#2b, 16#b) -> {ok, 16#47, 16#28}; +dec_huffman_lookup(16#2b, 16#c) -> {more, 16#48, 16#02}; +dec_huffman_lookup(16#2b, 16#d) -> {more, 16#48, 16#09}; +dec_huffman_lookup(16#2b, 16#e) -> {more, 16#48, 16#17}; +dec_huffman_lookup(16#2b, 16#f) -> {ok, 16#48, 16#28}; +dec_huffman_lookup(16#2c, 16#0) -> {more, 16#45, 16#03}; +dec_huffman_lookup(16#2c, 16#1) -> {more, 16#45, 16#06}; +dec_huffman_lookup(16#2c, 16#2) -> {more, 16#45, 16#0a}; +dec_huffman_lookup(16#2c, 16#3) -> {more, 16#45, 16#0f}; +dec_huffman_lookup(16#2c, 16#4) -> {more, 16#45, 16#18}; +dec_huffman_lookup(16#2c, 16#5) -> {more, 16#45, 16#1f}; +dec_huffman_lookup(16#2c, 16#6) -> {more, 16#45, 16#29}; +dec_huffman_lookup(16#2c, 16#7) -> {ok, 16#45, 16#38}; +dec_huffman_lookup(16#2c, 16#8) -> {more, 16#46, 16#03}; +dec_huffman_lookup(16#2c, 16#9) -> {more, 16#46, 16#06}; +dec_huffman_lookup(16#2c, 16#a) -> {more, 16#46, 16#0a}; +dec_huffman_lookup(16#2c, 16#b) -> {more, 16#46, 16#0f}; +dec_huffman_lookup(16#2c, 16#c) -> {more, 16#46, 16#18}; +dec_huffman_lookup(16#2c, 16#d) -> {more, 16#46, 16#1f}; +dec_huffman_lookup(16#2c, 16#e) -> {more, 16#46, 16#29}; +dec_huffman_lookup(16#2c, 16#f) -> {ok, 16#46, 16#38}; +dec_huffman_lookup(16#2d, 16#0) -> {more, 16#47, 16#03}; +dec_huffman_lookup(16#2d, 16#1) -> {more, 16#47, 16#06}; +dec_huffman_lookup(16#2d, 16#2) -> {more, 16#47, 16#0a}; +dec_huffman_lookup(16#2d, 16#3) -> {more, 16#47, 16#0f}; +dec_huffman_lookup(16#2d, 16#4) -> {more, 16#47, 16#18}; +dec_huffman_lookup(16#2d, 16#5) -> {more, 16#47, 16#1f}; +dec_huffman_lookup(16#2d, 16#6) -> {more, 16#47, 16#29}; +dec_huffman_lookup(16#2d, 16#7) -> {ok, 16#47, 16#38}; +dec_huffman_lookup(16#2d, 16#8) -> {more, 16#48, 16#03}; +dec_huffman_lookup(16#2d, 16#9) -> {more, 16#48, 16#06}; +dec_huffman_lookup(16#2d, 16#a) -> 
{more, 16#48, 16#0a}; +dec_huffman_lookup(16#2d, 16#b) -> {more, 16#48, 16#0f}; +dec_huffman_lookup(16#2d, 16#c) -> {more, 16#48, 16#18}; +dec_huffman_lookup(16#2d, 16#d) -> {more, 16#48, 16#1f}; +dec_huffman_lookup(16#2d, 16#e) -> {more, 16#48, 16#29}; +dec_huffman_lookup(16#2d, 16#f) -> {ok, 16#48, 16#38}; +dec_huffman_lookup(16#2e, 16#0) -> {more, 16#49, 16#02}; +dec_huffman_lookup(16#2e, 16#1) -> {more, 16#49, 16#09}; +dec_huffman_lookup(16#2e, 16#2) -> {more, 16#49, 16#17}; +dec_huffman_lookup(16#2e, 16#3) -> {ok, 16#49, 16#28}; +dec_huffman_lookup(16#2e, 16#4) -> {more, 16#4a, 16#02}; +dec_huffman_lookup(16#2e, 16#5) -> {more, 16#4a, 16#09}; +dec_huffman_lookup(16#2e, 16#6) -> {more, 16#4a, 16#17}; +dec_huffman_lookup(16#2e, 16#7) -> {ok, 16#4a, 16#28}; +dec_huffman_lookup(16#2e, 16#8) -> {more, 16#4b, 16#02}; +dec_huffman_lookup(16#2e, 16#9) -> {more, 16#4b, 16#09}; +dec_huffman_lookup(16#2e, 16#a) -> {more, 16#4b, 16#17}; +dec_huffman_lookup(16#2e, 16#b) -> {ok, 16#4b, 16#28}; +dec_huffman_lookup(16#2e, 16#c) -> {more, 16#4c, 16#02}; +dec_huffman_lookup(16#2e, 16#d) -> {more, 16#4c, 16#09}; +dec_huffman_lookup(16#2e, 16#e) -> {more, 16#4c, 16#17}; +dec_huffman_lookup(16#2e, 16#f) -> {ok, 16#4c, 16#28}; +dec_huffman_lookup(16#2f, 16#0) -> {more, 16#49, 16#03}; +dec_huffman_lookup(16#2f, 16#1) -> {more, 16#49, 16#06}; +dec_huffman_lookup(16#2f, 16#2) -> {more, 16#49, 16#0a}; +dec_huffman_lookup(16#2f, 16#3) -> {more, 16#49, 16#0f}; +dec_huffman_lookup(16#2f, 16#4) -> {more, 16#49, 16#18}; +dec_huffman_lookup(16#2f, 16#5) -> {more, 16#49, 16#1f}; +dec_huffman_lookup(16#2f, 16#6) -> {more, 16#49, 16#29}; +dec_huffman_lookup(16#2f, 16#7) -> {ok, 16#49, 16#38}; +dec_huffman_lookup(16#2f, 16#8) -> {more, 16#4a, 16#03}; +dec_huffman_lookup(16#2f, 16#9) -> {more, 16#4a, 16#06}; +dec_huffman_lookup(16#2f, 16#a) -> {more, 16#4a, 16#0a}; +dec_huffman_lookup(16#2f, 16#b) -> {more, 16#4a, 16#0f}; +dec_huffman_lookup(16#2f, 16#c) -> {more, 16#4a, 16#18}; +dec_huffman_lookup(16#2f, 16#d) -> {more, 16#4a, 16#1f}; +dec_huffman_lookup(16#2f, 16#e) -> {more, 16#4a, 16#29}; +dec_huffman_lookup(16#2f, 16#f) -> {ok, 16#4a, 16#38}; +dec_huffman_lookup(16#30, 16#0) -> {more, 16#4b, 16#03}; +dec_huffman_lookup(16#30, 16#1) -> {more, 16#4b, 16#06}; +dec_huffman_lookup(16#30, 16#2) -> {more, 16#4b, 16#0a}; +dec_huffman_lookup(16#30, 16#3) -> {more, 16#4b, 16#0f}; +dec_huffman_lookup(16#30, 16#4) -> {more, 16#4b, 16#18}; +dec_huffman_lookup(16#30, 16#5) -> {more, 16#4b, 16#1f}; +dec_huffman_lookup(16#30, 16#6) -> {more, 16#4b, 16#29}; +dec_huffman_lookup(16#30, 16#7) -> {ok, 16#4b, 16#38}; +dec_huffman_lookup(16#30, 16#8) -> {more, 16#4c, 16#03}; +dec_huffman_lookup(16#30, 16#9) -> {more, 16#4c, 16#06}; +dec_huffman_lookup(16#30, 16#a) -> {more, 16#4c, 16#0a}; +dec_huffman_lookup(16#30, 16#b) -> {more, 16#4c, 16#0f}; +dec_huffman_lookup(16#30, 16#c) -> {more, 16#4c, 16#18}; +dec_huffman_lookup(16#30, 16#d) -> {more, 16#4c, 16#1f}; +dec_huffman_lookup(16#30, 16#e) -> {more, 16#4c, 16#29}; +dec_huffman_lookup(16#30, 16#f) -> {ok, 16#4c, 16#38}; +dec_huffman_lookup(16#31, 16#0) -> {more, 16#4d, 16#01}; +dec_huffman_lookup(16#31, 16#1) -> {ok, 16#4d, 16#16}; +dec_huffman_lookup(16#31, 16#2) -> {more, 16#4e, 16#01}; +dec_huffman_lookup(16#31, 16#3) -> {ok, 16#4e, 16#16}; +dec_huffman_lookup(16#31, 16#4) -> {more, 16#4f, 16#01}; +dec_huffman_lookup(16#31, 16#5) -> {ok, 16#4f, 16#16}; +dec_huffman_lookup(16#31, 16#6) -> {more, 16#50, 16#01}; +dec_huffman_lookup(16#31, 16#7) -> {ok, 16#50, 16#16}; 
+dec_huffman_lookup(16#31, 16#8) -> {more, 16#51, 16#01}; +dec_huffman_lookup(16#31, 16#9) -> {ok, 16#51, 16#16}; +dec_huffman_lookup(16#31, 16#a) -> {more, 16#52, 16#01}; +dec_huffman_lookup(16#31, 16#b) -> {ok, 16#52, 16#16}; +dec_huffman_lookup(16#31, 16#c) -> {more, 16#53, 16#01}; +dec_huffman_lookup(16#31, 16#d) -> {ok, 16#53, 16#16}; +dec_huffman_lookup(16#31, 16#e) -> {more, 16#54, 16#01}; +dec_huffman_lookup(16#31, 16#f) -> {ok, 16#54, 16#16}; +dec_huffman_lookup(16#32, 16#0) -> {more, 16#4d, 16#02}; +dec_huffman_lookup(16#32, 16#1) -> {more, 16#4d, 16#09}; +dec_huffman_lookup(16#32, 16#2) -> {more, 16#4d, 16#17}; +dec_huffman_lookup(16#32, 16#3) -> {ok, 16#4d, 16#28}; +dec_huffman_lookup(16#32, 16#4) -> {more, 16#4e, 16#02}; +dec_huffman_lookup(16#32, 16#5) -> {more, 16#4e, 16#09}; +dec_huffman_lookup(16#32, 16#6) -> {more, 16#4e, 16#17}; +dec_huffman_lookup(16#32, 16#7) -> {ok, 16#4e, 16#28}; +dec_huffman_lookup(16#32, 16#8) -> {more, 16#4f, 16#02}; +dec_huffman_lookup(16#32, 16#9) -> {more, 16#4f, 16#09}; +dec_huffman_lookup(16#32, 16#a) -> {more, 16#4f, 16#17}; +dec_huffman_lookup(16#32, 16#b) -> {ok, 16#4f, 16#28}; +dec_huffman_lookup(16#32, 16#c) -> {more, 16#50, 16#02}; +dec_huffman_lookup(16#32, 16#d) -> {more, 16#50, 16#09}; +dec_huffman_lookup(16#32, 16#e) -> {more, 16#50, 16#17}; +dec_huffman_lookup(16#32, 16#f) -> {ok, 16#50, 16#28}; +dec_huffman_lookup(16#33, 16#0) -> {more, 16#4d, 16#03}; +dec_huffman_lookup(16#33, 16#1) -> {more, 16#4d, 16#06}; +dec_huffman_lookup(16#33, 16#2) -> {more, 16#4d, 16#0a}; +dec_huffman_lookup(16#33, 16#3) -> {more, 16#4d, 16#0f}; +dec_huffman_lookup(16#33, 16#4) -> {more, 16#4d, 16#18}; +dec_huffman_lookup(16#33, 16#5) -> {more, 16#4d, 16#1f}; +dec_huffman_lookup(16#33, 16#6) -> {more, 16#4d, 16#29}; +dec_huffman_lookup(16#33, 16#7) -> {ok, 16#4d, 16#38}; +dec_huffman_lookup(16#33, 16#8) -> {more, 16#4e, 16#03}; +dec_huffman_lookup(16#33, 16#9) -> {more, 16#4e, 16#06}; +dec_huffman_lookup(16#33, 16#a) -> {more, 16#4e, 16#0a}; +dec_huffman_lookup(16#33, 16#b) -> {more, 16#4e, 16#0f}; +dec_huffman_lookup(16#33, 16#c) -> {more, 16#4e, 16#18}; +dec_huffman_lookup(16#33, 16#d) -> {more, 16#4e, 16#1f}; +dec_huffman_lookup(16#33, 16#e) -> {more, 16#4e, 16#29}; +dec_huffman_lookup(16#33, 16#f) -> {ok, 16#4e, 16#38}; +dec_huffman_lookup(16#34, 16#0) -> {more, 16#4f, 16#03}; +dec_huffman_lookup(16#34, 16#1) -> {more, 16#4f, 16#06}; +dec_huffman_lookup(16#34, 16#2) -> {more, 16#4f, 16#0a}; +dec_huffman_lookup(16#34, 16#3) -> {more, 16#4f, 16#0f}; +dec_huffman_lookup(16#34, 16#4) -> {more, 16#4f, 16#18}; +dec_huffman_lookup(16#34, 16#5) -> {more, 16#4f, 16#1f}; +dec_huffman_lookup(16#34, 16#6) -> {more, 16#4f, 16#29}; +dec_huffman_lookup(16#34, 16#7) -> {ok, 16#4f, 16#38}; +dec_huffman_lookup(16#34, 16#8) -> {more, 16#50, 16#03}; +dec_huffman_lookup(16#34, 16#9) -> {more, 16#50, 16#06}; +dec_huffman_lookup(16#34, 16#a) -> {more, 16#50, 16#0a}; +dec_huffman_lookup(16#34, 16#b) -> {more, 16#50, 16#0f}; +dec_huffman_lookup(16#34, 16#c) -> {more, 16#50, 16#18}; +dec_huffman_lookup(16#34, 16#d) -> {more, 16#50, 16#1f}; +dec_huffman_lookup(16#34, 16#e) -> {more, 16#50, 16#29}; +dec_huffman_lookup(16#34, 16#f) -> {ok, 16#50, 16#38}; +dec_huffman_lookup(16#35, 16#0) -> {more, 16#51, 16#02}; +dec_huffman_lookup(16#35, 16#1) -> {more, 16#51, 16#09}; +dec_huffman_lookup(16#35, 16#2) -> {more, 16#51, 16#17}; +dec_huffman_lookup(16#35, 16#3) -> {ok, 16#51, 16#28}; +dec_huffman_lookup(16#35, 16#4) -> {more, 16#52, 16#02}; +dec_huffman_lookup(16#35, 16#5) -> {more, 
16#52, 16#09}; +dec_huffman_lookup(16#35, 16#6) -> {more, 16#52, 16#17}; +dec_huffman_lookup(16#35, 16#7) -> {ok, 16#52, 16#28}; +dec_huffman_lookup(16#35, 16#8) -> {more, 16#53, 16#02}; +dec_huffman_lookup(16#35, 16#9) -> {more, 16#53, 16#09}; +dec_huffman_lookup(16#35, 16#a) -> {more, 16#53, 16#17}; +dec_huffman_lookup(16#35, 16#b) -> {ok, 16#53, 16#28}; +dec_huffman_lookup(16#35, 16#c) -> {more, 16#54, 16#02}; +dec_huffman_lookup(16#35, 16#d) -> {more, 16#54, 16#09}; +dec_huffman_lookup(16#35, 16#e) -> {more, 16#54, 16#17}; +dec_huffman_lookup(16#35, 16#f) -> {ok, 16#54, 16#28}; +dec_huffman_lookup(16#36, 16#0) -> {more, 16#51, 16#03}; +dec_huffman_lookup(16#36, 16#1) -> {more, 16#51, 16#06}; +dec_huffman_lookup(16#36, 16#2) -> {more, 16#51, 16#0a}; +dec_huffman_lookup(16#36, 16#3) -> {more, 16#51, 16#0f}; +dec_huffman_lookup(16#36, 16#4) -> {more, 16#51, 16#18}; +dec_huffman_lookup(16#36, 16#5) -> {more, 16#51, 16#1f}; +dec_huffman_lookup(16#36, 16#6) -> {more, 16#51, 16#29}; +dec_huffman_lookup(16#36, 16#7) -> {ok, 16#51, 16#38}; +dec_huffman_lookup(16#36, 16#8) -> {more, 16#52, 16#03}; +dec_huffman_lookup(16#36, 16#9) -> {more, 16#52, 16#06}; +dec_huffman_lookup(16#36, 16#a) -> {more, 16#52, 16#0a}; +dec_huffman_lookup(16#36, 16#b) -> {more, 16#52, 16#0f}; +dec_huffman_lookup(16#36, 16#c) -> {more, 16#52, 16#18}; +dec_huffman_lookup(16#36, 16#d) -> {more, 16#52, 16#1f}; +dec_huffman_lookup(16#36, 16#e) -> {more, 16#52, 16#29}; +dec_huffman_lookup(16#36, 16#f) -> {ok, 16#52, 16#38}; +dec_huffman_lookup(16#37, 16#0) -> {more, 16#53, 16#03}; +dec_huffman_lookup(16#37, 16#1) -> {more, 16#53, 16#06}; +dec_huffman_lookup(16#37, 16#2) -> {more, 16#53, 16#0a}; +dec_huffman_lookup(16#37, 16#3) -> {more, 16#53, 16#0f}; +dec_huffman_lookup(16#37, 16#4) -> {more, 16#53, 16#18}; +dec_huffman_lookup(16#37, 16#5) -> {more, 16#53, 16#1f}; +dec_huffman_lookup(16#37, 16#6) -> {more, 16#53, 16#29}; +dec_huffman_lookup(16#37, 16#7) -> {ok, 16#53, 16#38}; +dec_huffman_lookup(16#37, 16#8) -> {more, 16#54, 16#03}; +dec_huffman_lookup(16#37, 16#9) -> {more, 16#54, 16#06}; +dec_huffman_lookup(16#37, 16#a) -> {more, 16#54, 16#0a}; +dec_huffman_lookup(16#37, 16#b) -> {more, 16#54, 16#0f}; +dec_huffman_lookup(16#37, 16#c) -> {more, 16#54, 16#18}; +dec_huffman_lookup(16#37, 16#d) -> {more, 16#54, 16#1f}; +dec_huffman_lookup(16#37, 16#e) -> {more, 16#54, 16#29}; +dec_huffman_lookup(16#37, 16#f) -> {ok, 16#54, 16#38}; +dec_huffman_lookup(16#38, 16#0) -> {ok, 16#55, 16#00}; +dec_huffman_lookup(16#38, 16#1) -> {ok, 16#56, 16#00}; +dec_huffman_lookup(16#38, 16#2) -> {ok, 16#57, 16#00}; +dec_huffman_lookup(16#38, 16#3) -> {ok, 16#59, 16#00}; +dec_huffman_lookup(16#38, 16#4) -> {ok, 16#6a, 16#00}; +dec_huffman_lookup(16#38, 16#5) -> {ok, 16#6b, 16#00}; +dec_huffman_lookup(16#38, 16#6) -> {ok, 16#71, 16#00}; +dec_huffman_lookup(16#38, 16#7) -> {ok, 16#76, 16#00}; +dec_huffman_lookup(16#38, 16#8) -> {ok, 16#77, 16#00}; +dec_huffman_lookup(16#38, 16#9) -> {ok, 16#78, 16#00}; +dec_huffman_lookup(16#38, 16#a) -> {ok, 16#79, 16#00}; +dec_huffman_lookup(16#38, 16#b) -> {ok, 16#7a, 16#00}; +dec_huffman_lookup(16#38, 16#c) -> {more, undefined, 16#46}; +dec_huffman_lookup(16#38, 16#d) -> {more, undefined, 16#47}; +dec_huffman_lookup(16#38, 16#e) -> {more, undefined, 16#49}; +dec_huffman_lookup(16#38, 16#f) -> {ok, undefined, 16#4a}; +dec_huffman_lookup(16#39, 16#0) -> {more, 16#55, 16#01}; +dec_huffman_lookup(16#39, 16#1) -> {ok, 16#55, 16#16}; +dec_huffman_lookup(16#39, 16#2) -> {more, 16#56, 16#01}; +dec_huffman_lookup(16#39, 
16#3) -> {ok, 16#56, 16#16}; +dec_huffman_lookup(16#39, 16#4) -> {more, 16#57, 16#01}; +dec_huffman_lookup(16#39, 16#5) -> {ok, 16#57, 16#16}; +dec_huffman_lookup(16#39, 16#6) -> {more, 16#59, 16#01}; +dec_huffman_lookup(16#39, 16#7) -> {ok, 16#59, 16#16}; +dec_huffman_lookup(16#39, 16#8) -> {more, 16#6a, 16#01}; +dec_huffman_lookup(16#39, 16#9) -> {ok, 16#6a, 16#16}; +dec_huffman_lookup(16#39, 16#a) -> {more, 16#6b, 16#01}; +dec_huffman_lookup(16#39, 16#b) -> {ok, 16#6b, 16#16}; +dec_huffman_lookup(16#39, 16#c) -> {more, 16#71, 16#01}; +dec_huffman_lookup(16#39, 16#d) -> {ok, 16#71, 16#16}; +dec_huffman_lookup(16#39, 16#e) -> {more, 16#76, 16#01}; +dec_huffman_lookup(16#39, 16#f) -> {ok, 16#76, 16#16}; +dec_huffman_lookup(16#3a, 16#0) -> {more, 16#55, 16#02}; +dec_huffman_lookup(16#3a, 16#1) -> {more, 16#55, 16#09}; +dec_huffman_lookup(16#3a, 16#2) -> {more, 16#55, 16#17}; +dec_huffman_lookup(16#3a, 16#3) -> {ok, 16#55, 16#28}; +dec_huffman_lookup(16#3a, 16#4) -> {more, 16#56, 16#02}; +dec_huffman_lookup(16#3a, 16#5) -> {more, 16#56, 16#09}; +dec_huffman_lookup(16#3a, 16#6) -> {more, 16#56, 16#17}; +dec_huffman_lookup(16#3a, 16#7) -> {ok, 16#56, 16#28}; +dec_huffman_lookup(16#3a, 16#8) -> {more, 16#57, 16#02}; +dec_huffman_lookup(16#3a, 16#9) -> {more, 16#57, 16#09}; +dec_huffman_lookup(16#3a, 16#a) -> {more, 16#57, 16#17}; +dec_huffman_lookup(16#3a, 16#b) -> {ok, 16#57, 16#28}; +dec_huffman_lookup(16#3a, 16#c) -> {more, 16#59, 16#02}; +dec_huffman_lookup(16#3a, 16#d) -> {more, 16#59, 16#09}; +dec_huffman_lookup(16#3a, 16#e) -> {more, 16#59, 16#17}; +dec_huffman_lookup(16#3a, 16#f) -> {ok, 16#59, 16#28}; +dec_huffman_lookup(16#3b, 16#0) -> {more, 16#55, 16#03}; +dec_huffman_lookup(16#3b, 16#1) -> {more, 16#55, 16#06}; +dec_huffman_lookup(16#3b, 16#2) -> {more, 16#55, 16#0a}; +dec_huffman_lookup(16#3b, 16#3) -> {more, 16#55, 16#0f}; +dec_huffman_lookup(16#3b, 16#4) -> {more, 16#55, 16#18}; +dec_huffman_lookup(16#3b, 16#5) -> {more, 16#55, 16#1f}; +dec_huffman_lookup(16#3b, 16#6) -> {more, 16#55, 16#29}; +dec_huffman_lookup(16#3b, 16#7) -> {ok, 16#55, 16#38}; +dec_huffman_lookup(16#3b, 16#8) -> {more, 16#56, 16#03}; +dec_huffman_lookup(16#3b, 16#9) -> {more, 16#56, 16#06}; +dec_huffman_lookup(16#3b, 16#a) -> {more, 16#56, 16#0a}; +dec_huffman_lookup(16#3b, 16#b) -> {more, 16#56, 16#0f}; +dec_huffman_lookup(16#3b, 16#c) -> {more, 16#56, 16#18}; +dec_huffman_lookup(16#3b, 16#d) -> {more, 16#56, 16#1f}; +dec_huffman_lookup(16#3b, 16#e) -> {more, 16#56, 16#29}; +dec_huffman_lookup(16#3b, 16#f) -> {ok, 16#56, 16#38}; +dec_huffman_lookup(16#3c, 16#0) -> {more, 16#57, 16#03}; +dec_huffman_lookup(16#3c, 16#1) -> {more, 16#57, 16#06}; +dec_huffman_lookup(16#3c, 16#2) -> {more, 16#57, 16#0a}; +dec_huffman_lookup(16#3c, 16#3) -> {more, 16#57, 16#0f}; +dec_huffman_lookup(16#3c, 16#4) -> {more, 16#57, 16#18}; +dec_huffman_lookup(16#3c, 16#5) -> {more, 16#57, 16#1f}; +dec_huffman_lookup(16#3c, 16#6) -> {more, 16#57, 16#29}; +dec_huffman_lookup(16#3c, 16#7) -> {ok, 16#57, 16#38}; +dec_huffman_lookup(16#3c, 16#8) -> {more, 16#59, 16#03}; +dec_huffman_lookup(16#3c, 16#9) -> {more, 16#59, 16#06}; +dec_huffman_lookup(16#3c, 16#a) -> {more, 16#59, 16#0a}; +dec_huffman_lookup(16#3c, 16#b) -> {more, 16#59, 16#0f}; +dec_huffman_lookup(16#3c, 16#c) -> {more, 16#59, 16#18}; +dec_huffman_lookup(16#3c, 16#d) -> {more, 16#59, 16#1f}; +dec_huffman_lookup(16#3c, 16#e) -> {more, 16#59, 16#29}; +dec_huffman_lookup(16#3c, 16#f) -> {ok, 16#59, 16#38}; +dec_huffman_lookup(16#3d, 16#0) -> {more, 16#6a, 16#02}; 
+dec_huffman_lookup(16#3d, 16#1) -> {more, 16#6a, 16#09}; +dec_huffman_lookup(16#3d, 16#2) -> {more, 16#6a, 16#17}; +dec_huffman_lookup(16#3d, 16#3) -> {ok, 16#6a, 16#28}; +dec_huffman_lookup(16#3d, 16#4) -> {more, 16#6b, 16#02}; +dec_huffman_lookup(16#3d, 16#5) -> {more, 16#6b, 16#09}; +dec_huffman_lookup(16#3d, 16#6) -> {more, 16#6b, 16#17}; +dec_huffman_lookup(16#3d, 16#7) -> {ok, 16#6b, 16#28}; +dec_huffman_lookup(16#3d, 16#8) -> {more, 16#71, 16#02}; +dec_huffman_lookup(16#3d, 16#9) -> {more, 16#71, 16#09}; +dec_huffman_lookup(16#3d, 16#a) -> {more, 16#71, 16#17}; +dec_huffman_lookup(16#3d, 16#b) -> {ok, 16#71, 16#28}; +dec_huffman_lookup(16#3d, 16#c) -> {more, 16#76, 16#02}; +dec_huffman_lookup(16#3d, 16#d) -> {more, 16#76, 16#09}; +dec_huffman_lookup(16#3d, 16#e) -> {more, 16#76, 16#17}; +dec_huffman_lookup(16#3d, 16#f) -> {ok, 16#76, 16#28}; +dec_huffman_lookup(16#3e, 16#0) -> {more, 16#6a, 16#03}; +dec_huffman_lookup(16#3e, 16#1) -> {more, 16#6a, 16#06}; +dec_huffman_lookup(16#3e, 16#2) -> {more, 16#6a, 16#0a}; +dec_huffman_lookup(16#3e, 16#3) -> {more, 16#6a, 16#0f}; +dec_huffman_lookup(16#3e, 16#4) -> {more, 16#6a, 16#18}; +dec_huffman_lookup(16#3e, 16#5) -> {more, 16#6a, 16#1f}; +dec_huffman_lookup(16#3e, 16#6) -> {more, 16#6a, 16#29}; +dec_huffman_lookup(16#3e, 16#7) -> {ok, 16#6a, 16#38}; +dec_huffman_lookup(16#3e, 16#8) -> {more, 16#6b, 16#03}; +dec_huffman_lookup(16#3e, 16#9) -> {more, 16#6b, 16#06}; +dec_huffman_lookup(16#3e, 16#a) -> {more, 16#6b, 16#0a}; +dec_huffman_lookup(16#3e, 16#b) -> {more, 16#6b, 16#0f}; +dec_huffman_lookup(16#3e, 16#c) -> {more, 16#6b, 16#18}; +dec_huffman_lookup(16#3e, 16#d) -> {more, 16#6b, 16#1f}; +dec_huffman_lookup(16#3e, 16#e) -> {more, 16#6b, 16#29}; +dec_huffman_lookup(16#3e, 16#f) -> {ok, 16#6b, 16#38}; +dec_huffman_lookup(16#3f, 16#0) -> {more, 16#71, 16#03}; +dec_huffman_lookup(16#3f, 16#1) -> {more, 16#71, 16#06}; +dec_huffman_lookup(16#3f, 16#2) -> {more, 16#71, 16#0a}; +dec_huffman_lookup(16#3f, 16#3) -> {more, 16#71, 16#0f}; +dec_huffman_lookup(16#3f, 16#4) -> {more, 16#71, 16#18}; +dec_huffman_lookup(16#3f, 16#5) -> {more, 16#71, 16#1f}; +dec_huffman_lookup(16#3f, 16#6) -> {more, 16#71, 16#29}; +dec_huffman_lookup(16#3f, 16#7) -> {ok, 16#71, 16#38}; +dec_huffman_lookup(16#3f, 16#8) -> {more, 16#76, 16#03}; +dec_huffman_lookup(16#3f, 16#9) -> {more, 16#76, 16#06}; +dec_huffman_lookup(16#3f, 16#a) -> {more, 16#76, 16#0a}; +dec_huffman_lookup(16#3f, 16#b) -> {more, 16#76, 16#0f}; +dec_huffman_lookup(16#3f, 16#c) -> {more, 16#76, 16#18}; +dec_huffman_lookup(16#3f, 16#d) -> {more, 16#76, 16#1f}; +dec_huffman_lookup(16#3f, 16#e) -> {more, 16#76, 16#29}; +dec_huffman_lookup(16#3f, 16#f) -> {ok, 16#76, 16#38}; +dec_huffman_lookup(16#40, 16#0) -> {more, 16#77, 16#01}; +dec_huffman_lookup(16#40, 16#1) -> {ok, 16#77, 16#16}; +dec_huffman_lookup(16#40, 16#2) -> {more, 16#78, 16#01}; +dec_huffman_lookup(16#40, 16#3) -> {ok, 16#78, 16#16}; +dec_huffman_lookup(16#40, 16#4) -> {more, 16#79, 16#01}; +dec_huffman_lookup(16#40, 16#5) -> {ok, 16#79, 16#16}; +dec_huffman_lookup(16#40, 16#6) -> {more, 16#7a, 16#01}; +dec_huffman_lookup(16#40, 16#7) -> {ok, 16#7a, 16#16}; +dec_huffman_lookup(16#40, 16#8) -> {ok, 16#26, 16#00}; +dec_huffman_lookup(16#40, 16#9) -> {ok, 16#2a, 16#00}; +dec_huffman_lookup(16#40, 16#a) -> {ok, 16#2c, 16#00}; +dec_huffman_lookup(16#40, 16#b) -> {ok, 16#3b, 16#00}; +dec_huffman_lookup(16#40, 16#c) -> {ok, 16#58, 16#00}; +dec_huffman_lookup(16#40, 16#d) -> {ok, 16#5a, 16#00}; +dec_huffman_lookup(16#40, 16#e) -> {more, 
undefined, 16#4b}; +dec_huffman_lookup(16#40, 16#f) -> {ok, undefined, 16#4e}; +dec_huffman_lookup(16#41, 16#0) -> {more, 16#77, 16#02}; +dec_huffman_lookup(16#41, 16#1) -> {more, 16#77, 16#09}; +dec_huffman_lookup(16#41, 16#2) -> {more, 16#77, 16#17}; +dec_huffman_lookup(16#41, 16#3) -> {ok, 16#77, 16#28}; +dec_huffman_lookup(16#41, 16#4) -> {more, 16#78, 16#02}; +dec_huffman_lookup(16#41, 16#5) -> {more, 16#78, 16#09}; +dec_huffman_lookup(16#41, 16#6) -> {more, 16#78, 16#17}; +dec_huffman_lookup(16#41, 16#7) -> {ok, 16#78, 16#28}; +dec_huffman_lookup(16#41, 16#8) -> {more, 16#79, 16#02}; +dec_huffman_lookup(16#41, 16#9) -> {more, 16#79, 16#09}; +dec_huffman_lookup(16#41, 16#a) -> {more, 16#79, 16#17}; +dec_huffman_lookup(16#41, 16#b) -> {ok, 16#79, 16#28}; +dec_huffman_lookup(16#41, 16#c) -> {more, 16#7a, 16#02}; +dec_huffman_lookup(16#41, 16#d) -> {more, 16#7a, 16#09}; +dec_huffman_lookup(16#41, 16#e) -> {more, 16#7a, 16#17}; +dec_huffman_lookup(16#41, 16#f) -> {ok, 16#7a, 16#28}; +dec_huffman_lookup(16#42, 16#0) -> {more, 16#77, 16#03}; +dec_huffman_lookup(16#42, 16#1) -> {more, 16#77, 16#06}; +dec_huffman_lookup(16#42, 16#2) -> {more, 16#77, 16#0a}; +dec_huffman_lookup(16#42, 16#3) -> {more, 16#77, 16#0f}; +dec_huffman_lookup(16#42, 16#4) -> {more, 16#77, 16#18}; +dec_huffman_lookup(16#42, 16#5) -> {more, 16#77, 16#1f}; +dec_huffman_lookup(16#42, 16#6) -> {more, 16#77, 16#29}; +dec_huffman_lookup(16#42, 16#7) -> {ok, 16#77, 16#38}; +dec_huffman_lookup(16#42, 16#8) -> {more, 16#78, 16#03}; +dec_huffman_lookup(16#42, 16#9) -> {more, 16#78, 16#06}; +dec_huffman_lookup(16#42, 16#a) -> {more, 16#78, 16#0a}; +dec_huffman_lookup(16#42, 16#b) -> {more, 16#78, 16#0f}; +dec_huffman_lookup(16#42, 16#c) -> {more, 16#78, 16#18}; +dec_huffman_lookup(16#42, 16#d) -> {more, 16#78, 16#1f}; +dec_huffman_lookup(16#42, 16#e) -> {more, 16#78, 16#29}; +dec_huffman_lookup(16#42, 16#f) -> {ok, 16#78, 16#38}; +dec_huffman_lookup(16#43, 16#0) -> {more, 16#79, 16#03}; +dec_huffman_lookup(16#43, 16#1) -> {more, 16#79, 16#06}; +dec_huffman_lookup(16#43, 16#2) -> {more, 16#79, 16#0a}; +dec_huffman_lookup(16#43, 16#3) -> {more, 16#79, 16#0f}; +dec_huffman_lookup(16#43, 16#4) -> {more, 16#79, 16#18}; +dec_huffman_lookup(16#43, 16#5) -> {more, 16#79, 16#1f}; +dec_huffman_lookup(16#43, 16#6) -> {more, 16#79, 16#29}; +dec_huffman_lookup(16#43, 16#7) -> {ok, 16#79, 16#38}; +dec_huffman_lookup(16#43, 16#8) -> {more, 16#7a, 16#03}; +dec_huffman_lookup(16#43, 16#9) -> {more, 16#7a, 16#06}; +dec_huffman_lookup(16#43, 16#a) -> {more, 16#7a, 16#0a}; +dec_huffman_lookup(16#43, 16#b) -> {more, 16#7a, 16#0f}; +dec_huffman_lookup(16#43, 16#c) -> {more, 16#7a, 16#18}; +dec_huffman_lookup(16#43, 16#d) -> {more, 16#7a, 16#1f}; +dec_huffman_lookup(16#43, 16#e) -> {more, 16#7a, 16#29}; +dec_huffman_lookup(16#43, 16#f) -> {ok, 16#7a, 16#38}; +dec_huffman_lookup(16#44, 16#0) -> {more, 16#26, 16#01}; +dec_huffman_lookup(16#44, 16#1) -> {ok, 16#26, 16#16}; +dec_huffman_lookup(16#44, 16#2) -> {more, 16#2a, 16#01}; +dec_huffman_lookup(16#44, 16#3) -> {ok, 16#2a, 16#16}; +dec_huffman_lookup(16#44, 16#4) -> {more, 16#2c, 16#01}; +dec_huffman_lookup(16#44, 16#5) -> {ok, 16#2c, 16#16}; +dec_huffman_lookup(16#44, 16#6) -> {more, 16#3b, 16#01}; +dec_huffman_lookup(16#44, 16#7) -> {ok, 16#3b, 16#16}; +dec_huffman_lookup(16#44, 16#8) -> {more, 16#58, 16#01}; +dec_huffman_lookup(16#44, 16#9) -> {ok, 16#58, 16#16}; +dec_huffman_lookup(16#44, 16#a) -> {more, 16#5a, 16#01}; +dec_huffman_lookup(16#44, 16#b) -> {ok, 16#5a, 16#16}; 
+dec_huffman_lookup(16#44, 16#c) -> {more, undefined, 16#4c}; +dec_huffman_lookup(16#44, 16#d) -> {more, undefined, 16#4d}; +dec_huffman_lookup(16#44, 16#e) -> {more, undefined, 16#4f}; +dec_huffman_lookup(16#44, 16#f) -> {ok, undefined, 16#51}; +dec_huffman_lookup(16#45, 16#0) -> {more, 16#26, 16#02}; +dec_huffman_lookup(16#45, 16#1) -> {more, 16#26, 16#09}; +dec_huffman_lookup(16#45, 16#2) -> {more, 16#26, 16#17}; +dec_huffman_lookup(16#45, 16#3) -> {ok, 16#26, 16#28}; +dec_huffman_lookup(16#45, 16#4) -> {more, 16#2a, 16#02}; +dec_huffman_lookup(16#45, 16#5) -> {more, 16#2a, 16#09}; +dec_huffman_lookup(16#45, 16#6) -> {more, 16#2a, 16#17}; +dec_huffman_lookup(16#45, 16#7) -> {ok, 16#2a, 16#28}; +dec_huffman_lookup(16#45, 16#8) -> {more, 16#2c, 16#02}; +dec_huffman_lookup(16#45, 16#9) -> {more, 16#2c, 16#09}; +dec_huffman_lookup(16#45, 16#a) -> {more, 16#2c, 16#17}; +dec_huffman_lookup(16#45, 16#b) -> {ok, 16#2c, 16#28}; +dec_huffman_lookup(16#45, 16#c) -> {more, 16#3b, 16#02}; +dec_huffman_lookup(16#45, 16#d) -> {more, 16#3b, 16#09}; +dec_huffman_lookup(16#45, 16#e) -> {more, 16#3b, 16#17}; +dec_huffman_lookup(16#45, 16#f) -> {ok, 16#3b, 16#28}; +dec_huffman_lookup(16#46, 16#0) -> {more, 16#26, 16#03}; +dec_huffman_lookup(16#46, 16#1) -> {more, 16#26, 16#06}; +dec_huffman_lookup(16#46, 16#2) -> {more, 16#26, 16#0a}; +dec_huffman_lookup(16#46, 16#3) -> {more, 16#26, 16#0f}; +dec_huffman_lookup(16#46, 16#4) -> {more, 16#26, 16#18}; +dec_huffman_lookup(16#46, 16#5) -> {more, 16#26, 16#1f}; +dec_huffman_lookup(16#46, 16#6) -> {more, 16#26, 16#29}; +dec_huffman_lookup(16#46, 16#7) -> {ok, 16#26, 16#38}; +dec_huffman_lookup(16#46, 16#8) -> {more, 16#2a, 16#03}; +dec_huffman_lookup(16#46, 16#9) -> {more, 16#2a, 16#06}; +dec_huffman_lookup(16#46, 16#a) -> {more, 16#2a, 16#0a}; +dec_huffman_lookup(16#46, 16#b) -> {more, 16#2a, 16#0f}; +dec_huffman_lookup(16#46, 16#c) -> {more, 16#2a, 16#18}; +dec_huffman_lookup(16#46, 16#d) -> {more, 16#2a, 16#1f}; +dec_huffman_lookup(16#46, 16#e) -> {more, 16#2a, 16#29}; +dec_huffman_lookup(16#46, 16#f) -> {ok, 16#2a, 16#38}; +dec_huffman_lookup(16#47, 16#0) -> {more, 16#2c, 16#03}; +dec_huffman_lookup(16#47, 16#1) -> {more, 16#2c, 16#06}; +dec_huffman_lookup(16#47, 16#2) -> {more, 16#2c, 16#0a}; +dec_huffman_lookup(16#47, 16#3) -> {more, 16#2c, 16#0f}; +dec_huffman_lookup(16#47, 16#4) -> {more, 16#2c, 16#18}; +dec_huffman_lookup(16#47, 16#5) -> {more, 16#2c, 16#1f}; +dec_huffman_lookup(16#47, 16#6) -> {more, 16#2c, 16#29}; +dec_huffman_lookup(16#47, 16#7) -> {ok, 16#2c, 16#38}; +dec_huffman_lookup(16#47, 16#8) -> {more, 16#3b, 16#03}; +dec_huffman_lookup(16#47, 16#9) -> {more, 16#3b, 16#06}; +dec_huffman_lookup(16#47, 16#a) -> {more, 16#3b, 16#0a}; +dec_huffman_lookup(16#47, 16#b) -> {more, 16#3b, 16#0f}; +dec_huffman_lookup(16#47, 16#c) -> {more, 16#3b, 16#18}; +dec_huffman_lookup(16#47, 16#d) -> {more, 16#3b, 16#1f}; +dec_huffman_lookup(16#47, 16#e) -> {more, 16#3b, 16#29}; +dec_huffman_lookup(16#47, 16#f) -> {ok, 16#3b, 16#38}; +dec_huffman_lookup(16#48, 16#0) -> {more, 16#58, 16#02}; +dec_huffman_lookup(16#48, 16#1) -> {more, 16#58, 16#09}; +dec_huffman_lookup(16#48, 16#2) -> {more, 16#58, 16#17}; +dec_huffman_lookup(16#48, 16#3) -> {ok, 16#58, 16#28}; +dec_huffman_lookup(16#48, 16#4) -> {more, 16#5a, 16#02}; +dec_huffman_lookup(16#48, 16#5) -> {more, 16#5a, 16#09}; +dec_huffman_lookup(16#48, 16#6) -> {more, 16#5a, 16#17}; +dec_huffman_lookup(16#48, 16#7) -> {ok, 16#5a, 16#28}; +dec_huffman_lookup(16#48, 16#8) -> {ok, 16#21, 16#00}; 
+dec_huffman_lookup(16#48, 16#9) -> {ok, 16#22, 16#00}; +dec_huffman_lookup(16#48, 16#a) -> {ok, 16#28, 16#00}; +dec_huffman_lookup(16#48, 16#b) -> {ok, 16#29, 16#00}; +dec_huffman_lookup(16#48, 16#c) -> {ok, 16#3f, 16#00}; +dec_huffman_lookup(16#48, 16#d) -> {more, undefined, 16#50}; +dec_huffman_lookup(16#48, 16#e) -> {more, undefined, 16#52}; +dec_huffman_lookup(16#48, 16#f) -> {ok, undefined, 16#54}; +dec_huffman_lookup(16#49, 16#0) -> {more, 16#58, 16#03}; +dec_huffman_lookup(16#49, 16#1) -> {more, 16#58, 16#06}; +dec_huffman_lookup(16#49, 16#2) -> {more, 16#58, 16#0a}; +dec_huffman_lookup(16#49, 16#3) -> {more, 16#58, 16#0f}; +dec_huffman_lookup(16#49, 16#4) -> {more, 16#58, 16#18}; +dec_huffman_lookup(16#49, 16#5) -> {more, 16#58, 16#1f}; +dec_huffman_lookup(16#49, 16#6) -> {more, 16#58, 16#29}; +dec_huffman_lookup(16#49, 16#7) -> {ok, 16#58, 16#38}; +dec_huffman_lookup(16#49, 16#8) -> {more, 16#5a, 16#03}; +dec_huffman_lookup(16#49, 16#9) -> {more, 16#5a, 16#06}; +dec_huffman_lookup(16#49, 16#a) -> {more, 16#5a, 16#0a}; +dec_huffman_lookup(16#49, 16#b) -> {more, 16#5a, 16#0f}; +dec_huffman_lookup(16#49, 16#c) -> {more, 16#5a, 16#18}; +dec_huffman_lookup(16#49, 16#d) -> {more, 16#5a, 16#1f}; +dec_huffman_lookup(16#49, 16#e) -> {more, 16#5a, 16#29}; +dec_huffman_lookup(16#49, 16#f) -> {ok, 16#5a, 16#38}; +dec_huffman_lookup(16#4a, 16#0) -> {more, 16#21, 16#01}; +dec_huffman_lookup(16#4a, 16#1) -> {ok, 16#21, 16#16}; +dec_huffman_lookup(16#4a, 16#2) -> {more, 16#22, 16#01}; +dec_huffman_lookup(16#4a, 16#3) -> {ok, 16#22, 16#16}; +dec_huffman_lookup(16#4a, 16#4) -> {more, 16#28, 16#01}; +dec_huffman_lookup(16#4a, 16#5) -> {ok, 16#28, 16#16}; +dec_huffman_lookup(16#4a, 16#6) -> {more, 16#29, 16#01}; +dec_huffman_lookup(16#4a, 16#7) -> {ok, 16#29, 16#16}; +dec_huffman_lookup(16#4a, 16#8) -> {more, 16#3f, 16#01}; +dec_huffman_lookup(16#4a, 16#9) -> {ok, 16#3f, 16#16}; +dec_huffman_lookup(16#4a, 16#a) -> {ok, 16#27, 16#00}; +dec_huffman_lookup(16#4a, 16#b) -> {ok, 16#2b, 16#00}; +dec_huffman_lookup(16#4a, 16#c) -> {ok, 16#7c, 16#00}; +dec_huffman_lookup(16#4a, 16#d) -> {more, undefined, 16#53}; +dec_huffman_lookup(16#4a, 16#e) -> {more, undefined, 16#55}; +dec_huffman_lookup(16#4a, 16#f) -> {ok, undefined, 16#58}; +dec_huffman_lookup(16#4b, 16#0) -> {more, 16#21, 16#02}; +dec_huffman_lookup(16#4b, 16#1) -> {more, 16#21, 16#09}; +dec_huffman_lookup(16#4b, 16#2) -> {more, 16#21, 16#17}; +dec_huffman_lookup(16#4b, 16#3) -> {ok, 16#21, 16#28}; +dec_huffman_lookup(16#4b, 16#4) -> {more, 16#22, 16#02}; +dec_huffman_lookup(16#4b, 16#5) -> {more, 16#22, 16#09}; +dec_huffman_lookup(16#4b, 16#6) -> {more, 16#22, 16#17}; +dec_huffman_lookup(16#4b, 16#7) -> {ok, 16#22, 16#28}; +dec_huffman_lookup(16#4b, 16#8) -> {more, 16#28, 16#02}; +dec_huffman_lookup(16#4b, 16#9) -> {more, 16#28, 16#09}; +dec_huffman_lookup(16#4b, 16#a) -> {more, 16#28, 16#17}; +dec_huffman_lookup(16#4b, 16#b) -> {ok, 16#28, 16#28}; +dec_huffman_lookup(16#4b, 16#c) -> {more, 16#29, 16#02}; +dec_huffman_lookup(16#4b, 16#d) -> {more, 16#29, 16#09}; +dec_huffman_lookup(16#4b, 16#e) -> {more, 16#29, 16#17}; +dec_huffman_lookup(16#4b, 16#f) -> {ok, 16#29, 16#28}; +dec_huffman_lookup(16#4c, 16#0) -> {more, 16#21, 16#03}; +dec_huffman_lookup(16#4c, 16#1) -> {more, 16#21, 16#06}; +dec_huffman_lookup(16#4c, 16#2) -> {more, 16#21, 16#0a}; +dec_huffman_lookup(16#4c, 16#3) -> {more, 16#21, 16#0f}; +dec_huffman_lookup(16#4c, 16#4) -> {more, 16#21, 16#18}; +dec_huffman_lookup(16#4c, 16#5) -> {more, 16#21, 16#1f}; +dec_huffman_lookup(16#4c, 16#6) 
-> {more, 16#21, 16#29}; +dec_huffman_lookup(16#4c, 16#7) -> {ok, 16#21, 16#38}; +dec_huffman_lookup(16#4c, 16#8) -> {more, 16#22, 16#03}; +dec_huffman_lookup(16#4c, 16#9) -> {more, 16#22, 16#06}; +dec_huffman_lookup(16#4c, 16#a) -> {more, 16#22, 16#0a}; +dec_huffman_lookup(16#4c, 16#b) -> {more, 16#22, 16#0f}; +dec_huffman_lookup(16#4c, 16#c) -> {more, 16#22, 16#18}; +dec_huffman_lookup(16#4c, 16#d) -> {more, 16#22, 16#1f}; +dec_huffman_lookup(16#4c, 16#e) -> {more, 16#22, 16#29}; +dec_huffman_lookup(16#4c, 16#f) -> {ok, 16#22, 16#38}; +dec_huffman_lookup(16#4d, 16#0) -> {more, 16#28, 16#03}; +dec_huffman_lookup(16#4d, 16#1) -> {more, 16#28, 16#06}; +dec_huffman_lookup(16#4d, 16#2) -> {more, 16#28, 16#0a}; +dec_huffman_lookup(16#4d, 16#3) -> {more, 16#28, 16#0f}; +dec_huffman_lookup(16#4d, 16#4) -> {more, 16#28, 16#18}; +dec_huffman_lookup(16#4d, 16#5) -> {more, 16#28, 16#1f}; +dec_huffman_lookup(16#4d, 16#6) -> {more, 16#28, 16#29}; +dec_huffman_lookup(16#4d, 16#7) -> {ok, 16#28, 16#38}; +dec_huffman_lookup(16#4d, 16#8) -> {more, 16#29, 16#03}; +dec_huffman_lookup(16#4d, 16#9) -> {more, 16#29, 16#06}; +dec_huffman_lookup(16#4d, 16#a) -> {more, 16#29, 16#0a}; +dec_huffman_lookup(16#4d, 16#b) -> {more, 16#29, 16#0f}; +dec_huffman_lookup(16#4d, 16#c) -> {more, 16#29, 16#18}; +dec_huffman_lookup(16#4d, 16#d) -> {more, 16#29, 16#1f}; +dec_huffman_lookup(16#4d, 16#e) -> {more, 16#29, 16#29}; +dec_huffman_lookup(16#4d, 16#f) -> {ok, 16#29, 16#38}; +dec_huffman_lookup(16#4e, 16#0) -> {more, 16#3f, 16#02}; +dec_huffman_lookup(16#4e, 16#1) -> {more, 16#3f, 16#09}; +dec_huffman_lookup(16#4e, 16#2) -> {more, 16#3f, 16#17}; +dec_huffman_lookup(16#4e, 16#3) -> {ok, 16#3f, 16#28}; +dec_huffman_lookup(16#4e, 16#4) -> {more, 16#27, 16#01}; +dec_huffman_lookup(16#4e, 16#5) -> {ok, 16#27, 16#16}; +dec_huffman_lookup(16#4e, 16#6) -> {more, 16#2b, 16#01}; +dec_huffman_lookup(16#4e, 16#7) -> {ok, 16#2b, 16#16}; +dec_huffman_lookup(16#4e, 16#8) -> {more, 16#7c, 16#01}; +dec_huffman_lookup(16#4e, 16#9) -> {ok, 16#7c, 16#16}; +dec_huffman_lookup(16#4e, 16#a) -> {ok, 16#23, 16#00}; +dec_huffman_lookup(16#4e, 16#b) -> {ok, 16#3e, 16#00}; +dec_huffman_lookup(16#4e, 16#c) -> {more, undefined, 16#56}; +dec_huffman_lookup(16#4e, 16#d) -> {more, undefined, 16#57}; +dec_huffman_lookup(16#4e, 16#e) -> {more, undefined, 16#59}; +dec_huffman_lookup(16#4e, 16#f) -> {ok, undefined, 16#5a}; +dec_huffman_lookup(16#4f, 16#0) -> {more, 16#3f, 16#03}; +dec_huffman_lookup(16#4f, 16#1) -> {more, 16#3f, 16#06}; +dec_huffman_lookup(16#4f, 16#2) -> {more, 16#3f, 16#0a}; +dec_huffman_lookup(16#4f, 16#3) -> {more, 16#3f, 16#0f}; +dec_huffman_lookup(16#4f, 16#4) -> {more, 16#3f, 16#18}; +dec_huffman_lookup(16#4f, 16#5) -> {more, 16#3f, 16#1f}; +dec_huffman_lookup(16#4f, 16#6) -> {more, 16#3f, 16#29}; +dec_huffman_lookup(16#4f, 16#7) -> {ok, 16#3f, 16#38}; +dec_huffman_lookup(16#4f, 16#8) -> {more, 16#27, 16#02}; +dec_huffman_lookup(16#4f, 16#9) -> {more, 16#27, 16#09}; +dec_huffman_lookup(16#4f, 16#a) -> {more, 16#27, 16#17}; +dec_huffman_lookup(16#4f, 16#b) -> {ok, 16#27, 16#28}; +dec_huffman_lookup(16#4f, 16#c) -> {more, 16#2b, 16#02}; +dec_huffman_lookup(16#4f, 16#d) -> {more, 16#2b, 16#09}; +dec_huffman_lookup(16#4f, 16#e) -> {more, 16#2b, 16#17}; +dec_huffman_lookup(16#4f, 16#f) -> {ok, 16#2b, 16#28}; +dec_huffman_lookup(16#50, 16#0) -> {more, 16#27, 16#03}; +dec_huffman_lookup(16#50, 16#1) -> {more, 16#27, 16#06}; +dec_huffman_lookup(16#50, 16#2) -> {more, 16#27, 16#0a}; +dec_huffman_lookup(16#50, 16#3) -> {more, 16#27, 16#0f}; 
+dec_huffman_lookup(16#50, 16#4) -> {more, 16#27, 16#18}; +dec_huffman_lookup(16#50, 16#5) -> {more, 16#27, 16#1f}; +dec_huffman_lookup(16#50, 16#6) -> {more, 16#27, 16#29}; +dec_huffman_lookup(16#50, 16#7) -> {ok, 16#27, 16#38}; +dec_huffman_lookup(16#50, 16#8) -> {more, 16#2b, 16#03}; +dec_huffman_lookup(16#50, 16#9) -> {more, 16#2b, 16#06}; +dec_huffman_lookup(16#50, 16#a) -> {more, 16#2b, 16#0a}; +dec_huffman_lookup(16#50, 16#b) -> {more, 16#2b, 16#0f}; +dec_huffman_lookup(16#50, 16#c) -> {more, 16#2b, 16#18}; +dec_huffman_lookup(16#50, 16#d) -> {more, 16#2b, 16#1f}; +dec_huffman_lookup(16#50, 16#e) -> {more, 16#2b, 16#29}; +dec_huffman_lookup(16#50, 16#f) -> {ok, 16#2b, 16#38}; +dec_huffman_lookup(16#51, 16#0) -> {more, 16#7c, 16#02}; +dec_huffman_lookup(16#51, 16#1) -> {more, 16#7c, 16#09}; +dec_huffman_lookup(16#51, 16#2) -> {more, 16#7c, 16#17}; +dec_huffman_lookup(16#51, 16#3) -> {ok, 16#7c, 16#28}; +dec_huffman_lookup(16#51, 16#4) -> {more, 16#23, 16#01}; +dec_huffman_lookup(16#51, 16#5) -> {ok, 16#23, 16#16}; +dec_huffman_lookup(16#51, 16#6) -> {more, 16#3e, 16#01}; +dec_huffman_lookup(16#51, 16#7) -> {ok, 16#3e, 16#16}; +dec_huffman_lookup(16#51, 16#8) -> {ok, 16#00, 16#00}; +dec_huffman_lookup(16#51, 16#9) -> {ok, 16#24, 16#00}; +dec_huffman_lookup(16#51, 16#a) -> {ok, 16#40, 16#00}; +dec_huffman_lookup(16#51, 16#b) -> {ok, 16#5b, 16#00}; +dec_huffman_lookup(16#51, 16#c) -> {ok, 16#5d, 16#00}; +dec_huffman_lookup(16#51, 16#d) -> {ok, 16#7e, 16#00}; +dec_huffman_lookup(16#51, 16#e) -> {more, undefined, 16#5b}; +dec_huffman_lookup(16#51, 16#f) -> {ok, undefined, 16#5c}; +dec_huffman_lookup(16#52, 16#0) -> {more, 16#7c, 16#03}; +dec_huffman_lookup(16#52, 16#1) -> {more, 16#7c, 16#06}; +dec_huffman_lookup(16#52, 16#2) -> {more, 16#7c, 16#0a}; +dec_huffman_lookup(16#52, 16#3) -> {more, 16#7c, 16#0f}; +dec_huffman_lookup(16#52, 16#4) -> {more, 16#7c, 16#18}; +dec_huffman_lookup(16#52, 16#5) -> {more, 16#7c, 16#1f}; +dec_huffman_lookup(16#52, 16#6) -> {more, 16#7c, 16#29}; +dec_huffman_lookup(16#52, 16#7) -> {ok, 16#7c, 16#38}; +dec_huffman_lookup(16#52, 16#8) -> {more, 16#23, 16#02}; +dec_huffman_lookup(16#52, 16#9) -> {more, 16#23, 16#09}; +dec_huffman_lookup(16#52, 16#a) -> {more, 16#23, 16#17}; +dec_huffman_lookup(16#52, 16#b) -> {ok, 16#23, 16#28}; +dec_huffman_lookup(16#52, 16#c) -> {more, 16#3e, 16#02}; +dec_huffman_lookup(16#52, 16#d) -> {more, 16#3e, 16#09}; +dec_huffman_lookup(16#52, 16#e) -> {more, 16#3e, 16#17}; +dec_huffman_lookup(16#52, 16#f) -> {ok, 16#3e, 16#28}; +dec_huffman_lookup(16#53, 16#0) -> {more, 16#23, 16#03}; +dec_huffman_lookup(16#53, 16#1) -> {more, 16#23, 16#06}; +dec_huffman_lookup(16#53, 16#2) -> {more, 16#23, 16#0a}; +dec_huffman_lookup(16#53, 16#3) -> {more, 16#23, 16#0f}; +dec_huffman_lookup(16#53, 16#4) -> {more, 16#23, 16#18}; +dec_huffman_lookup(16#53, 16#5) -> {more, 16#23, 16#1f}; +dec_huffman_lookup(16#53, 16#6) -> {more, 16#23, 16#29}; +dec_huffman_lookup(16#53, 16#7) -> {ok, 16#23, 16#38}; +dec_huffman_lookup(16#53, 16#8) -> {more, 16#3e, 16#03}; +dec_huffman_lookup(16#53, 16#9) -> {more, 16#3e, 16#06}; +dec_huffman_lookup(16#53, 16#a) -> {more, 16#3e, 16#0a}; +dec_huffman_lookup(16#53, 16#b) -> {more, 16#3e, 16#0f}; +dec_huffman_lookup(16#53, 16#c) -> {more, 16#3e, 16#18}; +dec_huffman_lookup(16#53, 16#d) -> {more, 16#3e, 16#1f}; +dec_huffman_lookup(16#53, 16#e) -> {more, 16#3e, 16#29}; +dec_huffman_lookup(16#53, 16#f) -> {ok, 16#3e, 16#38}; +dec_huffman_lookup(16#54, 16#0) -> {more, 16#00, 16#01}; +dec_huffman_lookup(16#54, 16#1) -> {ok, 
16#00, 16#16}; +dec_huffman_lookup(16#54, 16#2) -> {more, 16#24, 16#01}; +dec_huffman_lookup(16#54, 16#3) -> {ok, 16#24, 16#16}; +dec_huffman_lookup(16#54, 16#4) -> {more, 16#40, 16#01}; +dec_huffman_lookup(16#54, 16#5) -> {ok, 16#40, 16#16}; +dec_huffman_lookup(16#54, 16#6) -> {more, 16#5b, 16#01}; +dec_huffman_lookup(16#54, 16#7) -> {ok, 16#5b, 16#16}; +dec_huffman_lookup(16#54, 16#8) -> {more, 16#5d, 16#01}; +dec_huffman_lookup(16#54, 16#9) -> {ok, 16#5d, 16#16}; +dec_huffman_lookup(16#54, 16#a) -> {more, 16#7e, 16#01}; +dec_huffman_lookup(16#54, 16#b) -> {ok, 16#7e, 16#16}; +dec_huffman_lookup(16#54, 16#c) -> {ok, 16#5e, 16#00}; +dec_huffman_lookup(16#54, 16#d) -> {ok, 16#7d, 16#00}; +dec_huffman_lookup(16#54, 16#e) -> {more, undefined, 16#5d}; +dec_huffman_lookup(16#54, 16#f) -> {ok, undefined, 16#5e}; +dec_huffman_lookup(16#55, 16#0) -> {more, 16#00, 16#02}; +dec_huffman_lookup(16#55, 16#1) -> {more, 16#00, 16#09}; +dec_huffman_lookup(16#55, 16#2) -> {more, 16#00, 16#17}; +dec_huffman_lookup(16#55, 16#3) -> {ok, 16#00, 16#28}; +dec_huffman_lookup(16#55, 16#4) -> {more, 16#24, 16#02}; +dec_huffman_lookup(16#55, 16#5) -> {more, 16#24, 16#09}; +dec_huffman_lookup(16#55, 16#6) -> {more, 16#24, 16#17}; +dec_huffman_lookup(16#55, 16#7) -> {ok, 16#24, 16#28}; +dec_huffman_lookup(16#55, 16#8) -> {more, 16#40, 16#02}; +dec_huffman_lookup(16#55, 16#9) -> {more, 16#40, 16#09}; +dec_huffman_lookup(16#55, 16#a) -> {more, 16#40, 16#17}; +dec_huffman_lookup(16#55, 16#b) -> {ok, 16#40, 16#28}; +dec_huffman_lookup(16#55, 16#c) -> {more, 16#5b, 16#02}; +dec_huffman_lookup(16#55, 16#d) -> {more, 16#5b, 16#09}; +dec_huffman_lookup(16#55, 16#e) -> {more, 16#5b, 16#17}; +dec_huffman_lookup(16#55, 16#f) -> {ok, 16#5b, 16#28}; +dec_huffman_lookup(16#56, 16#0) -> {more, 16#00, 16#03}; +dec_huffman_lookup(16#56, 16#1) -> {more, 16#00, 16#06}; +dec_huffman_lookup(16#56, 16#2) -> {more, 16#00, 16#0a}; +dec_huffman_lookup(16#56, 16#3) -> {more, 16#00, 16#0f}; +dec_huffman_lookup(16#56, 16#4) -> {more, 16#00, 16#18}; +dec_huffman_lookup(16#56, 16#5) -> {more, 16#00, 16#1f}; +dec_huffman_lookup(16#56, 16#6) -> {more, 16#00, 16#29}; +dec_huffman_lookup(16#56, 16#7) -> {ok, 16#00, 16#38}; +dec_huffman_lookup(16#56, 16#8) -> {more, 16#24, 16#03}; +dec_huffman_lookup(16#56, 16#9) -> {more, 16#24, 16#06}; +dec_huffman_lookup(16#56, 16#a) -> {more, 16#24, 16#0a}; +dec_huffman_lookup(16#56, 16#b) -> {more, 16#24, 16#0f}; +dec_huffman_lookup(16#56, 16#c) -> {more, 16#24, 16#18}; +dec_huffman_lookup(16#56, 16#d) -> {more, 16#24, 16#1f}; +dec_huffman_lookup(16#56, 16#e) -> {more, 16#24, 16#29}; +dec_huffman_lookup(16#56, 16#f) -> {ok, 16#24, 16#38}; +dec_huffman_lookup(16#57, 16#0) -> {more, 16#40, 16#03}; +dec_huffman_lookup(16#57, 16#1) -> {more, 16#40, 16#06}; +dec_huffman_lookup(16#57, 16#2) -> {more, 16#40, 16#0a}; +dec_huffman_lookup(16#57, 16#3) -> {more, 16#40, 16#0f}; +dec_huffman_lookup(16#57, 16#4) -> {more, 16#40, 16#18}; +dec_huffman_lookup(16#57, 16#5) -> {more, 16#40, 16#1f}; +dec_huffman_lookup(16#57, 16#6) -> {more, 16#40, 16#29}; +dec_huffman_lookup(16#57, 16#7) -> {ok, 16#40, 16#38}; +dec_huffman_lookup(16#57, 16#8) -> {more, 16#5b, 16#03}; +dec_huffman_lookup(16#57, 16#9) -> {more, 16#5b, 16#06}; +dec_huffman_lookup(16#57, 16#a) -> {more, 16#5b, 16#0a}; +dec_huffman_lookup(16#57, 16#b) -> {more, 16#5b, 16#0f}; +dec_huffman_lookup(16#57, 16#c) -> {more, 16#5b, 16#18}; +dec_huffman_lookup(16#57, 16#d) -> {more, 16#5b, 16#1f}; +dec_huffman_lookup(16#57, 16#e) -> {more, 16#5b, 16#29}; 
+dec_huffman_lookup(16#57, 16#f) -> {ok, 16#5b, 16#38}; +dec_huffman_lookup(16#58, 16#0) -> {more, 16#5d, 16#02}; +dec_huffman_lookup(16#58, 16#1) -> {more, 16#5d, 16#09}; +dec_huffman_lookup(16#58, 16#2) -> {more, 16#5d, 16#17}; +dec_huffman_lookup(16#58, 16#3) -> {ok, 16#5d, 16#28}; +dec_huffman_lookup(16#58, 16#4) -> {more, 16#7e, 16#02}; +dec_huffman_lookup(16#58, 16#5) -> {more, 16#7e, 16#09}; +dec_huffman_lookup(16#58, 16#6) -> {more, 16#7e, 16#17}; +dec_huffman_lookup(16#58, 16#7) -> {ok, 16#7e, 16#28}; +dec_huffman_lookup(16#58, 16#8) -> {more, 16#5e, 16#01}; +dec_huffman_lookup(16#58, 16#9) -> {ok, 16#5e, 16#16}; +dec_huffman_lookup(16#58, 16#a) -> {more, 16#7d, 16#01}; +dec_huffman_lookup(16#58, 16#b) -> {ok, 16#7d, 16#16}; +dec_huffman_lookup(16#58, 16#c) -> {ok, 16#3c, 16#00}; +dec_huffman_lookup(16#58, 16#d) -> {ok, 16#60, 16#00}; +dec_huffman_lookup(16#58, 16#e) -> {ok, 16#7b, 16#00}; +dec_huffman_lookup(16#58, 16#f) -> {ok, undefined, 16#5f}; +dec_huffman_lookup(16#59, 16#0) -> {more, 16#5d, 16#03}; +dec_huffman_lookup(16#59, 16#1) -> {more, 16#5d, 16#06}; +dec_huffman_lookup(16#59, 16#2) -> {more, 16#5d, 16#0a}; +dec_huffman_lookup(16#59, 16#3) -> {more, 16#5d, 16#0f}; +dec_huffman_lookup(16#59, 16#4) -> {more, 16#5d, 16#18}; +dec_huffman_lookup(16#59, 16#5) -> {more, 16#5d, 16#1f}; +dec_huffman_lookup(16#59, 16#6) -> {more, 16#5d, 16#29}; +dec_huffman_lookup(16#59, 16#7) -> {ok, 16#5d, 16#38}; +dec_huffman_lookup(16#59, 16#8) -> {more, 16#7e, 16#03}; +dec_huffman_lookup(16#59, 16#9) -> {more, 16#7e, 16#06}; +dec_huffman_lookup(16#59, 16#a) -> {more, 16#7e, 16#0a}; +dec_huffman_lookup(16#59, 16#b) -> {more, 16#7e, 16#0f}; +dec_huffman_lookup(16#59, 16#c) -> {more, 16#7e, 16#18}; +dec_huffman_lookup(16#59, 16#d) -> {more, 16#7e, 16#1f}; +dec_huffman_lookup(16#59, 16#e) -> {more, 16#7e, 16#29}; +dec_huffman_lookup(16#59, 16#f) -> {ok, 16#7e, 16#38}; +dec_huffman_lookup(16#5a, 16#0) -> {more, 16#5e, 16#02}; +dec_huffman_lookup(16#5a, 16#1) -> {more, 16#5e, 16#09}; +dec_huffman_lookup(16#5a, 16#2) -> {more, 16#5e, 16#17}; +dec_huffman_lookup(16#5a, 16#3) -> {ok, 16#5e, 16#28}; +dec_huffman_lookup(16#5a, 16#4) -> {more, 16#7d, 16#02}; +dec_huffman_lookup(16#5a, 16#5) -> {more, 16#7d, 16#09}; +dec_huffman_lookup(16#5a, 16#6) -> {more, 16#7d, 16#17}; +dec_huffman_lookup(16#5a, 16#7) -> {ok, 16#7d, 16#28}; +dec_huffman_lookup(16#5a, 16#8) -> {more, 16#3c, 16#01}; +dec_huffman_lookup(16#5a, 16#9) -> {ok, 16#3c, 16#16}; +dec_huffman_lookup(16#5a, 16#a) -> {more, 16#60, 16#01}; +dec_huffman_lookup(16#5a, 16#b) -> {ok, 16#60, 16#16}; +dec_huffman_lookup(16#5a, 16#c) -> {more, 16#7b, 16#01}; +dec_huffman_lookup(16#5a, 16#d) -> {ok, 16#7b, 16#16}; +dec_huffman_lookup(16#5a, 16#e) -> {more, undefined, 16#60}; +dec_huffman_lookup(16#5a, 16#f) -> {ok, undefined, 16#6e}; +dec_huffman_lookup(16#5b, 16#0) -> {more, 16#5e, 16#03}; +dec_huffman_lookup(16#5b, 16#1) -> {more, 16#5e, 16#06}; +dec_huffman_lookup(16#5b, 16#2) -> {more, 16#5e, 16#0a}; +dec_huffman_lookup(16#5b, 16#3) -> {more, 16#5e, 16#0f}; +dec_huffman_lookup(16#5b, 16#4) -> {more, 16#5e, 16#18}; +dec_huffman_lookup(16#5b, 16#5) -> {more, 16#5e, 16#1f}; +dec_huffman_lookup(16#5b, 16#6) -> {more, 16#5e, 16#29}; +dec_huffman_lookup(16#5b, 16#7) -> {ok, 16#5e, 16#38}; +dec_huffman_lookup(16#5b, 16#8) -> {more, 16#7d, 16#03}; +dec_huffman_lookup(16#5b, 16#9) -> {more, 16#7d, 16#06}; +dec_huffman_lookup(16#5b, 16#a) -> {more, 16#7d, 16#0a}; +dec_huffman_lookup(16#5b, 16#b) -> {more, 16#7d, 16#0f}; +dec_huffman_lookup(16#5b, 16#c) -> 
{more, 16#7d, 16#18}; +dec_huffman_lookup(16#5b, 16#d) -> {more, 16#7d, 16#1f}; +dec_huffman_lookup(16#5b, 16#e) -> {more, 16#7d, 16#29}; +dec_huffman_lookup(16#5b, 16#f) -> {ok, 16#7d, 16#38}; +dec_huffman_lookup(16#5c, 16#0) -> {more, 16#3c, 16#02}; +dec_huffman_lookup(16#5c, 16#1) -> {more, 16#3c, 16#09}; +dec_huffman_lookup(16#5c, 16#2) -> {more, 16#3c, 16#17}; +dec_huffman_lookup(16#5c, 16#3) -> {ok, 16#3c, 16#28}; +dec_huffman_lookup(16#5c, 16#4) -> {more, 16#60, 16#02}; +dec_huffman_lookup(16#5c, 16#5) -> {more, 16#60, 16#09}; +dec_huffman_lookup(16#5c, 16#6) -> {more, 16#60, 16#17}; +dec_huffman_lookup(16#5c, 16#7) -> {ok, 16#60, 16#28}; +dec_huffman_lookup(16#5c, 16#8) -> {more, 16#7b, 16#02}; +dec_huffman_lookup(16#5c, 16#9) -> {more, 16#7b, 16#09}; +dec_huffman_lookup(16#5c, 16#a) -> {more, 16#7b, 16#17}; +dec_huffman_lookup(16#5c, 16#b) -> {ok, 16#7b, 16#28}; +dec_huffman_lookup(16#5c, 16#c) -> {more, undefined, 16#61}; +dec_huffman_lookup(16#5c, 16#d) -> {more, undefined, 16#65}; +dec_huffman_lookup(16#5c, 16#e) -> {more, undefined, 16#6f}; +dec_huffman_lookup(16#5c, 16#f) -> {ok, undefined, 16#85}; +dec_huffman_lookup(16#5d, 16#0) -> {more, 16#3c, 16#03}; +dec_huffman_lookup(16#5d, 16#1) -> {more, 16#3c, 16#06}; +dec_huffman_lookup(16#5d, 16#2) -> {more, 16#3c, 16#0a}; +dec_huffman_lookup(16#5d, 16#3) -> {more, 16#3c, 16#0f}; +dec_huffman_lookup(16#5d, 16#4) -> {more, 16#3c, 16#18}; +dec_huffman_lookup(16#5d, 16#5) -> {more, 16#3c, 16#1f}; +dec_huffman_lookup(16#5d, 16#6) -> {more, 16#3c, 16#29}; +dec_huffman_lookup(16#5d, 16#7) -> {ok, 16#3c, 16#38}; +dec_huffman_lookup(16#5d, 16#8) -> {more, 16#60, 16#03}; +dec_huffman_lookup(16#5d, 16#9) -> {more, 16#60, 16#06}; +dec_huffman_lookup(16#5d, 16#a) -> {more, 16#60, 16#0a}; +dec_huffman_lookup(16#5d, 16#b) -> {more, 16#60, 16#0f}; +dec_huffman_lookup(16#5d, 16#c) -> {more, 16#60, 16#18}; +dec_huffman_lookup(16#5d, 16#d) -> {more, 16#60, 16#1f}; +dec_huffman_lookup(16#5d, 16#e) -> {more, 16#60, 16#29}; +dec_huffman_lookup(16#5d, 16#f) -> {ok, 16#60, 16#38}; +dec_huffman_lookup(16#5e, 16#0) -> {more, 16#7b, 16#03}; +dec_huffman_lookup(16#5e, 16#1) -> {more, 16#7b, 16#06}; +dec_huffman_lookup(16#5e, 16#2) -> {more, 16#7b, 16#0a}; +dec_huffman_lookup(16#5e, 16#3) -> {more, 16#7b, 16#0f}; +dec_huffman_lookup(16#5e, 16#4) -> {more, 16#7b, 16#18}; +dec_huffman_lookup(16#5e, 16#5) -> {more, 16#7b, 16#1f}; +dec_huffman_lookup(16#5e, 16#6) -> {more, 16#7b, 16#29}; +dec_huffman_lookup(16#5e, 16#7) -> {ok, 16#7b, 16#38}; +dec_huffman_lookup(16#5e, 16#8) -> {more, undefined, 16#62}; +dec_huffman_lookup(16#5e, 16#9) -> {more, undefined, 16#63}; +dec_huffman_lookup(16#5e, 16#a) -> {more, undefined, 16#66}; +dec_huffman_lookup(16#5e, 16#b) -> {more, undefined, 16#69}; +dec_huffman_lookup(16#5e, 16#c) -> {more, undefined, 16#70}; +dec_huffman_lookup(16#5e, 16#d) -> {more, undefined, 16#77}; +dec_huffman_lookup(16#5e, 16#e) -> {more, undefined, 16#86}; +dec_huffman_lookup(16#5e, 16#f) -> {ok, undefined, 16#99}; +dec_huffman_lookup(16#5f, 16#0) -> {ok, 16#5c, 16#00}; +dec_huffman_lookup(16#5f, 16#1) -> {ok, 16#c3, 16#00}; +dec_huffman_lookup(16#5f, 16#2) -> {ok, 16#d0, 16#00}; +dec_huffman_lookup(16#5f, 16#3) -> {more, undefined, 16#64}; +dec_huffman_lookup(16#5f, 16#4) -> {more, undefined, 16#67}; +dec_huffman_lookup(16#5f, 16#5) -> {more, undefined, 16#68}; +dec_huffman_lookup(16#5f, 16#6) -> {more, undefined, 16#6a}; +dec_huffman_lookup(16#5f, 16#7) -> {more, undefined, 16#6b}; +dec_huffman_lookup(16#5f, 16#8) -> {more, undefined, 16#71}; 
+dec_huffman_lookup(16#5f, 16#9) -> {more, undefined, 16#74}; +dec_huffman_lookup(16#5f, 16#a) -> {more, undefined, 16#78}; +dec_huffman_lookup(16#5f, 16#b) -> {more, undefined, 16#7e}; +dec_huffman_lookup(16#5f, 16#c) -> {more, undefined, 16#87}; +dec_huffman_lookup(16#5f, 16#d) -> {more, undefined, 16#8e}; +dec_huffman_lookup(16#5f, 16#e) -> {more, undefined, 16#9a}; +dec_huffman_lookup(16#5f, 16#f) -> {ok, undefined, 16#a9}; +dec_huffman_lookup(16#60, 16#0) -> {more, 16#5c, 16#01}; +dec_huffman_lookup(16#60, 16#1) -> {ok, 16#5c, 16#16}; +dec_huffman_lookup(16#60, 16#2) -> {more, 16#c3, 16#01}; +dec_huffman_lookup(16#60, 16#3) -> {ok, 16#c3, 16#16}; +dec_huffman_lookup(16#60, 16#4) -> {more, 16#d0, 16#01}; +dec_huffman_lookup(16#60, 16#5) -> {ok, 16#d0, 16#16}; +dec_huffman_lookup(16#60, 16#6) -> {ok, 16#80, 16#00}; +dec_huffman_lookup(16#60, 16#7) -> {ok, 16#82, 16#00}; +dec_huffman_lookup(16#60, 16#8) -> {ok, 16#83, 16#00}; +dec_huffman_lookup(16#60, 16#9) -> {ok, 16#a2, 16#00}; +dec_huffman_lookup(16#60, 16#a) -> {ok, 16#b8, 16#00}; +dec_huffman_lookup(16#60, 16#b) -> {ok, 16#c2, 16#00}; +dec_huffman_lookup(16#60, 16#c) -> {ok, 16#e0, 16#00}; +dec_huffman_lookup(16#60, 16#d) -> {ok, 16#e2, 16#00}; +dec_huffman_lookup(16#60, 16#e) -> {more, undefined, 16#6c}; +dec_huffman_lookup(16#60, 16#f) -> {more, undefined, 16#6d}; +dec_huffman_lookup(16#61, 16#0) -> {more, 16#5c, 16#02}; +dec_huffman_lookup(16#61, 16#1) -> {more, 16#5c, 16#09}; +dec_huffman_lookup(16#61, 16#2) -> {more, 16#5c, 16#17}; +dec_huffman_lookup(16#61, 16#3) -> {ok, 16#5c, 16#28}; +dec_huffman_lookup(16#61, 16#4) -> {more, 16#c3, 16#02}; +dec_huffman_lookup(16#61, 16#5) -> {more, 16#c3, 16#09}; +dec_huffman_lookup(16#61, 16#6) -> {more, 16#c3, 16#17}; +dec_huffman_lookup(16#61, 16#7) -> {ok, 16#c3, 16#28}; +dec_huffman_lookup(16#61, 16#8) -> {more, 16#d0, 16#02}; +dec_huffman_lookup(16#61, 16#9) -> {more, 16#d0, 16#09}; +dec_huffman_lookup(16#61, 16#a) -> {more, 16#d0, 16#17}; +dec_huffman_lookup(16#61, 16#b) -> {ok, 16#d0, 16#28}; +dec_huffman_lookup(16#61, 16#c) -> {more, 16#80, 16#01}; +dec_huffman_lookup(16#61, 16#d) -> {ok, 16#80, 16#16}; +dec_huffman_lookup(16#61, 16#e) -> {more, 16#82, 16#01}; +dec_huffman_lookup(16#61, 16#f) -> {ok, 16#82, 16#16}; +dec_huffman_lookup(16#62, 16#0) -> {more, 16#5c, 16#03}; +dec_huffman_lookup(16#62, 16#1) -> {more, 16#5c, 16#06}; +dec_huffman_lookup(16#62, 16#2) -> {more, 16#5c, 16#0a}; +dec_huffman_lookup(16#62, 16#3) -> {more, 16#5c, 16#0f}; +dec_huffman_lookup(16#62, 16#4) -> {more, 16#5c, 16#18}; +dec_huffman_lookup(16#62, 16#5) -> {more, 16#5c, 16#1f}; +dec_huffman_lookup(16#62, 16#6) -> {more, 16#5c, 16#29}; +dec_huffman_lookup(16#62, 16#7) -> {ok, 16#5c, 16#38}; +dec_huffman_lookup(16#62, 16#8) -> {more, 16#c3, 16#03}; +dec_huffman_lookup(16#62, 16#9) -> {more, 16#c3, 16#06}; +dec_huffman_lookup(16#62, 16#a) -> {more, 16#c3, 16#0a}; +dec_huffman_lookup(16#62, 16#b) -> {more, 16#c3, 16#0f}; +dec_huffman_lookup(16#62, 16#c) -> {more, 16#c3, 16#18}; +dec_huffman_lookup(16#62, 16#d) -> {more, 16#c3, 16#1f}; +dec_huffman_lookup(16#62, 16#e) -> {more, 16#c3, 16#29}; +dec_huffman_lookup(16#62, 16#f) -> {ok, 16#c3, 16#38}; +dec_huffman_lookup(16#63, 16#0) -> {more, 16#d0, 16#03}; +dec_huffman_lookup(16#63, 16#1) -> {more, 16#d0, 16#06}; +dec_huffman_lookup(16#63, 16#2) -> {more, 16#d0, 16#0a}; +dec_huffman_lookup(16#63, 16#3) -> {more, 16#d0, 16#0f}; +dec_huffman_lookup(16#63, 16#4) -> {more, 16#d0, 16#18}; +dec_huffman_lookup(16#63, 16#5) -> {more, 16#d0, 16#1f}; 
+dec_huffman_lookup(16#63, 16#6) -> {more, 16#d0, 16#29}; +dec_huffman_lookup(16#63, 16#7) -> {ok, 16#d0, 16#38}; +dec_huffman_lookup(16#63, 16#8) -> {more, 16#80, 16#02}; +dec_huffman_lookup(16#63, 16#9) -> {more, 16#80, 16#09}; +dec_huffman_lookup(16#63, 16#a) -> {more, 16#80, 16#17}; +dec_huffman_lookup(16#63, 16#b) -> {ok, 16#80, 16#28}; +dec_huffman_lookup(16#63, 16#c) -> {more, 16#82, 16#02}; +dec_huffman_lookup(16#63, 16#d) -> {more, 16#82, 16#09}; +dec_huffman_lookup(16#63, 16#e) -> {more, 16#82, 16#17}; +dec_huffman_lookup(16#63, 16#f) -> {ok, 16#82, 16#28}; +dec_huffman_lookup(16#64, 16#0) -> {more, 16#80, 16#03}; +dec_huffman_lookup(16#64, 16#1) -> {more, 16#80, 16#06}; +dec_huffman_lookup(16#64, 16#2) -> {more, 16#80, 16#0a}; +dec_huffman_lookup(16#64, 16#3) -> {more, 16#80, 16#0f}; +dec_huffman_lookup(16#64, 16#4) -> {more, 16#80, 16#18}; +dec_huffman_lookup(16#64, 16#5) -> {more, 16#80, 16#1f}; +dec_huffman_lookup(16#64, 16#6) -> {more, 16#80, 16#29}; +dec_huffman_lookup(16#64, 16#7) -> {ok, 16#80, 16#38}; +dec_huffman_lookup(16#64, 16#8) -> {more, 16#82, 16#03}; +dec_huffman_lookup(16#64, 16#9) -> {more, 16#82, 16#06}; +dec_huffman_lookup(16#64, 16#a) -> {more, 16#82, 16#0a}; +dec_huffman_lookup(16#64, 16#b) -> {more, 16#82, 16#0f}; +dec_huffman_lookup(16#64, 16#c) -> {more, 16#82, 16#18}; +dec_huffman_lookup(16#64, 16#d) -> {more, 16#82, 16#1f}; +dec_huffman_lookup(16#64, 16#e) -> {more, 16#82, 16#29}; +dec_huffman_lookup(16#64, 16#f) -> {ok, 16#82, 16#38}; +dec_huffman_lookup(16#65, 16#0) -> {more, 16#83, 16#01}; +dec_huffman_lookup(16#65, 16#1) -> {ok, 16#83, 16#16}; +dec_huffman_lookup(16#65, 16#2) -> {more, 16#a2, 16#01}; +dec_huffman_lookup(16#65, 16#3) -> {ok, 16#a2, 16#16}; +dec_huffman_lookup(16#65, 16#4) -> {more, 16#b8, 16#01}; +dec_huffman_lookup(16#65, 16#5) -> {ok, 16#b8, 16#16}; +dec_huffman_lookup(16#65, 16#6) -> {more, 16#c2, 16#01}; +dec_huffman_lookup(16#65, 16#7) -> {ok, 16#c2, 16#16}; +dec_huffman_lookup(16#65, 16#8) -> {more, 16#e0, 16#01}; +dec_huffman_lookup(16#65, 16#9) -> {ok, 16#e0, 16#16}; +dec_huffman_lookup(16#65, 16#a) -> {more, 16#e2, 16#01}; +dec_huffman_lookup(16#65, 16#b) -> {ok, 16#e2, 16#16}; +dec_huffman_lookup(16#65, 16#c) -> {ok, 16#99, 16#00}; +dec_huffman_lookup(16#65, 16#d) -> {ok, 16#a1, 16#00}; +dec_huffman_lookup(16#65, 16#e) -> {ok, 16#a7, 16#00}; +dec_huffman_lookup(16#65, 16#f) -> {ok, 16#ac, 16#00}; +dec_huffman_lookup(16#66, 16#0) -> {more, 16#83, 16#02}; +dec_huffman_lookup(16#66, 16#1) -> {more, 16#83, 16#09}; +dec_huffman_lookup(16#66, 16#2) -> {more, 16#83, 16#17}; +dec_huffman_lookup(16#66, 16#3) -> {ok, 16#83, 16#28}; +dec_huffman_lookup(16#66, 16#4) -> {more, 16#a2, 16#02}; +dec_huffman_lookup(16#66, 16#5) -> {more, 16#a2, 16#09}; +dec_huffman_lookup(16#66, 16#6) -> {more, 16#a2, 16#17}; +dec_huffman_lookup(16#66, 16#7) -> {ok, 16#a2, 16#28}; +dec_huffman_lookup(16#66, 16#8) -> {more, 16#b8, 16#02}; +dec_huffman_lookup(16#66, 16#9) -> {more, 16#b8, 16#09}; +dec_huffman_lookup(16#66, 16#a) -> {more, 16#b8, 16#17}; +dec_huffman_lookup(16#66, 16#b) -> {ok, 16#b8, 16#28}; +dec_huffman_lookup(16#66, 16#c) -> {more, 16#c2, 16#02}; +dec_huffman_lookup(16#66, 16#d) -> {more, 16#c2, 16#09}; +dec_huffman_lookup(16#66, 16#e) -> {more, 16#c2, 16#17}; +dec_huffman_lookup(16#66, 16#f) -> {ok, 16#c2, 16#28}; +dec_huffman_lookup(16#67, 16#0) -> {more, 16#83, 16#03}; +dec_huffman_lookup(16#67, 16#1) -> {more, 16#83, 16#06}; +dec_huffman_lookup(16#67, 16#2) -> {more, 16#83, 16#0a}; +dec_huffman_lookup(16#67, 16#3) -> {more, 16#83, 
16#0f}; +dec_huffman_lookup(16#67, 16#4) -> {more, 16#83, 16#18}; +dec_huffman_lookup(16#67, 16#5) -> {more, 16#83, 16#1f}; +dec_huffman_lookup(16#67, 16#6) -> {more, 16#83, 16#29}; +dec_huffman_lookup(16#67, 16#7) -> {ok, 16#83, 16#38}; +dec_huffman_lookup(16#67, 16#8) -> {more, 16#a2, 16#03}; +dec_huffman_lookup(16#67, 16#9) -> {more, 16#a2, 16#06}; +dec_huffman_lookup(16#67, 16#a) -> {more, 16#a2, 16#0a}; +dec_huffman_lookup(16#67, 16#b) -> {more, 16#a2, 16#0f}; +dec_huffman_lookup(16#67, 16#c) -> {more, 16#a2, 16#18}; +dec_huffman_lookup(16#67, 16#d) -> {more, 16#a2, 16#1f}; +dec_huffman_lookup(16#67, 16#e) -> {more, 16#a2, 16#29}; +dec_huffman_lookup(16#67, 16#f) -> {ok, 16#a2, 16#38}; +dec_huffman_lookup(16#68, 16#0) -> {more, 16#b8, 16#03}; +dec_huffman_lookup(16#68, 16#1) -> {more, 16#b8, 16#06}; +dec_huffman_lookup(16#68, 16#2) -> {more, 16#b8, 16#0a}; +dec_huffman_lookup(16#68, 16#3) -> {more, 16#b8, 16#0f}; +dec_huffman_lookup(16#68, 16#4) -> {more, 16#b8, 16#18}; +dec_huffman_lookup(16#68, 16#5) -> {more, 16#b8, 16#1f}; +dec_huffman_lookup(16#68, 16#6) -> {more, 16#b8, 16#29}; +dec_huffman_lookup(16#68, 16#7) -> {ok, 16#b8, 16#38}; +dec_huffman_lookup(16#68, 16#8) -> {more, 16#c2, 16#03}; +dec_huffman_lookup(16#68, 16#9) -> {more, 16#c2, 16#06}; +dec_huffman_lookup(16#68, 16#a) -> {more, 16#c2, 16#0a}; +dec_huffman_lookup(16#68, 16#b) -> {more, 16#c2, 16#0f}; +dec_huffman_lookup(16#68, 16#c) -> {more, 16#c2, 16#18}; +dec_huffman_lookup(16#68, 16#d) -> {more, 16#c2, 16#1f}; +dec_huffman_lookup(16#68, 16#e) -> {more, 16#c2, 16#29}; +dec_huffman_lookup(16#68, 16#f) -> {ok, 16#c2, 16#38}; +dec_huffman_lookup(16#69, 16#0) -> {more, 16#e0, 16#02}; +dec_huffman_lookup(16#69, 16#1) -> {more, 16#e0, 16#09}; +dec_huffman_lookup(16#69, 16#2) -> {more, 16#e0, 16#17}; +dec_huffman_lookup(16#69, 16#3) -> {ok, 16#e0, 16#28}; +dec_huffman_lookup(16#69, 16#4) -> {more, 16#e2, 16#02}; +dec_huffman_lookup(16#69, 16#5) -> {more, 16#e2, 16#09}; +dec_huffman_lookup(16#69, 16#6) -> {more, 16#e2, 16#17}; +dec_huffman_lookup(16#69, 16#7) -> {ok, 16#e2, 16#28}; +dec_huffman_lookup(16#69, 16#8) -> {more, 16#99, 16#01}; +dec_huffman_lookup(16#69, 16#9) -> {ok, 16#99, 16#16}; +dec_huffman_lookup(16#69, 16#a) -> {more, 16#a1, 16#01}; +dec_huffman_lookup(16#69, 16#b) -> {ok, 16#a1, 16#16}; +dec_huffman_lookup(16#69, 16#c) -> {more, 16#a7, 16#01}; +dec_huffman_lookup(16#69, 16#d) -> {ok, 16#a7, 16#16}; +dec_huffman_lookup(16#69, 16#e) -> {more, 16#ac, 16#01}; +dec_huffman_lookup(16#69, 16#f) -> {ok, 16#ac, 16#16}; +dec_huffman_lookup(16#6a, 16#0) -> {more, 16#e0, 16#03}; +dec_huffman_lookup(16#6a, 16#1) -> {more, 16#e0, 16#06}; +dec_huffman_lookup(16#6a, 16#2) -> {more, 16#e0, 16#0a}; +dec_huffman_lookup(16#6a, 16#3) -> {more, 16#e0, 16#0f}; +dec_huffman_lookup(16#6a, 16#4) -> {more, 16#e0, 16#18}; +dec_huffman_lookup(16#6a, 16#5) -> {more, 16#e0, 16#1f}; +dec_huffman_lookup(16#6a, 16#6) -> {more, 16#e0, 16#29}; +dec_huffman_lookup(16#6a, 16#7) -> {ok, 16#e0, 16#38}; +dec_huffman_lookup(16#6a, 16#8) -> {more, 16#e2, 16#03}; +dec_huffman_lookup(16#6a, 16#9) -> {more, 16#e2, 16#06}; +dec_huffman_lookup(16#6a, 16#a) -> {more, 16#e2, 16#0a}; +dec_huffman_lookup(16#6a, 16#b) -> {more, 16#e2, 16#0f}; +dec_huffman_lookup(16#6a, 16#c) -> {more, 16#e2, 16#18}; +dec_huffman_lookup(16#6a, 16#d) -> {more, 16#e2, 16#1f}; +dec_huffman_lookup(16#6a, 16#e) -> {more, 16#e2, 16#29}; +dec_huffman_lookup(16#6a, 16#f) -> {ok, 16#e2, 16#38}; +dec_huffman_lookup(16#6b, 16#0) -> {more, 16#99, 16#02}; +dec_huffman_lookup(16#6b, 16#1) 
-> {more, 16#99, 16#09}; +dec_huffman_lookup(16#6b, 16#2) -> {more, 16#99, 16#17}; +dec_huffman_lookup(16#6b, 16#3) -> {ok, 16#99, 16#28}; +dec_huffman_lookup(16#6b, 16#4) -> {more, 16#a1, 16#02}; +dec_huffman_lookup(16#6b, 16#5) -> {more, 16#a1, 16#09}; +dec_huffman_lookup(16#6b, 16#6) -> {more, 16#a1, 16#17}; +dec_huffman_lookup(16#6b, 16#7) -> {ok, 16#a1, 16#28}; +dec_huffman_lookup(16#6b, 16#8) -> {more, 16#a7, 16#02}; +dec_huffman_lookup(16#6b, 16#9) -> {more, 16#a7, 16#09}; +dec_huffman_lookup(16#6b, 16#a) -> {more, 16#a7, 16#17}; +dec_huffman_lookup(16#6b, 16#b) -> {ok, 16#a7, 16#28}; +dec_huffman_lookup(16#6b, 16#c) -> {more, 16#ac, 16#02}; +dec_huffman_lookup(16#6b, 16#d) -> {more, 16#ac, 16#09}; +dec_huffman_lookup(16#6b, 16#e) -> {more, 16#ac, 16#17}; +dec_huffman_lookup(16#6b, 16#f) -> {ok, 16#ac, 16#28}; +dec_huffman_lookup(16#6c, 16#0) -> {more, 16#99, 16#03}; +dec_huffman_lookup(16#6c, 16#1) -> {more, 16#99, 16#06}; +dec_huffman_lookup(16#6c, 16#2) -> {more, 16#99, 16#0a}; +dec_huffman_lookup(16#6c, 16#3) -> {more, 16#99, 16#0f}; +dec_huffman_lookup(16#6c, 16#4) -> {more, 16#99, 16#18}; +dec_huffman_lookup(16#6c, 16#5) -> {more, 16#99, 16#1f}; +dec_huffman_lookup(16#6c, 16#6) -> {more, 16#99, 16#29}; +dec_huffman_lookup(16#6c, 16#7) -> {ok, 16#99, 16#38}; +dec_huffman_lookup(16#6c, 16#8) -> {more, 16#a1, 16#03}; +dec_huffman_lookup(16#6c, 16#9) -> {more, 16#a1, 16#06}; +dec_huffman_lookup(16#6c, 16#a) -> {more, 16#a1, 16#0a}; +dec_huffman_lookup(16#6c, 16#b) -> {more, 16#a1, 16#0f}; +dec_huffman_lookup(16#6c, 16#c) -> {more, 16#a1, 16#18}; +dec_huffman_lookup(16#6c, 16#d) -> {more, 16#a1, 16#1f}; +dec_huffman_lookup(16#6c, 16#e) -> {more, 16#a1, 16#29}; +dec_huffman_lookup(16#6c, 16#f) -> {ok, 16#a1, 16#38}; +dec_huffman_lookup(16#6d, 16#0) -> {more, 16#a7, 16#03}; +dec_huffman_lookup(16#6d, 16#1) -> {more, 16#a7, 16#06}; +dec_huffman_lookup(16#6d, 16#2) -> {more, 16#a7, 16#0a}; +dec_huffman_lookup(16#6d, 16#3) -> {more, 16#a7, 16#0f}; +dec_huffman_lookup(16#6d, 16#4) -> {more, 16#a7, 16#18}; +dec_huffman_lookup(16#6d, 16#5) -> {more, 16#a7, 16#1f}; +dec_huffman_lookup(16#6d, 16#6) -> {more, 16#a7, 16#29}; +dec_huffman_lookup(16#6d, 16#7) -> {ok, 16#a7, 16#38}; +dec_huffman_lookup(16#6d, 16#8) -> {more, 16#ac, 16#03}; +dec_huffman_lookup(16#6d, 16#9) -> {more, 16#ac, 16#06}; +dec_huffman_lookup(16#6d, 16#a) -> {more, 16#ac, 16#0a}; +dec_huffman_lookup(16#6d, 16#b) -> {more, 16#ac, 16#0f}; +dec_huffman_lookup(16#6d, 16#c) -> {more, 16#ac, 16#18}; +dec_huffman_lookup(16#6d, 16#d) -> {more, 16#ac, 16#1f}; +dec_huffman_lookup(16#6d, 16#e) -> {more, 16#ac, 16#29}; +dec_huffman_lookup(16#6d, 16#f) -> {ok, 16#ac, 16#38}; +dec_huffman_lookup(16#6e, 16#0) -> {more, undefined, 16#72}; +dec_huffman_lookup(16#6e, 16#1) -> {more, undefined, 16#73}; +dec_huffman_lookup(16#6e, 16#2) -> {more, undefined, 16#75}; +dec_huffman_lookup(16#6e, 16#3) -> {more, undefined, 16#76}; +dec_huffman_lookup(16#6e, 16#4) -> {more, undefined, 16#79}; +dec_huffman_lookup(16#6e, 16#5) -> {more, undefined, 16#7b}; +dec_huffman_lookup(16#6e, 16#6) -> {more, undefined, 16#7f}; +dec_huffman_lookup(16#6e, 16#7) -> {more, undefined, 16#82}; +dec_huffman_lookup(16#6e, 16#8) -> {more, undefined, 16#88}; +dec_huffman_lookup(16#6e, 16#9) -> {more, undefined, 16#8b}; +dec_huffman_lookup(16#6e, 16#a) -> {more, undefined, 16#8f}; +dec_huffman_lookup(16#6e, 16#b) -> {more, undefined, 16#92}; +dec_huffman_lookup(16#6e, 16#c) -> {more, undefined, 16#9b}; +dec_huffman_lookup(16#6e, 16#d) -> {more, undefined, 16#a2}; 
+dec_huffman_lookup(16#6e, 16#e) -> {more, undefined, 16#aa}; +dec_huffman_lookup(16#6e, 16#f) -> {ok, undefined, 16#b4}; +dec_huffman_lookup(16#6f, 16#0) -> {ok, 16#b0, 16#00}; +dec_huffman_lookup(16#6f, 16#1) -> {ok, 16#b1, 16#00}; +dec_huffman_lookup(16#6f, 16#2) -> {ok, 16#b3, 16#00}; +dec_huffman_lookup(16#6f, 16#3) -> {ok, 16#d1, 16#00}; +dec_huffman_lookup(16#6f, 16#4) -> {ok, 16#d8, 16#00}; +dec_huffman_lookup(16#6f, 16#5) -> {ok, 16#d9, 16#00}; +dec_huffman_lookup(16#6f, 16#6) -> {ok, 16#e3, 16#00}; +dec_huffman_lookup(16#6f, 16#7) -> {ok, 16#e5, 16#00}; +dec_huffman_lookup(16#6f, 16#8) -> {ok, 16#e6, 16#00}; +dec_huffman_lookup(16#6f, 16#9) -> {more, undefined, 16#7a}; +dec_huffman_lookup(16#6f, 16#a) -> {more, undefined, 16#7c}; +dec_huffman_lookup(16#6f, 16#b) -> {more, undefined, 16#7d}; +dec_huffman_lookup(16#6f, 16#c) -> {more, undefined, 16#80}; +dec_huffman_lookup(16#6f, 16#d) -> {more, undefined, 16#81}; +dec_huffman_lookup(16#6f, 16#e) -> {more, undefined, 16#83}; +dec_huffman_lookup(16#6f, 16#f) -> {more, undefined, 16#84}; +dec_huffman_lookup(16#70, 16#0) -> {more, 16#b0, 16#01}; +dec_huffman_lookup(16#70, 16#1) -> {ok, 16#b0, 16#16}; +dec_huffman_lookup(16#70, 16#2) -> {more, 16#b1, 16#01}; +dec_huffman_lookup(16#70, 16#3) -> {ok, 16#b1, 16#16}; +dec_huffman_lookup(16#70, 16#4) -> {more, 16#b3, 16#01}; +dec_huffman_lookup(16#70, 16#5) -> {ok, 16#b3, 16#16}; +dec_huffman_lookup(16#70, 16#6) -> {more, 16#d1, 16#01}; +dec_huffman_lookup(16#70, 16#7) -> {ok, 16#d1, 16#16}; +dec_huffman_lookup(16#70, 16#8) -> {more, 16#d8, 16#01}; +dec_huffman_lookup(16#70, 16#9) -> {ok, 16#d8, 16#16}; +dec_huffman_lookup(16#70, 16#a) -> {more, 16#d9, 16#01}; +dec_huffman_lookup(16#70, 16#b) -> {ok, 16#d9, 16#16}; +dec_huffman_lookup(16#70, 16#c) -> {more, 16#e3, 16#01}; +dec_huffman_lookup(16#70, 16#d) -> {ok, 16#e3, 16#16}; +dec_huffman_lookup(16#70, 16#e) -> {more, 16#e5, 16#01}; +dec_huffman_lookup(16#70, 16#f) -> {ok, 16#e5, 16#16}; +dec_huffman_lookup(16#71, 16#0) -> {more, 16#b0, 16#02}; +dec_huffman_lookup(16#71, 16#1) -> {more, 16#b0, 16#09}; +dec_huffman_lookup(16#71, 16#2) -> {more, 16#b0, 16#17}; +dec_huffman_lookup(16#71, 16#3) -> {ok, 16#b0, 16#28}; +dec_huffman_lookup(16#71, 16#4) -> {more, 16#b1, 16#02}; +dec_huffman_lookup(16#71, 16#5) -> {more, 16#b1, 16#09}; +dec_huffman_lookup(16#71, 16#6) -> {more, 16#b1, 16#17}; +dec_huffman_lookup(16#71, 16#7) -> {ok, 16#b1, 16#28}; +dec_huffman_lookup(16#71, 16#8) -> {more, 16#b3, 16#02}; +dec_huffman_lookup(16#71, 16#9) -> {more, 16#b3, 16#09}; +dec_huffman_lookup(16#71, 16#a) -> {more, 16#b3, 16#17}; +dec_huffman_lookup(16#71, 16#b) -> {ok, 16#b3, 16#28}; +dec_huffman_lookup(16#71, 16#c) -> {more, 16#d1, 16#02}; +dec_huffman_lookup(16#71, 16#d) -> {more, 16#d1, 16#09}; +dec_huffman_lookup(16#71, 16#e) -> {more, 16#d1, 16#17}; +dec_huffman_lookup(16#71, 16#f) -> {ok, 16#d1, 16#28}; +dec_huffman_lookup(16#72, 16#0) -> {more, 16#b0, 16#03}; +dec_huffman_lookup(16#72, 16#1) -> {more, 16#b0, 16#06}; +dec_huffman_lookup(16#72, 16#2) -> {more, 16#b0, 16#0a}; +dec_huffman_lookup(16#72, 16#3) -> {more, 16#b0, 16#0f}; +dec_huffman_lookup(16#72, 16#4) -> {more, 16#b0, 16#18}; +dec_huffman_lookup(16#72, 16#5) -> {more, 16#b0, 16#1f}; +dec_huffman_lookup(16#72, 16#6) -> {more, 16#b0, 16#29}; +dec_huffman_lookup(16#72, 16#7) -> {ok, 16#b0, 16#38}; +dec_huffman_lookup(16#72, 16#8) -> {more, 16#b1, 16#03}; +dec_huffman_lookup(16#72, 16#9) -> {more, 16#b1, 16#06}; +dec_huffman_lookup(16#72, 16#a) -> {more, 16#b1, 16#0a}; +dec_huffman_lookup(16#72, 
16#b) -> {more, 16#b1, 16#0f}; +dec_huffman_lookup(16#72, 16#c) -> {more, 16#b1, 16#18}; +dec_huffman_lookup(16#72, 16#d) -> {more, 16#b1, 16#1f}; +dec_huffman_lookup(16#72, 16#e) -> {more, 16#b1, 16#29}; +dec_huffman_lookup(16#72, 16#f) -> {ok, 16#b1, 16#38}; +dec_huffman_lookup(16#73, 16#0) -> {more, 16#b3, 16#03}; +dec_huffman_lookup(16#73, 16#1) -> {more, 16#b3, 16#06}; +dec_huffman_lookup(16#73, 16#2) -> {more, 16#b3, 16#0a}; +dec_huffman_lookup(16#73, 16#3) -> {more, 16#b3, 16#0f}; +dec_huffman_lookup(16#73, 16#4) -> {more, 16#b3, 16#18}; +dec_huffman_lookup(16#73, 16#5) -> {more, 16#b3, 16#1f}; +dec_huffman_lookup(16#73, 16#6) -> {more, 16#b3, 16#29}; +dec_huffman_lookup(16#73, 16#7) -> {ok, 16#b3, 16#38}; +dec_huffman_lookup(16#73, 16#8) -> {more, 16#d1, 16#03}; +dec_huffman_lookup(16#73, 16#9) -> {more, 16#d1, 16#06}; +dec_huffman_lookup(16#73, 16#a) -> {more, 16#d1, 16#0a}; +dec_huffman_lookup(16#73, 16#b) -> {more, 16#d1, 16#0f}; +dec_huffman_lookup(16#73, 16#c) -> {more, 16#d1, 16#18}; +dec_huffman_lookup(16#73, 16#d) -> {more, 16#d1, 16#1f}; +dec_huffman_lookup(16#73, 16#e) -> {more, 16#d1, 16#29}; +dec_huffman_lookup(16#73, 16#f) -> {ok, 16#d1, 16#38}; +dec_huffman_lookup(16#74, 16#0) -> {more, 16#d8, 16#02}; +dec_huffman_lookup(16#74, 16#1) -> {more, 16#d8, 16#09}; +dec_huffman_lookup(16#74, 16#2) -> {more, 16#d8, 16#17}; +dec_huffman_lookup(16#74, 16#3) -> {ok, 16#d8, 16#28}; +dec_huffman_lookup(16#74, 16#4) -> {more, 16#d9, 16#02}; +dec_huffman_lookup(16#74, 16#5) -> {more, 16#d9, 16#09}; +dec_huffman_lookup(16#74, 16#6) -> {more, 16#d9, 16#17}; +dec_huffman_lookup(16#74, 16#7) -> {ok, 16#d9, 16#28}; +dec_huffman_lookup(16#74, 16#8) -> {more, 16#e3, 16#02}; +dec_huffman_lookup(16#74, 16#9) -> {more, 16#e3, 16#09}; +dec_huffman_lookup(16#74, 16#a) -> {more, 16#e3, 16#17}; +dec_huffman_lookup(16#74, 16#b) -> {ok, 16#e3, 16#28}; +dec_huffman_lookup(16#74, 16#c) -> {more, 16#e5, 16#02}; +dec_huffman_lookup(16#74, 16#d) -> {more, 16#e5, 16#09}; +dec_huffman_lookup(16#74, 16#e) -> {more, 16#e5, 16#17}; +dec_huffman_lookup(16#74, 16#f) -> {ok, 16#e5, 16#28}; +dec_huffman_lookup(16#75, 16#0) -> {more, 16#d8, 16#03}; +dec_huffman_lookup(16#75, 16#1) -> {more, 16#d8, 16#06}; +dec_huffman_lookup(16#75, 16#2) -> {more, 16#d8, 16#0a}; +dec_huffman_lookup(16#75, 16#3) -> {more, 16#d8, 16#0f}; +dec_huffman_lookup(16#75, 16#4) -> {more, 16#d8, 16#18}; +dec_huffman_lookup(16#75, 16#5) -> {more, 16#d8, 16#1f}; +dec_huffman_lookup(16#75, 16#6) -> {more, 16#d8, 16#29}; +dec_huffman_lookup(16#75, 16#7) -> {ok, 16#d8, 16#38}; +dec_huffman_lookup(16#75, 16#8) -> {more, 16#d9, 16#03}; +dec_huffman_lookup(16#75, 16#9) -> {more, 16#d9, 16#06}; +dec_huffman_lookup(16#75, 16#a) -> {more, 16#d9, 16#0a}; +dec_huffman_lookup(16#75, 16#b) -> {more, 16#d9, 16#0f}; +dec_huffman_lookup(16#75, 16#c) -> {more, 16#d9, 16#18}; +dec_huffman_lookup(16#75, 16#d) -> {more, 16#d9, 16#1f}; +dec_huffman_lookup(16#75, 16#e) -> {more, 16#d9, 16#29}; +dec_huffman_lookup(16#75, 16#f) -> {ok, 16#d9, 16#38}; +dec_huffman_lookup(16#76, 16#0) -> {more, 16#e3, 16#03}; +dec_huffman_lookup(16#76, 16#1) -> {more, 16#e3, 16#06}; +dec_huffman_lookup(16#76, 16#2) -> {more, 16#e3, 16#0a}; +dec_huffman_lookup(16#76, 16#3) -> {more, 16#e3, 16#0f}; +dec_huffman_lookup(16#76, 16#4) -> {more, 16#e3, 16#18}; +dec_huffman_lookup(16#76, 16#5) -> {more, 16#e3, 16#1f}; +dec_huffman_lookup(16#76, 16#6) -> {more, 16#e3, 16#29}; +dec_huffman_lookup(16#76, 16#7) -> {ok, 16#e3, 16#38}; +dec_huffman_lookup(16#76, 16#8) -> {more, 16#e5, 16#03}; 
+dec_huffman_lookup(16#76, 16#9) -> {more, 16#e5, 16#06}; +dec_huffman_lookup(16#76, 16#a) -> {more, 16#e5, 16#0a}; +dec_huffman_lookup(16#76, 16#b) -> {more, 16#e5, 16#0f}; +dec_huffman_lookup(16#76, 16#c) -> {more, 16#e5, 16#18}; +dec_huffman_lookup(16#76, 16#d) -> {more, 16#e5, 16#1f}; +dec_huffman_lookup(16#76, 16#e) -> {more, 16#e5, 16#29}; +dec_huffman_lookup(16#76, 16#f) -> {ok, 16#e5, 16#38}; +dec_huffman_lookup(16#77, 16#0) -> {more, 16#e6, 16#01}; +dec_huffman_lookup(16#77, 16#1) -> {ok, 16#e6, 16#16}; +dec_huffman_lookup(16#77, 16#2) -> {ok, 16#81, 16#00}; +dec_huffman_lookup(16#77, 16#3) -> {ok, 16#84, 16#00}; +dec_huffman_lookup(16#77, 16#4) -> {ok, 16#85, 16#00}; +dec_huffman_lookup(16#77, 16#5) -> {ok, 16#86, 16#00}; +dec_huffman_lookup(16#77, 16#6) -> {ok, 16#88, 16#00}; +dec_huffman_lookup(16#77, 16#7) -> {ok, 16#92, 16#00}; +dec_huffman_lookup(16#77, 16#8) -> {ok, 16#9a, 16#00}; +dec_huffman_lookup(16#77, 16#9) -> {ok, 16#9c, 16#00}; +dec_huffman_lookup(16#77, 16#a) -> {ok, 16#a0, 16#00}; +dec_huffman_lookup(16#77, 16#b) -> {ok, 16#a3, 16#00}; +dec_huffman_lookup(16#77, 16#c) -> {ok, 16#a4, 16#00}; +dec_huffman_lookup(16#77, 16#d) -> {ok, 16#a9, 16#00}; +dec_huffman_lookup(16#77, 16#e) -> {ok, 16#aa, 16#00}; +dec_huffman_lookup(16#77, 16#f) -> {ok, 16#ad, 16#00}; +dec_huffman_lookup(16#78, 16#0) -> {more, 16#e6, 16#02}; +dec_huffman_lookup(16#78, 16#1) -> {more, 16#e6, 16#09}; +dec_huffman_lookup(16#78, 16#2) -> {more, 16#e6, 16#17}; +dec_huffman_lookup(16#78, 16#3) -> {ok, 16#e6, 16#28}; +dec_huffman_lookup(16#78, 16#4) -> {more, 16#81, 16#01}; +dec_huffman_lookup(16#78, 16#5) -> {ok, 16#81, 16#16}; +dec_huffman_lookup(16#78, 16#6) -> {more, 16#84, 16#01}; +dec_huffman_lookup(16#78, 16#7) -> {ok, 16#84, 16#16}; +dec_huffman_lookup(16#78, 16#8) -> {more, 16#85, 16#01}; +dec_huffman_lookup(16#78, 16#9) -> {ok, 16#85, 16#16}; +dec_huffman_lookup(16#78, 16#a) -> {more, 16#86, 16#01}; +dec_huffman_lookup(16#78, 16#b) -> {ok, 16#86, 16#16}; +dec_huffman_lookup(16#78, 16#c) -> {more, 16#88, 16#01}; +dec_huffman_lookup(16#78, 16#d) -> {ok, 16#88, 16#16}; +dec_huffman_lookup(16#78, 16#e) -> {more, 16#92, 16#01}; +dec_huffman_lookup(16#78, 16#f) -> {ok, 16#92, 16#16}; +dec_huffman_lookup(16#79, 16#0) -> {more, 16#e6, 16#03}; +dec_huffman_lookup(16#79, 16#1) -> {more, 16#e6, 16#06}; +dec_huffman_lookup(16#79, 16#2) -> {more, 16#e6, 16#0a}; +dec_huffman_lookup(16#79, 16#3) -> {more, 16#e6, 16#0f}; +dec_huffman_lookup(16#79, 16#4) -> {more, 16#e6, 16#18}; +dec_huffman_lookup(16#79, 16#5) -> {more, 16#e6, 16#1f}; +dec_huffman_lookup(16#79, 16#6) -> {more, 16#e6, 16#29}; +dec_huffman_lookup(16#79, 16#7) -> {ok, 16#e6, 16#38}; +dec_huffman_lookup(16#79, 16#8) -> {more, 16#81, 16#02}; +dec_huffman_lookup(16#79, 16#9) -> {more, 16#81, 16#09}; +dec_huffman_lookup(16#79, 16#a) -> {more, 16#81, 16#17}; +dec_huffman_lookup(16#79, 16#b) -> {ok, 16#81, 16#28}; +dec_huffman_lookup(16#79, 16#c) -> {more, 16#84, 16#02}; +dec_huffman_lookup(16#79, 16#d) -> {more, 16#84, 16#09}; +dec_huffman_lookup(16#79, 16#e) -> {more, 16#84, 16#17}; +dec_huffman_lookup(16#79, 16#f) -> {ok, 16#84, 16#28}; +dec_huffman_lookup(16#7a, 16#0) -> {more, 16#81, 16#03}; +dec_huffman_lookup(16#7a, 16#1) -> {more, 16#81, 16#06}; +dec_huffman_lookup(16#7a, 16#2) -> {more, 16#81, 16#0a}; +dec_huffman_lookup(16#7a, 16#3) -> {more, 16#81, 16#0f}; +dec_huffman_lookup(16#7a, 16#4) -> {more, 16#81, 16#18}; +dec_huffman_lookup(16#7a, 16#5) -> {more, 16#81, 16#1f}; +dec_huffman_lookup(16#7a, 16#6) -> {more, 16#81, 16#29}; 
+dec_huffman_lookup(16#7a, 16#7) -> {ok, 16#81, 16#38}; +dec_huffman_lookup(16#7a, 16#8) -> {more, 16#84, 16#03}; +dec_huffman_lookup(16#7a, 16#9) -> {more, 16#84, 16#06}; +dec_huffman_lookup(16#7a, 16#a) -> {more, 16#84, 16#0a}; +dec_huffman_lookup(16#7a, 16#b) -> {more, 16#84, 16#0f}; +dec_huffman_lookup(16#7a, 16#c) -> {more, 16#84, 16#18}; +dec_huffman_lookup(16#7a, 16#d) -> {more, 16#84, 16#1f}; +dec_huffman_lookup(16#7a, 16#e) -> {more, 16#84, 16#29}; +dec_huffman_lookup(16#7a, 16#f) -> {ok, 16#84, 16#38}; +dec_huffman_lookup(16#7b, 16#0) -> {more, 16#85, 16#02}; +dec_huffman_lookup(16#7b, 16#1) -> {more, 16#85, 16#09}; +dec_huffman_lookup(16#7b, 16#2) -> {more, 16#85, 16#17}; +dec_huffman_lookup(16#7b, 16#3) -> {ok, 16#85, 16#28}; +dec_huffman_lookup(16#7b, 16#4) -> {more, 16#86, 16#02}; +dec_huffman_lookup(16#7b, 16#5) -> {more, 16#86, 16#09}; +dec_huffman_lookup(16#7b, 16#6) -> {more, 16#86, 16#17}; +dec_huffman_lookup(16#7b, 16#7) -> {ok, 16#86, 16#28}; +dec_huffman_lookup(16#7b, 16#8) -> {more, 16#88, 16#02}; +dec_huffman_lookup(16#7b, 16#9) -> {more, 16#88, 16#09}; +dec_huffman_lookup(16#7b, 16#a) -> {more, 16#88, 16#17}; +dec_huffman_lookup(16#7b, 16#b) -> {ok, 16#88, 16#28}; +dec_huffman_lookup(16#7b, 16#c) -> {more, 16#92, 16#02}; +dec_huffman_lookup(16#7b, 16#d) -> {more, 16#92, 16#09}; +dec_huffman_lookup(16#7b, 16#e) -> {more, 16#92, 16#17}; +dec_huffman_lookup(16#7b, 16#f) -> {ok, 16#92, 16#28}; +dec_huffman_lookup(16#7c, 16#0) -> {more, 16#85, 16#03}; +dec_huffman_lookup(16#7c, 16#1) -> {more, 16#85, 16#06}; +dec_huffman_lookup(16#7c, 16#2) -> {more, 16#85, 16#0a}; +dec_huffman_lookup(16#7c, 16#3) -> {more, 16#85, 16#0f}; +dec_huffman_lookup(16#7c, 16#4) -> {more, 16#85, 16#18}; +dec_huffman_lookup(16#7c, 16#5) -> {more, 16#85, 16#1f}; +dec_huffman_lookup(16#7c, 16#6) -> {more, 16#85, 16#29}; +dec_huffman_lookup(16#7c, 16#7) -> {ok, 16#85, 16#38}; +dec_huffman_lookup(16#7c, 16#8) -> {more, 16#86, 16#03}; +dec_huffman_lookup(16#7c, 16#9) -> {more, 16#86, 16#06}; +dec_huffman_lookup(16#7c, 16#a) -> {more, 16#86, 16#0a}; +dec_huffman_lookup(16#7c, 16#b) -> {more, 16#86, 16#0f}; +dec_huffman_lookup(16#7c, 16#c) -> {more, 16#86, 16#18}; +dec_huffman_lookup(16#7c, 16#d) -> {more, 16#86, 16#1f}; +dec_huffman_lookup(16#7c, 16#e) -> {more, 16#86, 16#29}; +dec_huffman_lookup(16#7c, 16#f) -> {ok, 16#86, 16#38}; +dec_huffman_lookup(16#7d, 16#0) -> {more, 16#88, 16#03}; +dec_huffman_lookup(16#7d, 16#1) -> {more, 16#88, 16#06}; +dec_huffman_lookup(16#7d, 16#2) -> {more, 16#88, 16#0a}; +dec_huffman_lookup(16#7d, 16#3) -> {more, 16#88, 16#0f}; +dec_huffman_lookup(16#7d, 16#4) -> {more, 16#88, 16#18}; +dec_huffman_lookup(16#7d, 16#5) -> {more, 16#88, 16#1f}; +dec_huffman_lookup(16#7d, 16#6) -> {more, 16#88, 16#29}; +dec_huffman_lookup(16#7d, 16#7) -> {ok, 16#88, 16#38}; +dec_huffman_lookup(16#7d, 16#8) -> {more, 16#92, 16#03}; +dec_huffman_lookup(16#7d, 16#9) -> {more, 16#92, 16#06}; +dec_huffman_lookup(16#7d, 16#a) -> {more, 16#92, 16#0a}; +dec_huffman_lookup(16#7d, 16#b) -> {more, 16#92, 16#0f}; +dec_huffman_lookup(16#7d, 16#c) -> {more, 16#92, 16#18}; +dec_huffman_lookup(16#7d, 16#d) -> {more, 16#92, 16#1f}; +dec_huffman_lookup(16#7d, 16#e) -> {more, 16#92, 16#29}; +dec_huffman_lookup(16#7d, 16#f) -> {ok, 16#92, 16#38}; +dec_huffman_lookup(16#7e, 16#0) -> {more, 16#9a, 16#01}; +dec_huffman_lookup(16#7e, 16#1) -> {ok, 16#9a, 16#16}; +dec_huffman_lookup(16#7e, 16#2) -> {more, 16#9c, 16#01}; +dec_huffman_lookup(16#7e, 16#3) -> {ok, 16#9c, 16#16}; +dec_huffman_lookup(16#7e, 16#4) -> 
{more, 16#a0, 16#01}; +dec_huffman_lookup(16#7e, 16#5) -> {ok, 16#a0, 16#16}; +dec_huffman_lookup(16#7e, 16#6) -> {more, 16#a3, 16#01}; +dec_huffman_lookup(16#7e, 16#7) -> {ok, 16#a3, 16#16}; +dec_huffman_lookup(16#7e, 16#8) -> {more, 16#a4, 16#01}; +dec_huffman_lookup(16#7e, 16#9) -> {ok, 16#a4, 16#16}; +dec_huffman_lookup(16#7e, 16#a) -> {more, 16#a9, 16#01}; +dec_huffman_lookup(16#7e, 16#b) -> {ok, 16#a9, 16#16}; +dec_huffman_lookup(16#7e, 16#c) -> {more, 16#aa, 16#01}; +dec_huffman_lookup(16#7e, 16#d) -> {ok, 16#aa, 16#16}; +dec_huffman_lookup(16#7e, 16#e) -> {more, 16#ad, 16#01}; +dec_huffman_lookup(16#7e, 16#f) -> {ok, 16#ad, 16#16}; +dec_huffman_lookup(16#7f, 16#0) -> {more, 16#9a, 16#02}; +dec_huffman_lookup(16#7f, 16#1) -> {more, 16#9a, 16#09}; +dec_huffman_lookup(16#7f, 16#2) -> {more, 16#9a, 16#17}; +dec_huffman_lookup(16#7f, 16#3) -> {ok, 16#9a, 16#28}; +dec_huffman_lookup(16#7f, 16#4) -> {more, 16#9c, 16#02}; +dec_huffman_lookup(16#7f, 16#5) -> {more, 16#9c, 16#09}; +dec_huffman_lookup(16#7f, 16#6) -> {more, 16#9c, 16#17}; +dec_huffman_lookup(16#7f, 16#7) -> {ok, 16#9c, 16#28}; +dec_huffman_lookup(16#7f, 16#8) -> {more, 16#a0, 16#02}; +dec_huffman_lookup(16#7f, 16#9) -> {more, 16#a0, 16#09}; +dec_huffman_lookup(16#7f, 16#a) -> {more, 16#a0, 16#17}; +dec_huffman_lookup(16#7f, 16#b) -> {ok, 16#a0, 16#28}; +dec_huffman_lookup(16#7f, 16#c) -> {more, 16#a3, 16#02}; +dec_huffman_lookup(16#7f, 16#d) -> {more, 16#a3, 16#09}; +dec_huffman_lookup(16#7f, 16#e) -> {more, 16#a3, 16#17}; +dec_huffman_lookup(16#7f, 16#f) -> {ok, 16#a3, 16#28}; +dec_huffman_lookup(16#80, 16#0) -> {more, 16#9a, 16#03}; +dec_huffman_lookup(16#80, 16#1) -> {more, 16#9a, 16#06}; +dec_huffman_lookup(16#80, 16#2) -> {more, 16#9a, 16#0a}; +dec_huffman_lookup(16#80, 16#3) -> {more, 16#9a, 16#0f}; +dec_huffman_lookup(16#80, 16#4) -> {more, 16#9a, 16#18}; +dec_huffman_lookup(16#80, 16#5) -> {more, 16#9a, 16#1f}; +dec_huffman_lookup(16#80, 16#6) -> {more, 16#9a, 16#29}; +dec_huffman_lookup(16#80, 16#7) -> {ok, 16#9a, 16#38}; +dec_huffman_lookup(16#80, 16#8) -> {more, 16#9c, 16#03}; +dec_huffman_lookup(16#80, 16#9) -> {more, 16#9c, 16#06}; +dec_huffman_lookup(16#80, 16#a) -> {more, 16#9c, 16#0a}; +dec_huffman_lookup(16#80, 16#b) -> {more, 16#9c, 16#0f}; +dec_huffman_lookup(16#80, 16#c) -> {more, 16#9c, 16#18}; +dec_huffman_lookup(16#80, 16#d) -> {more, 16#9c, 16#1f}; +dec_huffman_lookup(16#80, 16#e) -> {more, 16#9c, 16#29}; +dec_huffman_lookup(16#80, 16#f) -> {ok, 16#9c, 16#38}; +dec_huffman_lookup(16#81, 16#0) -> {more, 16#a0, 16#03}; +dec_huffman_lookup(16#81, 16#1) -> {more, 16#a0, 16#06}; +dec_huffman_lookup(16#81, 16#2) -> {more, 16#a0, 16#0a}; +dec_huffman_lookup(16#81, 16#3) -> {more, 16#a0, 16#0f}; +dec_huffman_lookup(16#81, 16#4) -> {more, 16#a0, 16#18}; +dec_huffman_lookup(16#81, 16#5) -> {more, 16#a0, 16#1f}; +dec_huffman_lookup(16#81, 16#6) -> {more, 16#a0, 16#29}; +dec_huffman_lookup(16#81, 16#7) -> {ok, 16#a0, 16#38}; +dec_huffman_lookup(16#81, 16#8) -> {more, 16#a3, 16#03}; +dec_huffman_lookup(16#81, 16#9) -> {more, 16#a3, 16#06}; +dec_huffman_lookup(16#81, 16#a) -> {more, 16#a3, 16#0a}; +dec_huffman_lookup(16#81, 16#b) -> {more, 16#a3, 16#0f}; +dec_huffman_lookup(16#81, 16#c) -> {more, 16#a3, 16#18}; +dec_huffman_lookup(16#81, 16#d) -> {more, 16#a3, 16#1f}; +dec_huffman_lookup(16#81, 16#e) -> {more, 16#a3, 16#29}; +dec_huffman_lookup(16#81, 16#f) -> {ok, 16#a3, 16#38}; +dec_huffman_lookup(16#82, 16#0) -> {more, 16#a4, 16#02}; +dec_huffman_lookup(16#82, 16#1) -> {more, 16#a4, 16#09}; 
+dec_huffman_lookup(16#82, 16#2) -> {more, 16#a4, 16#17}; +dec_huffman_lookup(16#82, 16#3) -> {ok, 16#a4, 16#28}; +dec_huffman_lookup(16#82, 16#4) -> {more, 16#a9, 16#02}; +dec_huffman_lookup(16#82, 16#5) -> {more, 16#a9, 16#09}; +dec_huffman_lookup(16#82, 16#6) -> {more, 16#a9, 16#17}; +dec_huffman_lookup(16#82, 16#7) -> {ok, 16#a9, 16#28}; +dec_huffman_lookup(16#82, 16#8) -> {more, 16#aa, 16#02}; +dec_huffman_lookup(16#82, 16#9) -> {more, 16#aa, 16#09}; +dec_huffman_lookup(16#82, 16#a) -> {more, 16#aa, 16#17}; +dec_huffman_lookup(16#82, 16#b) -> {ok, 16#aa, 16#28}; +dec_huffman_lookup(16#82, 16#c) -> {more, 16#ad, 16#02}; +dec_huffman_lookup(16#82, 16#d) -> {more, 16#ad, 16#09}; +dec_huffman_lookup(16#82, 16#e) -> {more, 16#ad, 16#17}; +dec_huffman_lookup(16#82, 16#f) -> {ok, 16#ad, 16#28}; +dec_huffman_lookup(16#83, 16#0) -> {more, 16#a4, 16#03}; +dec_huffman_lookup(16#83, 16#1) -> {more, 16#a4, 16#06}; +dec_huffman_lookup(16#83, 16#2) -> {more, 16#a4, 16#0a}; +dec_huffman_lookup(16#83, 16#3) -> {more, 16#a4, 16#0f}; +dec_huffman_lookup(16#83, 16#4) -> {more, 16#a4, 16#18}; +dec_huffman_lookup(16#83, 16#5) -> {more, 16#a4, 16#1f}; +dec_huffman_lookup(16#83, 16#6) -> {more, 16#a4, 16#29}; +dec_huffman_lookup(16#83, 16#7) -> {ok, 16#a4, 16#38}; +dec_huffman_lookup(16#83, 16#8) -> {more, 16#a9, 16#03}; +dec_huffman_lookup(16#83, 16#9) -> {more, 16#a9, 16#06}; +dec_huffman_lookup(16#83, 16#a) -> {more, 16#a9, 16#0a}; +dec_huffman_lookup(16#83, 16#b) -> {more, 16#a9, 16#0f}; +dec_huffman_lookup(16#83, 16#c) -> {more, 16#a9, 16#18}; +dec_huffman_lookup(16#83, 16#d) -> {more, 16#a9, 16#1f}; +dec_huffman_lookup(16#83, 16#e) -> {more, 16#a9, 16#29}; +dec_huffman_lookup(16#83, 16#f) -> {ok, 16#a9, 16#38}; +dec_huffman_lookup(16#84, 16#0) -> {more, 16#aa, 16#03}; +dec_huffman_lookup(16#84, 16#1) -> {more, 16#aa, 16#06}; +dec_huffman_lookup(16#84, 16#2) -> {more, 16#aa, 16#0a}; +dec_huffman_lookup(16#84, 16#3) -> {more, 16#aa, 16#0f}; +dec_huffman_lookup(16#84, 16#4) -> {more, 16#aa, 16#18}; +dec_huffman_lookup(16#84, 16#5) -> {more, 16#aa, 16#1f}; +dec_huffman_lookup(16#84, 16#6) -> {more, 16#aa, 16#29}; +dec_huffman_lookup(16#84, 16#7) -> {ok, 16#aa, 16#38}; +dec_huffman_lookup(16#84, 16#8) -> {more, 16#ad, 16#03}; +dec_huffman_lookup(16#84, 16#9) -> {more, 16#ad, 16#06}; +dec_huffman_lookup(16#84, 16#a) -> {more, 16#ad, 16#0a}; +dec_huffman_lookup(16#84, 16#b) -> {more, 16#ad, 16#0f}; +dec_huffman_lookup(16#84, 16#c) -> {more, 16#ad, 16#18}; +dec_huffman_lookup(16#84, 16#d) -> {more, 16#ad, 16#1f}; +dec_huffman_lookup(16#84, 16#e) -> {more, 16#ad, 16#29}; +dec_huffman_lookup(16#84, 16#f) -> {ok, 16#ad, 16#38}; +dec_huffman_lookup(16#85, 16#0) -> {more, undefined, 16#89}; +dec_huffman_lookup(16#85, 16#1) -> {more, undefined, 16#8a}; +dec_huffman_lookup(16#85, 16#2) -> {more, undefined, 16#8c}; +dec_huffman_lookup(16#85, 16#3) -> {more, undefined, 16#8d}; +dec_huffman_lookup(16#85, 16#4) -> {more, undefined, 16#90}; +dec_huffman_lookup(16#85, 16#5) -> {more, undefined, 16#91}; +dec_huffman_lookup(16#85, 16#6) -> {more, undefined, 16#93}; +dec_huffman_lookup(16#85, 16#7) -> {more, undefined, 16#96}; +dec_huffman_lookup(16#85, 16#8) -> {more, undefined, 16#9c}; +dec_huffman_lookup(16#85, 16#9) -> {more, undefined, 16#9f}; +dec_huffman_lookup(16#85, 16#a) -> {more, undefined, 16#a3}; +dec_huffman_lookup(16#85, 16#b) -> {more, undefined, 16#a6}; +dec_huffman_lookup(16#85, 16#c) -> {more, undefined, 16#ab}; +dec_huffman_lookup(16#85, 16#d) -> {more, undefined, 16#ae}; +dec_huffman_lookup(16#85, 16#e) 
-> {more, undefined, 16#b5}; +dec_huffman_lookup(16#85, 16#f) -> {ok, undefined, 16#be}; +dec_huffman_lookup(16#86, 16#0) -> {ok, 16#b2, 16#00}; +dec_huffman_lookup(16#86, 16#1) -> {ok, 16#b5, 16#00}; +dec_huffman_lookup(16#86, 16#2) -> {ok, 16#b9, 16#00}; +dec_huffman_lookup(16#86, 16#3) -> {ok, 16#ba, 16#00}; +dec_huffman_lookup(16#86, 16#4) -> {ok, 16#bb, 16#00}; +dec_huffman_lookup(16#86, 16#5) -> {ok, 16#bd, 16#00}; +dec_huffman_lookup(16#86, 16#6) -> {ok, 16#be, 16#00}; +dec_huffman_lookup(16#86, 16#7) -> {ok, 16#c4, 16#00}; +dec_huffman_lookup(16#86, 16#8) -> {ok, 16#c6, 16#00}; +dec_huffman_lookup(16#86, 16#9) -> {ok, 16#e4, 16#00}; +dec_huffman_lookup(16#86, 16#a) -> {ok, 16#e8, 16#00}; +dec_huffman_lookup(16#86, 16#b) -> {ok, 16#e9, 16#00}; +dec_huffman_lookup(16#86, 16#c) -> {more, undefined, 16#94}; +dec_huffman_lookup(16#86, 16#d) -> {more, undefined, 16#95}; +dec_huffman_lookup(16#86, 16#e) -> {more, undefined, 16#97}; +dec_huffman_lookup(16#86, 16#f) -> {more, undefined, 16#98}; +dec_huffman_lookup(16#87, 16#0) -> {more, 16#b2, 16#01}; +dec_huffman_lookup(16#87, 16#1) -> {ok, 16#b2, 16#16}; +dec_huffman_lookup(16#87, 16#2) -> {more, 16#b5, 16#01}; +dec_huffman_lookup(16#87, 16#3) -> {ok, 16#b5, 16#16}; +dec_huffman_lookup(16#87, 16#4) -> {more, 16#b9, 16#01}; +dec_huffman_lookup(16#87, 16#5) -> {ok, 16#b9, 16#16}; +dec_huffman_lookup(16#87, 16#6) -> {more, 16#ba, 16#01}; +dec_huffman_lookup(16#87, 16#7) -> {ok, 16#ba, 16#16}; +dec_huffman_lookup(16#87, 16#8) -> {more, 16#bb, 16#01}; +dec_huffman_lookup(16#87, 16#9) -> {ok, 16#bb, 16#16}; +dec_huffman_lookup(16#87, 16#a) -> {more, 16#bd, 16#01}; +dec_huffman_lookup(16#87, 16#b) -> {ok, 16#bd, 16#16}; +dec_huffman_lookup(16#87, 16#c) -> {more, 16#be, 16#01}; +dec_huffman_lookup(16#87, 16#d) -> {ok, 16#be, 16#16}; +dec_huffman_lookup(16#87, 16#e) -> {more, 16#c4, 16#01}; +dec_huffman_lookup(16#87, 16#f) -> {ok, 16#c4, 16#16}; +dec_huffman_lookup(16#88, 16#0) -> {more, 16#b2, 16#02}; +dec_huffman_lookup(16#88, 16#1) -> {more, 16#b2, 16#09}; +dec_huffman_lookup(16#88, 16#2) -> {more, 16#b2, 16#17}; +dec_huffman_lookup(16#88, 16#3) -> {ok, 16#b2, 16#28}; +dec_huffman_lookup(16#88, 16#4) -> {more, 16#b5, 16#02}; +dec_huffman_lookup(16#88, 16#5) -> {more, 16#b5, 16#09}; +dec_huffman_lookup(16#88, 16#6) -> {more, 16#b5, 16#17}; +dec_huffman_lookup(16#88, 16#7) -> {ok, 16#b5, 16#28}; +dec_huffman_lookup(16#88, 16#8) -> {more, 16#b9, 16#02}; +dec_huffman_lookup(16#88, 16#9) -> {more, 16#b9, 16#09}; +dec_huffman_lookup(16#88, 16#a) -> {more, 16#b9, 16#17}; +dec_huffman_lookup(16#88, 16#b) -> {ok, 16#b9, 16#28}; +dec_huffman_lookup(16#88, 16#c) -> {more, 16#ba, 16#02}; +dec_huffman_lookup(16#88, 16#d) -> {more, 16#ba, 16#09}; +dec_huffman_lookup(16#88, 16#e) -> {more, 16#ba, 16#17}; +dec_huffman_lookup(16#88, 16#f) -> {ok, 16#ba, 16#28}; +dec_huffman_lookup(16#89, 16#0) -> {more, 16#b2, 16#03}; +dec_huffman_lookup(16#89, 16#1) -> {more, 16#b2, 16#06}; +dec_huffman_lookup(16#89, 16#2) -> {more, 16#b2, 16#0a}; +dec_huffman_lookup(16#89, 16#3) -> {more, 16#b2, 16#0f}; +dec_huffman_lookup(16#89, 16#4) -> {more, 16#b2, 16#18}; +dec_huffman_lookup(16#89, 16#5) -> {more, 16#b2, 16#1f}; +dec_huffman_lookup(16#89, 16#6) -> {more, 16#b2, 16#29}; +dec_huffman_lookup(16#89, 16#7) -> {ok, 16#b2, 16#38}; +dec_huffman_lookup(16#89, 16#8) -> {more, 16#b5, 16#03}; +dec_huffman_lookup(16#89, 16#9) -> {more, 16#b5, 16#06}; +dec_huffman_lookup(16#89, 16#a) -> {more, 16#b5, 16#0a}; +dec_huffman_lookup(16#89, 16#b) -> {more, 16#b5, 16#0f}; 
+dec_huffman_lookup(16#89, 16#c) -> {more, 16#b5, 16#18}; +dec_huffman_lookup(16#89, 16#d) -> {more, 16#b5, 16#1f}; +dec_huffman_lookup(16#89, 16#e) -> {more, 16#b5, 16#29}; +dec_huffman_lookup(16#89, 16#f) -> {ok, 16#b5, 16#38}; +dec_huffman_lookup(16#8a, 16#0) -> {more, 16#b9, 16#03}; +dec_huffman_lookup(16#8a, 16#1) -> {more, 16#b9, 16#06}; +dec_huffman_lookup(16#8a, 16#2) -> {more, 16#b9, 16#0a}; +dec_huffman_lookup(16#8a, 16#3) -> {more, 16#b9, 16#0f}; +dec_huffman_lookup(16#8a, 16#4) -> {more, 16#b9, 16#18}; +dec_huffman_lookup(16#8a, 16#5) -> {more, 16#b9, 16#1f}; +dec_huffman_lookup(16#8a, 16#6) -> {more, 16#b9, 16#29}; +dec_huffman_lookup(16#8a, 16#7) -> {ok, 16#b9, 16#38}; +dec_huffman_lookup(16#8a, 16#8) -> {more, 16#ba, 16#03}; +dec_huffman_lookup(16#8a, 16#9) -> {more, 16#ba, 16#06}; +dec_huffman_lookup(16#8a, 16#a) -> {more, 16#ba, 16#0a}; +dec_huffman_lookup(16#8a, 16#b) -> {more, 16#ba, 16#0f}; +dec_huffman_lookup(16#8a, 16#c) -> {more, 16#ba, 16#18}; +dec_huffman_lookup(16#8a, 16#d) -> {more, 16#ba, 16#1f}; +dec_huffman_lookup(16#8a, 16#e) -> {more, 16#ba, 16#29}; +dec_huffman_lookup(16#8a, 16#f) -> {ok, 16#ba, 16#38}; +dec_huffman_lookup(16#8b, 16#0) -> {more, 16#bb, 16#02}; +dec_huffman_lookup(16#8b, 16#1) -> {more, 16#bb, 16#09}; +dec_huffman_lookup(16#8b, 16#2) -> {more, 16#bb, 16#17}; +dec_huffman_lookup(16#8b, 16#3) -> {ok, 16#bb, 16#28}; +dec_huffman_lookup(16#8b, 16#4) -> {more, 16#bd, 16#02}; +dec_huffman_lookup(16#8b, 16#5) -> {more, 16#bd, 16#09}; +dec_huffman_lookup(16#8b, 16#6) -> {more, 16#bd, 16#17}; +dec_huffman_lookup(16#8b, 16#7) -> {ok, 16#bd, 16#28}; +dec_huffman_lookup(16#8b, 16#8) -> {more, 16#be, 16#02}; +dec_huffman_lookup(16#8b, 16#9) -> {more, 16#be, 16#09}; +dec_huffman_lookup(16#8b, 16#a) -> {more, 16#be, 16#17}; +dec_huffman_lookup(16#8b, 16#b) -> {ok, 16#be, 16#28}; +dec_huffman_lookup(16#8b, 16#c) -> {more, 16#c4, 16#02}; +dec_huffman_lookup(16#8b, 16#d) -> {more, 16#c4, 16#09}; +dec_huffman_lookup(16#8b, 16#e) -> {more, 16#c4, 16#17}; +dec_huffman_lookup(16#8b, 16#f) -> {ok, 16#c4, 16#28}; +dec_huffman_lookup(16#8c, 16#0) -> {more, 16#bb, 16#03}; +dec_huffman_lookup(16#8c, 16#1) -> {more, 16#bb, 16#06}; +dec_huffman_lookup(16#8c, 16#2) -> {more, 16#bb, 16#0a}; +dec_huffman_lookup(16#8c, 16#3) -> {more, 16#bb, 16#0f}; +dec_huffman_lookup(16#8c, 16#4) -> {more, 16#bb, 16#18}; +dec_huffman_lookup(16#8c, 16#5) -> {more, 16#bb, 16#1f}; +dec_huffman_lookup(16#8c, 16#6) -> {more, 16#bb, 16#29}; +dec_huffman_lookup(16#8c, 16#7) -> {ok, 16#bb, 16#38}; +dec_huffman_lookup(16#8c, 16#8) -> {more, 16#bd, 16#03}; +dec_huffman_lookup(16#8c, 16#9) -> {more, 16#bd, 16#06}; +dec_huffman_lookup(16#8c, 16#a) -> {more, 16#bd, 16#0a}; +dec_huffman_lookup(16#8c, 16#b) -> {more, 16#bd, 16#0f}; +dec_huffman_lookup(16#8c, 16#c) -> {more, 16#bd, 16#18}; +dec_huffman_lookup(16#8c, 16#d) -> {more, 16#bd, 16#1f}; +dec_huffman_lookup(16#8c, 16#e) -> {more, 16#bd, 16#29}; +dec_huffman_lookup(16#8c, 16#f) -> {ok, 16#bd, 16#38}; +dec_huffman_lookup(16#8d, 16#0) -> {more, 16#be, 16#03}; +dec_huffman_lookup(16#8d, 16#1) -> {more, 16#be, 16#06}; +dec_huffman_lookup(16#8d, 16#2) -> {more, 16#be, 16#0a}; +dec_huffman_lookup(16#8d, 16#3) -> {more, 16#be, 16#0f}; +dec_huffman_lookup(16#8d, 16#4) -> {more, 16#be, 16#18}; +dec_huffman_lookup(16#8d, 16#5) -> {more, 16#be, 16#1f}; +dec_huffman_lookup(16#8d, 16#6) -> {more, 16#be, 16#29}; +dec_huffman_lookup(16#8d, 16#7) -> {ok, 16#be, 16#38}; +dec_huffman_lookup(16#8d, 16#8) -> {more, 16#c4, 16#03}; +dec_huffman_lookup(16#8d, 16#9) -> 
{more, 16#c4, 16#06}; +dec_huffman_lookup(16#8d, 16#a) -> {more, 16#c4, 16#0a}; +dec_huffman_lookup(16#8d, 16#b) -> {more, 16#c4, 16#0f}; +dec_huffman_lookup(16#8d, 16#c) -> {more, 16#c4, 16#18}; +dec_huffman_lookup(16#8d, 16#d) -> {more, 16#c4, 16#1f}; +dec_huffman_lookup(16#8d, 16#e) -> {more, 16#c4, 16#29}; +dec_huffman_lookup(16#8d, 16#f) -> {ok, 16#c4, 16#38}; +dec_huffman_lookup(16#8e, 16#0) -> {more, 16#c6, 16#01}; +dec_huffman_lookup(16#8e, 16#1) -> {ok, 16#c6, 16#16}; +dec_huffman_lookup(16#8e, 16#2) -> {more, 16#e4, 16#01}; +dec_huffman_lookup(16#8e, 16#3) -> {ok, 16#e4, 16#16}; +dec_huffman_lookup(16#8e, 16#4) -> {more, 16#e8, 16#01}; +dec_huffman_lookup(16#8e, 16#5) -> {ok, 16#e8, 16#16}; +dec_huffman_lookup(16#8e, 16#6) -> {more, 16#e9, 16#01}; +dec_huffman_lookup(16#8e, 16#7) -> {ok, 16#e9, 16#16}; +dec_huffman_lookup(16#8e, 16#8) -> {ok, 16#01, 16#00}; +dec_huffman_lookup(16#8e, 16#9) -> {ok, 16#87, 16#00}; +dec_huffman_lookup(16#8e, 16#a) -> {ok, 16#89, 16#00}; +dec_huffman_lookup(16#8e, 16#b) -> {ok, 16#8a, 16#00}; +dec_huffman_lookup(16#8e, 16#c) -> {ok, 16#8b, 16#00}; +dec_huffman_lookup(16#8e, 16#d) -> {ok, 16#8c, 16#00}; +dec_huffman_lookup(16#8e, 16#e) -> {ok, 16#8d, 16#00}; +dec_huffman_lookup(16#8e, 16#f) -> {ok, 16#8f, 16#00}; +dec_huffman_lookup(16#8f, 16#0) -> {more, 16#c6, 16#02}; +dec_huffman_lookup(16#8f, 16#1) -> {more, 16#c6, 16#09}; +dec_huffman_lookup(16#8f, 16#2) -> {more, 16#c6, 16#17}; +dec_huffman_lookup(16#8f, 16#3) -> {ok, 16#c6, 16#28}; +dec_huffman_lookup(16#8f, 16#4) -> {more, 16#e4, 16#02}; +dec_huffman_lookup(16#8f, 16#5) -> {more, 16#e4, 16#09}; +dec_huffman_lookup(16#8f, 16#6) -> {more, 16#e4, 16#17}; +dec_huffman_lookup(16#8f, 16#7) -> {ok, 16#e4, 16#28}; +dec_huffman_lookup(16#8f, 16#8) -> {more, 16#e8, 16#02}; +dec_huffman_lookup(16#8f, 16#9) -> {more, 16#e8, 16#09}; +dec_huffman_lookup(16#8f, 16#a) -> {more, 16#e8, 16#17}; +dec_huffman_lookup(16#8f, 16#b) -> {ok, 16#e8, 16#28}; +dec_huffman_lookup(16#8f, 16#c) -> {more, 16#e9, 16#02}; +dec_huffman_lookup(16#8f, 16#d) -> {more, 16#e9, 16#09}; +dec_huffman_lookup(16#8f, 16#e) -> {more, 16#e9, 16#17}; +dec_huffman_lookup(16#8f, 16#f) -> {ok, 16#e9, 16#28}; +dec_huffman_lookup(16#90, 16#0) -> {more, 16#c6, 16#03}; +dec_huffman_lookup(16#90, 16#1) -> {more, 16#c6, 16#06}; +dec_huffman_lookup(16#90, 16#2) -> {more, 16#c6, 16#0a}; +dec_huffman_lookup(16#90, 16#3) -> {more, 16#c6, 16#0f}; +dec_huffman_lookup(16#90, 16#4) -> {more, 16#c6, 16#18}; +dec_huffman_lookup(16#90, 16#5) -> {more, 16#c6, 16#1f}; +dec_huffman_lookup(16#90, 16#6) -> {more, 16#c6, 16#29}; +dec_huffman_lookup(16#90, 16#7) -> {ok, 16#c6, 16#38}; +dec_huffman_lookup(16#90, 16#8) -> {more, 16#e4, 16#03}; +dec_huffman_lookup(16#90, 16#9) -> {more, 16#e4, 16#06}; +dec_huffman_lookup(16#90, 16#a) -> {more, 16#e4, 16#0a}; +dec_huffman_lookup(16#90, 16#b) -> {more, 16#e4, 16#0f}; +dec_huffman_lookup(16#90, 16#c) -> {more, 16#e4, 16#18}; +dec_huffman_lookup(16#90, 16#d) -> {more, 16#e4, 16#1f}; +dec_huffman_lookup(16#90, 16#e) -> {more, 16#e4, 16#29}; +dec_huffman_lookup(16#90, 16#f) -> {ok, 16#e4, 16#38}; +dec_huffman_lookup(16#91, 16#0) -> {more, 16#e8, 16#03}; +dec_huffman_lookup(16#91, 16#1) -> {more, 16#e8, 16#06}; +dec_huffman_lookup(16#91, 16#2) -> {more, 16#e8, 16#0a}; +dec_huffman_lookup(16#91, 16#3) -> {more, 16#e8, 16#0f}; +dec_huffman_lookup(16#91, 16#4) -> {more, 16#e8, 16#18}; +dec_huffman_lookup(16#91, 16#5) -> {more, 16#e8, 16#1f}; +dec_huffman_lookup(16#91, 16#6) -> {more, 16#e8, 16#29}; +dec_huffman_lookup(16#91, 16#7) 
-> {ok, 16#e8, 16#38}; +dec_huffman_lookup(16#91, 16#8) -> {more, 16#e9, 16#03}; +dec_huffman_lookup(16#91, 16#9) -> {more, 16#e9, 16#06}; +dec_huffman_lookup(16#91, 16#a) -> {more, 16#e9, 16#0a}; +dec_huffman_lookup(16#91, 16#b) -> {more, 16#e9, 16#0f}; +dec_huffman_lookup(16#91, 16#c) -> {more, 16#e9, 16#18}; +dec_huffman_lookup(16#91, 16#d) -> {more, 16#e9, 16#1f}; +dec_huffman_lookup(16#91, 16#e) -> {more, 16#e9, 16#29}; +dec_huffman_lookup(16#91, 16#f) -> {ok, 16#e9, 16#38}; +dec_huffman_lookup(16#92, 16#0) -> {more, 16#01, 16#01}; +dec_huffman_lookup(16#92, 16#1) -> {ok, 16#01, 16#16}; +dec_huffman_lookup(16#92, 16#2) -> {more, 16#87, 16#01}; +dec_huffman_lookup(16#92, 16#3) -> {ok, 16#87, 16#16}; +dec_huffman_lookup(16#92, 16#4) -> {more, 16#89, 16#01}; +dec_huffman_lookup(16#92, 16#5) -> {ok, 16#89, 16#16}; +dec_huffman_lookup(16#92, 16#6) -> {more, 16#8a, 16#01}; +dec_huffman_lookup(16#92, 16#7) -> {ok, 16#8a, 16#16}; +dec_huffman_lookup(16#92, 16#8) -> {more, 16#8b, 16#01}; +dec_huffman_lookup(16#92, 16#9) -> {ok, 16#8b, 16#16}; +dec_huffman_lookup(16#92, 16#a) -> {more, 16#8c, 16#01}; +dec_huffman_lookup(16#92, 16#b) -> {ok, 16#8c, 16#16}; +dec_huffman_lookup(16#92, 16#c) -> {more, 16#8d, 16#01}; +dec_huffman_lookup(16#92, 16#d) -> {ok, 16#8d, 16#16}; +dec_huffman_lookup(16#92, 16#e) -> {more, 16#8f, 16#01}; +dec_huffman_lookup(16#92, 16#f) -> {ok, 16#8f, 16#16}; +dec_huffman_lookup(16#93, 16#0) -> {more, 16#01, 16#02}; +dec_huffman_lookup(16#93, 16#1) -> {more, 16#01, 16#09}; +dec_huffman_lookup(16#93, 16#2) -> {more, 16#01, 16#17}; +dec_huffman_lookup(16#93, 16#3) -> {ok, 16#01, 16#28}; +dec_huffman_lookup(16#93, 16#4) -> {more, 16#87, 16#02}; +dec_huffman_lookup(16#93, 16#5) -> {more, 16#87, 16#09}; +dec_huffman_lookup(16#93, 16#6) -> {more, 16#87, 16#17}; +dec_huffman_lookup(16#93, 16#7) -> {ok, 16#87, 16#28}; +dec_huffman_lookup(16#93, 16#8) -> {more, 16#89, 16#02}; +dec_huffman_lookup(16#93, 16#9) -> {more, 16#89, 16#09}; +dec_huffman_lookup(16#93, 16#a) -> {more, 16#89, 16#17}; +dec_huffman_lookup(16#93, 16#b) -> {ok, 16#89, 16#28}; +dec_huffman_lookup(16#93, 16#c) -> {more, 16#8a, 16#02}; +dec_huffman_lookup(16#93, 16#d) -> {more, 16#8a, 16#09}; +dec_huffman_lookup(16#93, 16#e) -> {more, 16#8a, 16#17}; +dec_huffman_lookup(16#93, 16#f) -> {ok, 16#8a, 16#28}; +dec_huffman_lookup(16#94, 16#0) -> {more, 16#01, 16#03}; +dec_huffman_lookup(16#94, 16#1) -> {more, 16#01, 16#06}; +dec_huffman_lookup(16#94, 16#2) -> {more, 16#01, 16#0a}; +dec_huffman_lookup(16#94, 16#3) -> {more, 16#01, 16#0f}; +dec_huffman_lookup(16#94, 16#4) -> {more, 16#01, 16#18}; +dec_huffman_lookup(16#94, 16#5) -> {more, 16#01, 16#1f}; +dec_huffman_lookup(16#94, 16#6) -> {more, 16#01, 16#29}; +dec_huffman_lookup(16#94, 16#7) -> {ok, 16#01, 16#38}; +dec_huffman_lookup(16#94, 16#8) -> {more, 16#87, 16#03}; +dec_huffman_lookup(16#94, 16#9) -> {more, 16#87, 16#06}; +dec_huffman_lookup(16#94, 16#a) -> {more, 16#87, 16#0a}; +dec_huffman_lookup(16#94, 16#b) -> {more, 16#87, 16#0f}; +dec_huffman_lookup(16#94, 16#c) -> {more, 16#87, 16#18}; +dec_huffman_lookup(16#94, 16#d) -> {more, 16#87, 16#1f}; +dec_huffman_lookup(16#94, 16#e) -> {more, 16#87, 16#29}; +dec_huffman_lookup(16#94, 16#f) -> {ok, 16#87, 16#38}; +dec_huffman_lookup(16#95, 16#0) -> {more, 16#89, 16#03}; +dec_huffman_lookup(16#95, 16#1) -> {more, 16#89, 16#06}; +dec_huffman_lookup(16#95, 16#2) -> {more, 16#89, 16#0a}; +dec_huffman_lookup(16#95, 16#3) -> {more, 16#89, 16#0f}; +dec_huffman_lookup(16#95, 16#4) -> {more, 16#89, 16#18}; 
+dec_huffman_lookup(16#95, 16#5) -> {more, 16#89, 16#1f}; +dec_huffman_lookup(16#95, 16#6) -> {more, 16#89, 16#29}; +dec_huffman_lookup(16#95, 16#7) -> {ok, 16#89, 16#38}; +dec_huffman_lookup(16#95, 16#8) -> {more, 16#8a, 16#03}; +dec_huffman_lookup(16#95, 16#9) -> {more, 16#8a, 16#06}; +dec_huffman_lookup(16#95, 16#a) -> {more, 16#8a, 16#0a}; +dec_huffman_lookup(16#95, 16#b) -> {more, 16#8a, 16#0f}; +dec_huffman_lookup(16#95, 16#c) -> {more, 16#8a, 16#18}; +dec_huffman_lookup(16#95, 16#d) -> {more, 16#8a, 16#1f}; +dec_huffman_lookup(16#95, 16#e) -> {more, 16#8a, 16#29}; +dec_huffman_lookup(16#95, 16#f) -> {ok, 16#8a, 16#38}; +dec_huffman_lookup(16#96, 16#0) -> {more, 16#8b, 16#02}; +dec_huffman_lookup(16#96, 16#1) -> {more, 16#8b, 16#09}; +dec_huffman_lookup(16#96, 16#2) -> {more, 16#8b, 16#17}; +dec_huffman_lookup(16#96, 16#3) -> {ok, 16#8b, 16#28}; +dec_huffman_lookup(16#96, 16#4) -> {more, 16#8c, 16#02}; +dec_huffman_lookup(16#96, 16#5) -> {more, 16#8c, 16#09}; +dec_huffman_lookup(16#96, 16#6) -> {more, 16#8c, 16#17}; +dec_huffman_lookup(16#96, 16#7) -> {ok, 16#8c, 16#28}; +dec_huffman_lookup(16#96, 16#8) -> {more, 16#8d, 16#02}; +dec_huffman_lookup(16#96, 16#9) -> {more, 16#8d, 16#09}; +dec_huffman_lookup(16#96, 16#a) -> {more, 16#8d, 16#17}; +dec_huffman_lookup(16#96, 16#b) -> {ok, 16#8d, 16#28}; +dec_huffman_lookup(16#96, 16#c) -> {more, 16#8f, 16#02}; +dec_huffman_lookup(16#96, 16#d) -> {more, 16#8f, 16#09}; +dec_huffman_lookup(16#96, 16#e) -> {more, 16#8f, 16#17}; +dec_huffman_lookup(16#96, 16#f) -> {ok, 16#8f, 16#28}; +dec_huffman_lookup(16#97, 16#0) -> {more, 16#8b, 16#03}; +dec_huffman_lookup(16#97, 16#1) -> {more, 16#8b, 16#06}; +dec_huffman_lookup(16#97, 16#2) -> {more, 16#8b, 16#0a}; +dec_huffman_lookup(16#97, 16#3) -> {more, 16#8b, 16#0f}; +dec_huffman_lookup(16#97, 16#4) -> {more, 16#8b, 16#18}; +dec_huffman_lookup(16#97, 16#5) -> {more, 16#8b, 16#1f}; +dec_huffman_lookup(16#97, 16#6) -> {more, 16#8b, 16#29}; +dec_huffman_lookup(16#97, 16#7) -> {ok, 16#8b, 16#38}; +dec_huffman_lookup(16#97, 16#8) -> {more, 16#8c, 16#03}; +dec_huffman_lookup(16#97, 16#9) -> {more, 16#8c, 16#06}; +dec_huffman_lookup(16#97, 16#a) -> {more, 16#8c, 16#0a}; +dec_huffman_lookup(16#97, 16#b) -> {more, 16#8c, 16#0f}; +dec_huffman_lookup(16#97, 16#c) -> {more, 16#8c, 16#18}; +dec_huffman_lookup(16#97, 16#d) -> {more, 16#8c, 16#1f}; +dec_huffman_lookup(16#97, 16#e) -> {more, 16#8c, 16#29}; +dec_huffman_lookup(16#97, 16#f) -> {ok, 16#8c, 16#38}; +dec_huffman_lookup(16#98, 16#0) -> {more, 16#8d, 16#03}; +dec_huffman_lookup(16#98, 16#1) -> {more, 16#8d, 16#06}; +dec_huffman_lookup(16#98, 16#2) -> {more, 16#8d, 16#0a}; +dec_huffman_lookup(16#98, 16#3) -> {more, 16#8d, 16#0f}; +dec_huffman_lookup(16#98, 16#4) -> {more, 16#8d, 16#18}; +dec_huffman_lookup(16#98, 16#5) -> {more, 16#8d, 16#1f}; +dec_huffman_lookup(16#98, 16#6) -> {more, 16#8d, 16#29}; +dec_huffman_lookup(16#98, 16#7) -> {ok, 16#8d, 16#38}; +dec_huffman_lookup(16#98, 16#8) -> {more, 16#8f, 16#03}; +dec_huffman_lookup(16#98, 16#9) -> {more, 16#8f, 16#06}; +dec_huffman_lookup(16#98, 16#a) -> {more, 16#8f, 16#0a}; +dec_huffman_lookup(16#98, 16#b) -> {more, 16#8f, 16#0f}; +dec_huffman_lookup(16#98, 16#c) -> {more, 16#8f, 16#18}; +dec_huffman_lookup(16#98, 16#d) -> {more, 16#8f, 16#1f}; +dec_huffman_lookup(16#98, 16#e) -> {more, 16#8f, 16#29}; +dec_huffman_lookup(16#98, 16#f) -> {ok, 16#8f, 16#38}; +dec_huffman_lookup(16#99, 16#0) -> {more, undefined, 16#9d}; +dec_huffman_lookup(16#99, 16#1) -> {more, undefined, 16#9e}; +dec_huffman_lookup(16#99, 
16#2) -> {more, undefined, 16#a0}; +dec_huffman_lookup(16#99, 16#3) -> {more, undefined, 16#a1}; +dec_huffman_lookup(16#99, 16#4) -> {more, undefined, 16#a4}; +dec_huffman_lookup(16#99, 16#5) -> {more, undefined, 16#a5}; +dec_huffman_lookup(16#99, 16#6) -> {more, undefined, 16#a7}; +dec_huffman_lookup(16#99, 16#7) -> {more, undefined, 16#a8}; +dec_huffman_lookup(16#99, 16#8) -> {more, undefined, 16#ac}; +dec_huffman_lookup(16#99, 16#9) -> {more, undefined, 16#ad}; +dec_huffman_lookup(16#99, 16#a) -> {more, undefined, 16#af}; +dec_huffman_lookup(16#99, 16#b) -> {more, undefined, 16#b1}; +dec_huffman_lookup(16#99, 16#c) -> {more, undefined, 16#b6}; +dec_huffman_lookup(16#99, 16#d) -> {more, undefined, 16#b9}; +dec_huffman_lookup(16#99, 16#e) -> {more, undefined, 16#bf}; +dec_huffman_lookup(16#99, 16#f) -> {ok, undefined, 16#cf}; +dec_huffman_lookup(16#9a, 16#0) -> {ok, 16#93, 16#00}; +dec_huffman_lookup(16#9a, 16#1) -> {ok, 16#95, 16#00}; +dec_huffman_lookup(16#9a, 16#2) -> {ok, 16#96, 16#00}; +dec_huffman_lookup(16#9a, 16#3) -> {ok, 16#97, 16#00}; +dec_huffman_lookup(16#9a, 16#4) -> {ok, 16#98, 16#00}; +dec_huffman_lookup(16#9a, 16#5) -> {ok, 16#9b, 16#00}; +dec_huffman_lookup(16#9a, 16#6) -> {ok, 16#9d, 16#00}; +dec_huffman_lookup(16#9a, 16#7) -> {ok, 16#9e, 16#00}; +dec_huffman_lookup(16#9a, 16#8) -> {ok, 16#a5, 16#00}; +dec_huffman_lookup(16#9a, 16#9) -> {ok, 16#a6, 16#00}; +dec_huffman_lookup(16#9a, 16#a) -> {ok, 16#a8, 16#00}; +dec_huffman_lookup(16#9a, 16#b) -> {ok, 16#ae, 16#00}; +dec_huffman_lookup(16#9a, 16#c) -> {ok, 16#af, 16#00}; +dec_huffman_lookup(16#9a, 16#d) -> {ok, 16#b4, 16#00}; +dec_huffman_lookup(16#9a, 16#e) -> {ok, 16#b6, 16#00}; +dec_huffman_lookup(16#9a, 16#f) -> {ok, 16#b7, 16#00}; +dec_huffman_lookup(16#9b, 16#0) -> {more, 16#93, 16#01}; +dec_huffman_lookup(16#9b, 16#1) -> {ok, 16#93, 16#16}; +dec_huffman_lookup(16#9b, 16#2) -> {more, 16#95, 16#01}; +dec_huffman_lookup(16#9b, 16#3) -> {ok, 16#95, 16#16}; +dec_huffman_lookup(16#9b, 16#4) -> {more, 16#96, 16#01}; +dec_huffman_lookup(16#9b, 16#5) -> {ok, 16#96, 16#16}; +dec_huffman_lookup(16#9b, 16#6) -> {more, 16#97, 16#01}; +dec_huffman_lookup(16#9b, 16#7) -> {ok, 16#97, 16#16}; +dec_huffman_lookup(16#9b, 16#8) -> {more, 16#98, 16#01}; +dec_huffman_lookup(16#9b, 16#9) -> {ok, 16#98, 16#16}; +dec_huffman_lookup(16#9b, 16#a) -> {more, 16#9b, 16#01}; +dec_huffman_lookup(16#9b, 16#b) -> {ok, 16#9b, 16#16}; +dec_huffman_lookup(16#9b, 16#c) -> {more, 16#9d, 16#01}; +dec_huffman_lookup(16#9b, 16#d) -> {ok, 16#9d, 16#16}; +dec_huffman_lookup(16#9b, 16#e) -> {more, 16#9e, 16#01}; +dec_huffman_lookup(16#9b, 16#f) -> {ok, 16#9e, 16#16}; +dec_huffman_lookup(16#9c, 16#0) -> {more, 16#93, 16#02}; +dec_huffman_lookup(16#9c, 16#1) -> {more, 16#93, 16#09}; +dec_huffman_lookup(16#9c, 16#2) -> {more, 16#93, 16#17}; +dec_huffman_lookup(16#9c, 16#3) -> {ok, 16#93, 16#28}; +dec_huffman_lookup(16#9c, 16#4) -> {more, 16#95, 16#02}; +dec_huffman_lookup(16#9c, 16#5) -> {more, 16#95, 16#09}; +dec_huffman_lookup(16#9c, 16#6) -> {more, 16#95, 16#17}; +dec_huffman_lookup(16#9c, 16#7) -> {ok, 16#95, 16#28}; +dec_huffman_lookup(16#9c, 16#8) -> {more, 16#96, 16#02}; +dec_huffman_lookup(16#9c, 16#9) -> {more, 16#96, 16#09}; +dec_huffman_lookup(16#9c, 16#a) -> {more, 16#96, 16#17}; +dec_huffman_lookup(16#9c, 16#b) -> {ok, 16#96, 16#28}; +dec_huffman_lookup(16#9c, 16#c) -> {more, 16#97, 16#02}; +dec_huffman_lookup(16#9c, 16#d) -> {more, 16#97, 16#09}; +dec_huffman_lookup(16#9c, 16#e) -> {more, 16#97, 16#17}; +dec_huffman_lookup(16#9c, 16#f) -> {ok, 
16#97, 16#28}; +dec_huffman_lookup(16#9d, 16#0) -> {more, 16#93, 16#03}; +dec_huffman_lookup(16#9d, 16#1) -> {more, 16#93, 16#06}; +dec_huffman_lookup(16#9d, 16#2) -> {more, 16#93, 16#0a}; +dec_huffman_lookup(16#9d, 16#3) -> {more, 16#93, 16#0f}; +dec_huffman_lookup(16#9d, 16#4) -> {more, 16#93, 16#18}; +dec_huffman_lookup(16#9d, 16#5) -> {more, 16#93, 16#1f}; +dec_huffman_lookup(16#9d, 16#6) -> {more, 16#93, 16#29}; +dec_huffman_lookup(16#9d, 16#7) -> {ok, 16#93, 16#38}; +dec_huffman_lookup(16#9d, 16#8) -> {more, 16#95, 16#03}; +dec_huffman_lookup(16#9d, 16#9) -> {more, 16#95, 16#06}; +dec_huffman_lookup(16#9d, 16#a) -> {more, 16#95, 16#0a}; +dec_huffman_lookup(16#9d, 16#b) -> {more, 16#95, 16#0f}; +dec_huffman_lookup(16#9d, 16#c) -> {more, 16#95, 16#18}; +dec_huffman_lookup(16#9d, 16#d) -> {more, 16#95, 16#1f}; +dec_huffman_lookup(16#9d, 16#e) -> {more, 16#95, 16#29}; +dec_huffman_lookup(16#9d, 16#f) -> {ok, 16#95, 16#38}; +dec_huffman_lookup(16#9e, 16#0) -> {more, 16#96, 16#03}; +dec_huffman_lookup(16#9e, 16#1) -> {more, 16#96, 16#06}; +dec_huffman_lookup(16#9e, 16#2) -> {more, 16#96, 16#0a}; +dec_huffman_lookup(16#9e, 16#3) -> {more, 16#96, 16#0f}; +dec_huffman_lookup(16#9e, 16#4) -> {more, 16#96, 16#18}; +dec_huffman_lookup(16#9e, 16#5) -> {more, 16#96, 16#1f}; +dec_huffman_lookup(16#9e, 16#6) -> {more, 16#96, 16#29}; +dec_huffman_lookup(16#9e, 16#7) -> {ok, 16#96, 16#38}; +dec_huffman_lookup(16#9e, 16#8) -> {more, 16#97, 16#03}; +dec_huffman_lookup(16#9e, 16#9) -> {more, 16#97, 16#06}; +dec_huffman_lookup(16#9e, 16#a) -> {more, 16#97, 16#0a}; +dec_huffman_lookup(16#9e, 16#b) -> {more, 16#97, 16#0f}; +dec_huffman_lookup(16#9e, 16#c) -> {more, 16#97, 16#18}; +dec_huffman_lookup(16#9e, 16#d) -> {more, 16#97, 16#1f}; +dec_huffman_lookup(16#9e, 16#e) -> {more, 16#97, 16#29}; +dec_huffman_lookup(16#9e, 16#f) -> {ok, 16#97, 16#38}; +dec_huffman_lookup(16#9f, 16#0) -> {more, 16#98, 16#02}; +dec_huffman_lookup(16#9f, 16#1) -> {more, 16#98, 16#09}; +dec_huffman_lookup(16#9f, 16#2) -> {more, 16#98, 16#17}; +dec_huffman_lookup(16#9f, 16#3) -> {ok, 16#98, 16#28}; +dec_huffman_lookup(16#9f, 16#4) -> {more, 16#9b, 16#02}; +dec_huffman_lookup(16#9f, 16#5) -> {more, 16#9b, 16#09}; +dec_huffman_lookup(16#9f, 16#6) -> {more, 16#9b, 16#17}; +dec_huffman_lookup(16#9f, 16#7) -> {ok, 16#9b, 16#28}; +dec_huffman_lookup(16#9f, 16#8) -> {more, 16#9d, 16#02}; +dec_huffman_lookup(16#9f, 16#9) -> {more, 16#9d, 16#09}; +dec_huffman_lookup(16#9f, 16#a) -> {more, 16#9d, 16#17}; +dec_huffman_lookup(16#9f, 16#b) -> {ok, 16#9d, 16#28}; +dec_huffman_lookup(16#9f, 16#c) -> {more, 16#9e, 16#02}; +dec_huffman_lookup(16#9f, 16#d) -> {more, 16#9e, 16#09}; +dec_huffman_lookup(16#9f, 16#e) -> {more, 16#9e, 16#17}; +dec_huffman_lookup(16#9f, 16#f) -> {ok, 16#9e, 16#28}; +dec_huffman_lookup(16#a0, 16#0) -> {more, 16#98, 16#03}; +dec_huffman_lookup(16#a0, 16#1) -> {more, 16#98, 16#06}; +dec_huffman_lookup(16#a0, 16#2) -> {more, 16#98, 16#0a}; +dec_huffman_lookup(16#a0, 16#3) -> {more, 16#98, 16#0f}; +dec_huffman_lookup(16#a0, 16#4) -> {more, 16#98, 16#18}; +dec_huffman_lookup(16#a0, 16#5) -> {more, 16#98, 16#1f}; +dec_huffman_lookup(16#a0, 16#6) -> {more, 16#98, 16#29}; +dec_huffman_lookup(16#a0, 16#7) -> {ok, 16#98, 16#38}; +dec_huffman_lookup(16#a0, 16#8) -> {more, 16#9b, 16#03}; +dec_huffman_lookup(16#a0, 16#9) -> {more, 16#9b, 16#06}; +dec_huffman_lookup(16#a0, 16#a) -> {more, 16#9b, 16#0a}; +dec_huffman_lookup(16#a0, 16#b) -> {more, 16#9b, 16#0f}; +dec_huffman_lookup(16#a0, 16#c) -> {more, 16#9b, 16#18}; 
+dec_huffman_lookup(16#a0, 16#d) -> {more, 16#9b, 16#1f}; +dec_huffman_lookup(16#a0, 16#e) -> {more, 16#9b, 16#29}; +dec_huffman_lookup(16#a0, 16#f) -> {ok, 16#9b, 16#38}; +dec_huffman_lookup(16#a1, 16#0) -> {more, 16#9d, 16#03}; +dec_huffman_lookup(16#a1, 16#1) -> {more, 16#9d, 16#06}; +dec_huffman_lookup(16#a1, 16#2) -> {more, 16#9d, 16#0a}; +dec_huffman_lookup(16#a1, 16#3) -> {more, 16#9d, 16#0f}; +dec_huffman_lookup(16#a1, 16#4) -> {more, 16#9d, 16#18}; +dec_huffman_lookup(16#a1, 16#5) -> {more, 16#9d, 16#1f}; +dec_huffman_lookup(16#a1, 16#6) -> {more, 16#9d, 16#29}; +dec_huffman_lookup(16#a1, 16#7) -> {ok, 16#9d, 16#38}; +dec_huffman_lookup(16#a1, 16#8) -> {more, 16#9e, 16#03}; +dec_huffman_lookup(16#a1, 16#9) -> {more, 16#9e, 16#06}; +dec_huffman_lookup(16#a1, 16#a) -> {more, 16#9e, 16#0a}; +dec_huffman_lookup(16#a1, 16#b) -> {more, 16#9e, 16#0f}; +dec_huffman_lookup(16#a1, 16#c) -> {more, 16#9e, 16#18}; +dec_huffman_lookup(16#a1, 16#d) -> {more, 16#9e, 16#1f}; +dec_huffman_lookup(16#a1, 16#e) -> {more, 16#9e, 16#29}; +dec_huffman_lookup(16#a1, 16#f) -> {ok, 16#9e, 16#38}; +dec_huffman_lookup(16#a2, 16#0) -> {more, 16#a5, 16#01}; +dec_huffman_lookup(16#a2, 16#1) -> {ok, 16#a5, 16#16}; +dec_huffman_lookup(16#a2, 16#2) -> {more, 16#a6, 16#01}; +dec_huffman_lookup(16#a2, 16#3) -> {ok, 16#a6, 16#16}; +dec_huffman_lookup(16#a2, 16#4) -> {more, 16#a8, 16#01}; +dec_huffman_lookup(16#a2, 16#5) -> {ok, 16#a8, 16#16}; +dec_huffman_lookup(16#a2, 16#6) -> {more, 16#ae, 16#01}; +dec_huffman_lookup(16#a2, 16#7) -> {ok, 16#ae, 16#16}; +dec_huffman_lookup(16#a2, 16#8) -> {more, 16#af, 16#01}; +dec_huffman_lookup(16#a2, 16#9) -> {ok, 16#af, 16#16}; +dec_huffman_lookup(16#a2, 16#a) -> {more, 16#b4, 16#01}; +dec_huffman_lookup(16#a2, 16#b) -> {ok, 16#b4, 16#16}; +dec_huffman_lookup(16#a2, 16#c) -> {more, 16#b6, 16#01}; +dec_huffman_lookup(16#a2, 16#d) -> {ok, 16#b6, 16#16}; +dec_huffman_lookup(16#a2, 16#e) -> {more, 16#b7, 16#01}; +dec_huffman_lookup(16#a2, 16#f) -> {ok, 16#b7, 16#16}; +dec_huffman_lookup(16#a3, 16#0) -> {more, 16#a5, 16#02}; +dec_huffman_lookup(16#a3, 16#1) -> {more, 16#a5, 16#09}; +dec_huffman_lookup(16#a3, 16#2) -> {more, 16#a5, 16#17}; +dec_huffman_lookup(16#a3, 16#3) -> {ok, 16#a5, 16#28}; +dec_huffman_lookup(16#a3, 16#4) -> {more, 16#a6, 16#02}; +dec_huffman_lookup(16#a3, 16#5) -> {more, 16#a6, 16#09}; +dec_huffman_lookup(16#a3, 16#6) -> {more, 16#a6, 16#17}; +dec_huffman_lookup(16#a3, 16#7) -> {ok, 16#a6, 16#28}; +dec_huffman_lookup(16#a3, 16#8) -> {more, 16#a8, 16#02}; +dec_huffman_lookup(16#a3, 16#9) -> {more, 16#a8, 16#09}; +dec_huffman_lookup(16#a3, 16#a) -> {more, 16#a8, 16#17}; +dec_huffman_lookup(16#a3, 16#b) -> {ok, 16#a8, 16#28}; +dec_huffman_lookup(16#a3, 16#c) -> {more, 16#ae, 16#02}; +dec_huffman_lookup(16#a3, 16#d) -> {more, 16#ae, 16#09}; +dec_huffman_lookup(16#a3, 16#e) -> {more, 16#ae, 16#17}; +dec_huffman_lookup(16#a3, 16#f) -> {ok, 16#ae, 16#28}; +dec_huffman_lookup(16#a4, 16#0) -> {more, 16#a5, 16#03}; +dec_huffman_lookup(16#a4, 16#1) -> {more, 16#a5, 16#06}; +dec_huffman_lookup(16#a4, 16#2) -> {more, 16#a5, 16#0a}; +dec_huffman_lookup(16#a4, 16#3) -> {more, 16#a5, 16#0f}; +dec_huffman_lookup(16#a4, 16#4) -> {more, 16#a5, 16#18}; +dec_huffman_lookup(16#a4, 16#5) -> {more, 16#a5, 16#1f}; +dec_huffman_lookup(16#a4, 16#6) -> {more, 16#a5, 16#29}; +dec_huffman_lookup(16#a4, 16#7) -> {ok, 16#a5, 16#38}; +dec_huffman_lookup(16#a4, 16#8) -> {more, 16#a6, 16#03}; +dec_huffman_lookup(16#a4, 16#9) -> {more, 16#a6, 16#06}; +dec_huffman_lookup(16#a4, 16#a) -> {more, 
16#a6, 16#0a}; +dec_huffman_lookup(16#a4, 16#b) -> {more, 16#a6, 16#0f}; +dec_huffman_lookup(16#a4, 16#c) -> {more, 16#a6, 16#18}; +dec_huffman_lookup(16#a4, 16#d) -> {more, 16#a6, 16#1f}; +dec_huffman_lookup(16#a4, 16#e) -> {more, 16#a6, 16#29}; +dec_huffman_lookup(16#a4, 16#f) -> {ok, 16#a6, 16#38}; +dec_huffman_lookup(16#a5, 16#0) -> {more, 16#a8, 16#03}; +dec_huffman_lookup(16#a5, 16#1) -> {more, 16#a8, 16#06}; +dec_huffman_lookup(16#a5, 16#2) -> {more, 16#a8, 16#0a}; +dec_huffman_lookup(16#a5, 16#3) -> {more, 16#a8, 16#0f}; +dec_huffman_lookup(16#a5, 16#4) -> {more, 16#a8, 16#18}; +dec_huffman_lookup(16#a5, 16#5) -> {more, 16#a8, 16#1f}; +dec_huffman_lookup(16#a5, 16#6) -> {more, 16#a8, 16#29}; +dec_huffman_lookup(16#a5, 16#7) -> {ok, 16#a8, 16#38}; +dec_huffman_lookup(16#a5, 16#8) -> {more, 16#ae, 16#03}; +dec_huffman_lookup(16#a5, 16#9) -> {more, 16#ae, 16#06}; +dec_huffman_lookup(16#a5, 16#a) -> {more, 16#ae, 16#0a}; +dec_huffman_lookup(16#a5, 16#b) -> {more, 16#ae, 16#0f}; +dec_huffman_lookup(16#a5, 16#c) -> {more, 16#ae, 16#18}; +dec_huffman_lookup(16#a5, 16#d) -> {more, 16#ae, 16#1f}; +dec_huffman_lookup(16#a5, 16#e) -> {more, 16#ae, 16#29}; +dec_huffman_lookup(16#a5, 16#f) -> {ok, 16#ae, 16#38}; +dec_huffman_lookup(16#a6, 16#0) -> {more, 16#af, 16#02}; +dec_huffman_lookup(16#a6, 16#1) -> {more, 16#af, 16#09}; +dec_huffman_lookup(16#a6, 16#2) -> {more, 16#af, 16#17}; +dec_huffman_lookup(16#a6, 16#3) -> {ok, 16#af, 16#28}; +dec_huffman_lookup(16#a6, 16#4) -> {more, 16#b4, 16#02}; +dec_huffman_lookup(16#a6, 16#5) -> {more, 16#b4, 16#09}; +dec_huffman_lookup(16#a6, 16#6) -> {more, 16#b4, 16#17}; +dec_huffman_lookup(16#a6, 16#7) -> {ok, 16#b4, 16#28}; +dec_huffman_lookup(16#a6, 16#8) -> {more, 16#b6, 16#02}; +dec_huffman_lookup(16#a6, 16#9) -> {more, 16#b6, 16#09}; +dec_huffman_lookup(16#a6, 16#a) -> {more, 16#b6, 16#17}; +dec_huffman_lookup(16#a6, 16#b) -> {ok, 16#b6, 16#28}; +dec_huffman_lookup(16#a6, 16#c) -> {more, 16#b7, 16#02}; +dec_huffman_lookup(16#a6, 16#d) -> {more, 16#b7, 16#09}; +dec_huffman_lookup(16#a6, 16#e) -> {more, 16#b7, 16#17}; +dec_huffman_lookup(16#a6, 16#f) -> {ok, 16#b7, 16#28}; +dec_huffman_lookup(16#a7, 16#0) -> {more, 16#af, 16#03}; +dec_huffman_lookup(16#a7, 16#1) -> {more, 16#af, 16#06}; +dec_huffman_lookup(16#a7, 16#2) -> {more, 16#af, 16#0a}; +dec_huffman_lookup(16#a7, 16#3) -> {more, 16#af, 16#0f}; +dec_huffman_lookup(16#a7, 16#4) -> {more, 16#af, 16#18}; +dec_huffman_lookup(16#a7, 16#5) -> {more, 16#af, 16#1f}; +dec_huffman_lookup(16#a7, 16#6) -> {more, 16#af, 16#29}; +dec_huffman_lookup(16#a7, 16#7) -> {ok, 16#af, 16#38}; +dec_huffman_lookup(16#a7, 16#8) -> {more, 16#b4, 16#03}; +dec_huffman_lookup(16#a7, 16#9) -> {more, 16#b4, 16#06}; +dec_huffman_lookup(16#a7, 16#a) -> {more, 16#b4, 16#0a}; +dec_huffman_lookup(16#a7, 16#b) -> {more, 16#b4, 16#0f}; +dec_huffman_lookup(16#a7, 16#c) -> {more, 16#b4, 16#18}; +dec_huffman_lookup(16#a7, 16#d) -> {more, 16#b4, 16#1f}; +dec_huffman_lookup(16#a7, 16#e) -> {more, 16#b4, 16#29}; +dec_huffman_lookup(16#a7, 16#f) -> {ok, 16#b4, 16#38}; +dec_huffman_lookup(16#a8, 16#0) -> {more, 16#b6, 16#03}; +dec_huffman_lookup(16#a8, 16#1) -> {more, 16#b6, 16#06}; +dec_huffman_lookup(16#a8, 16#2) -> {more, 16#b6, 16#0a}; +dec_huffman_lookup(16#a8, 16#3) -> {more, 16#b6, 16#0f}; +dec_huffman_lookup(16#a8, 16#4) -> {more, 16#b6, 16#18}; +dec_huffman_lookup(16#a8, 16#5) -> {more, 16#b6, 16#1f}; +dec_huffman_lookup(16#a8, 16#6) -> {more, 16#b6, 16#29}; +dec_huffman_lookup(16#a8, 16#7) -> {ok, 16#b6, 16#38}; 
+dec_huffman_lookup(16#a8, 16#8) -> {more, 16#b7, 16#03}; +dec_huffman_lookup(16#a8, 16#9) -> {more, 16#b7, 16#06}; +dec_huffman_lookup(16#a8, 16#a) -> {more, 16#b7, 16#0a}; +dec_huffman_lookup(16#a8, 16#b) -> {more, 16#b7, 16#0f}; +dec_huffman_lookup(16#a8, 16#c) -> {more, 16#b7, 16#18}; +dec_huffman_lookup(16#a8, 16#d) -> {more, 16#b7, 16#1f}; +dec_huffman_lookup(16#a8, 16#e) -> {more, 16#b7, 16#29}; +dec_huffman_lookup(16#a8, 16#f) -> {ok, 16#b7, 16#38}; +dec_huffman_lookup(16#a9, 16#0) -> {ok, 16#bc, 16#00}; +dec_huffman_lookup(16#a9, 16#1) -> {ok, 16#bf, 16#00}; +dec_huffman_lookup(16#a9, 16#2) -> {ok, 16#c5, 16#00}; +dec_huffman_lookup(16#a9, 16#3) -> {ok, 16#e7, 16#00}; +dec_huffman_lookup(16#a9, 16#4) -> {ok, 16#ef, 16#00}; +dec_huffman_lookup(16#a9, 16#5) -> {more, undefined, 16#b0}; +dec_huffman_lookup(16#a9, 16#6) -> {more, undefined, 16#b2}; +dec_huffman_lookup(16#a9, 16#7) -> {more, undefined, 16#b3}; +dec_huffman_lookup(16#a9, 16#8) -> {more, undefined, 16#b7}; +dec_huffman_lookup(16#a9, 16#9) -> {more, undefined, 16#b8}; +dec_huffman_lookup(16#a9, 16#a) -> {more, undefined, 16#ba}; +dec_huffman_lookup(16#a9, 16#b) -> {more, undefined, 16#bb}; +dec_huffman_lookup(16#a9, 16#c) -> {more, undefined, 16#c0}; +dec_huffman_lookup(16#a9, 16#d) -> {more, undefined, 16#c7}; +dec_huffman_lookup(16#a9, 16#e) -> {more, undefined, 16#d0}; +dec_huffman_lookup(16#a9, 16#f) -> {ok, undefined, 16#df}; +dec_huffman_lookup(16#aa, 16#0) -> {more, 16#bc, 16#01}; +dec_huffman_lookup(16#aa, 16#1) -> {ok, 16#bc, 16#16}; +dec_huffman_lookup(16#aa, 16#2) -> {more, 16#bf, 16#01}; +dec_huffman_lookup(16#aa, 16#3) -> {ok, 16#bf, 16#16}; +dec_huffman_lookup(16#aa, 16#4) -> {more, 16#c5, 16#01}; +dec_huffman_lookup(16#aa, 16#5) -> {ok, 16#c5, 16#16}; +dec_huffman_lookup(16#aa, 16#6) -> {more, 16#e7, 16#01}; +dec_huffman_lookup(16#aa, 16#7) -> {ok, 16#e7, 16#16}; +dec_huffman_lookup(16#aa, 16#8) -> {more, 16#ef, 16#01}; +dec_huffman_lookup(16#aa, 16#9) -> {ok, 16#ef, 16#16}; +dec_huffman_lookup(16#aa, 16#a) -> {ok, 16#09, 16#00}; +dec_huffman_lookup(16#aa, 16#b) -> {ok, 16#8e, 16#00}; +dec_huffman_lookup(16#aa, 16#c) -> {ok, 16#90, 16#00}; +dec_huffman_lookup(16#aa, 16#d) -> {ok, 16#91, 16#00}; +dec_huffman_lookup(16#aa, 16#e) -> {ok, 16#94, 16#00}; +dec_huffman_lookup(16#aa, 16#f) -> {ok, 16#9f, 16#00}; +dec_huffman_lookup(16#ab, 16#0) -> {more, 16#bc, 16#02}; +dec_huffman_lookup(16#ab, 16#1) -> {more, 16#bc, 16#09}; +dec_huffman_lookup(16#ab, 16#2) -> {more, 16#bc, 16#17}; +dec_huffman_lookup(16#ab, 16#3) -> {ok, 16#bc, 16#28}; +dec_huffman_lookup(16#ab, 16#4) -> {more, 16#bf, 16#02}; +dec_huffman_lookup(16#ab, 16#5) -> {more, 16#bf, 16#09}; +dec_huffman_lookup(16#ab, 16#6) -> {more, 16#bf, 16#17}; +dec_huffman_lookup(16#ab, 16#7) -> {ok, 16#bf, 16#28}; +dec_huffman_lookup(16#ab, 16#8) -> {more, 16#c5, 16#02}; +dec_huffman_lookup(16#ab, 16#9) -> {more, 16#c5, 16#09}; +dec_huffman_lookup(16#ab, 16#a) -> {more, 16#c5, 16#17}; +dec_huffman_lookup(16#ab, 16#b) -> {ok, 16#c5, 16#28}; +dec_huffman_lookup(16#ab, 16#c) -> {more, 16#e7, 16#02}; +dec_huffman_lookup(16#ab, 16#d) -> {more, 16#e7, 16#09}; +dec_huffman_lookup(16#ab, 16#e) -> {more, 16#e7, 16#17}; +dec_huffman_lookup(16#ab, 16#f) -> {ok, 16#e7, 16#28}; +dec_huffman_lookup(16#ac, 16#0) -> {more, 16#bc, 16#03}; +dec_huffman_lookup(16#ac, 16#1) -> {more, 16#bc, 16#06}; +dec_huffman_lookup(16#ac, 16#2) -> {more, 16#bc, 16#0a}; +dec_huffman_lookup(16#ac, 16#3) -> {more, 16#bc, 16#0f}; +dec_huffman_lookup(16#ac, 16#4) -> {more, 16#bc, 16#18}; 
+dec_huffman_lookup(16#ac, 16#5) -> {more, 16#bc, 16#1f}; +dec_huffman_lookup(16#ac, 16#6) -> {more, 16#bc, 16#29}; +dec_huffman_lookup(16#ac, 16#7) -> {ok, 16#bc, 16#38}; +dec_huffman_lookup(16#ac, 16#8) -> {more, 16#bf, 16#03}; +dec_huffman_lookup(16#ac, 16#9) -> {more, 16#bf, 16#06}; +dec_huffman_lookup(16#ac, 16#a) -> {more, 16#bf, 16#0a}; +dec_huffman_lookup(16#ac, 16#b) -> {more, 16#bf, 16#0f}; +dec_huffman_lookup(16#ac, 16#c) -> {more, 16#bf, 16#18}; +dec_huffman_lookup(16#ac, 16#d) -> {more, 16#bf, 16#1f}; +dec_huffman_lookup(16#ac, 16#e) -> {more, 16#bf, 16#29}; +dec_huffman_lookup(16#ac, 16#f) -> {ok, 16#bf, 16#38}; +dec_huffman_lookup(16#ad, 16#0) -> {more, 16#c5, 16#03}; +dec_huffman_lookup(16#ad, 16#1) -> {more, 16#c5, 16#06}; +dec_huffman_lookup(16#ad, 16#2) -> {more, 16#c5, 16#0a}; +dec_huffman_lookup(16#ad, 16#3) -> {more, 16#c5, 16#0f}; +dec_huffman_lookup(16#ad, 16#4) -> {more, 16#c5, 16#18}; +dec_huffman_lookup(16#ad, 16#5) -> {more, 16#c5, 16#1f}; +dec_huffman_lookup(16#ad, 16#6) -> {more, 16#c5, 16#29}; +dec_huffman_lookup(16#ad, 16#7) -> {ok, 16#c5, 16#38}; +dec_huffman_lookup(16#ad, 16#8) -> {more, 16#e7, 16#03}; +dec_huffman_lookup(16#ad, 16#9) -> {more, 16#e7, 16#06}; +dec_huffman_lookup(16#ad, 16#a) -> {more, 16#e7, 16#0a}; +dec_huffman_lookup(16#ad, 16#b) -> {more, 16#e7, 16#0f}; +dec_huffman_lookup(16#ad, 16#c) -> {more, 16#e7, 16#18}; +dec_huffman_lookup(16#ad, 16#d) -> {more, 16#e7, 16#1f}; +dec_huffman_lookup(16#ad, 16#e) -> {more, 16#e7, 16#29}; +dec_huffman_lookup(16#ad, 16#f) -> {ok, 16#e7, 16#38}; +dec_huffman_lookup(16#ae, 16#0) -> {more, 16#ef, 16#02}; +dec_huffman_lookup(16#ae, 16#1) -> {more, 16#ef, 16#09}; +dec_huffman_lookup(16#ae, 16#2) -> {more, 16#ef, 16#17}; +dec_huffman_lookup(16#ae, 16#3) -> {ok, 16#ef, 16#28}; +dec_huffman_lookup(16#ae, 16#4) -> {more, 16#09, 16#01}; +dec_huffman_lookup(16#ae, 16#5) -> {ok, 16#09, 16#16}; +dec_huffman_lookup(16#ae, 16#6) -> {more, 16#8e, 16#01}; +dec_huffman_lookup(16#ae, 16#7) -> {ok, 16#8e, 16#16}; +dec_huffman_lookup(16#ae, 16#8) -> {more, 16#90, 16#01}; +dec_huffman_lookup(16#ae, 16#9) -> {ok, 16#90, 16#16}; +dec_huffman_lookup(16#ae, 16#a) -> {more, 16#91, 16#01}; +dec_huffman_lookup(16#ae, 16#b) -> {ok, 16#91, 16#16}; +dec_huffman_lookup(16#ae, 16#c) -> {more, 16#94, 16#01}; +dec_huffman_lookup(16#ae, 16#d) -> {ok, 16#94, 16#16}; +dec_huffman_lookup(16#ae, 16#e) -> {more, 16#9f, 16#01}; +dec_huffman_lookup(16#ae, 16#f) -> {ok, 16#9f, 16#16}; +dec_huffman_lookup(16#af, 16#0) -> {more, 16#ef, 16#03}; +dec_huffman_lookup(16#af, 16#1) -> {more, 16#ef, 16#06}; +dec_huffman_lookup(16#af, 16#2) -> {more, 16#ef, 16#0a}; +dec_huffman_lookup(16#af, 16#3) -> {more, 16#ef, 16#0f}; +dec_huffman_lookup(16#af, 16#4) -> {more, 16#ef, 16#18}; +dec_huffman_lookup(16#af, 16#5) -> {more, 16#ef, 16#1f}; +dec_huffman_lookup(16#af, 16#6) -> {more, 16#ef, 16#29}; +dec_huffman_lookup(16#af, 16#7) -> {ok, 16#ef, 16#38}; +dec_huffman_lookup(16#af, 16#8) -> {more, 16#09, 16#02}; +dec_huffman_lookup(16#af, 16#9) -> {more, 16#09, 16#09}; +dec_huffman_lookup(16#af, 16#a) -> {more, 16#09, 16#17}; +dec_huffman_lookup(16#af, 16#b) -> {ok, 16#09, 16#28}; +dec_huffman_lookup(16#af, 16#c) -> {more, 16#8e, 16#02}; +dec_huffman_lookup(16#af, 16#d) -> {more, 16#8e, 16#09}; +dec_huffman_lookup(16#af, 16#e) -> {more, 16#8e, 16#17}; +dec_huffman_lookup(16#af, 16#f) -> {ok, 16#8e, 16#28}; +dec_huffman_lookup(16#b0, 16#0) -> {more, 16#09, 16#03}; +dec_huffman_lookup(16#b0, 16#1) -> {more, 16#09, 16#06}; +dec_huffman_lookup(16#b0, 16#2) -> {more, 
16#09, 16#0a}; +dec_huffman_lookup(16#b0, 16#3) -> {more, 16#09, 16#0f}; +dec_huffman_lookup(16#b0, 16#4) -> {more, 16#09, 16#18}; +dec_huffman_lookup(16#b0, 16#5) -> {more, 16#09, 16#1f}; +dec_huffman_lookup(16#b0, 16#6) -> {more, 16#09, 16#29}; +dec_huffman_lookup(16#b0, 16#7) -> {ok, 16#09, 16#38}; +dec_huffman_lookup(16#b0, 16#8) -> {more, 16#8e, 16#03}; +dec_huffman_lookup(16#b0, 16#9) -> {more, 16#8e, 16#06}; +dec_huffman_lookup(16#b0, 16#a) -> {more, 16#8e, 16#0a}; +dec_huffman_lookup(16#b0, 16#b) -> {more, 16#8e, 16#0f}; +dec_huffman_lookup(16#b0, 16#c) -> {more, 16#8e, 16#18}; +dec_huffman_lookup(16#b0, 16#d) -> {more, 16#8e, 16#1f}; +dec_huffman_lookup(16#b0, 16#e) -> {more, 16#8e, 16#29}; +dec_huffman_lookup(16#b0, 16#f) -> {ok, 16#8e, 16#38}; +dec_huffman_lookup(16#b1, 16#0) -> {more, 16#90, 16#02}; +dec_huffman_lookup(16#b1, 16#1) -> {more, 16#90, 16#09}; +dec_huffman_lookup(16#b1, 16#2) -> {more, 16#90, 16#17}; +dec_huffman_lookup(16#b1, 16#3) -> {ok, 16#90, 16#28}; +dec_huffman_lookup(16#b1, 16#4) -> {more, 16#91, 16#02}; +dec_huffman_lookup(16#b1, 16#5) -> {more, 16#91, 16#09}; +dec_huffman_lookup(16#b1, 16#6) -> {more, 16#91, 16#17}; +dec_huffman_lookup(16#b1, 16#7) -> {ok, 16#91, 16#28}; +dec_huffman_lookup(16#b1, 16#8) -> {more, 16#94, 16#02}; +dec_huffman_lookup(16#b1, 16#9) -> {more, 16#94, 16#09}; +dec_huffman_lookup(16#b1, 16#a) -> {more, 16#94, 16#17}; +dec_huffman_lookup(16#b1, 16#b) -> {ok, 16#94, 16#28}; +dec_huffman_lookup(16#b1, 16#c) -> {more, 16#9f, 16#02}; +dec_huffman_lookup(16#b1, 16#d) -> {more, 16#9f, 16#09}; +dec_huffman_lookup(16#b1, 16#e) -> {more, 16#9f, 16#17}; +dec_huffman_lookup(16#b1, 16#f) -> {ok, 16#9f, 16#28}; +dec_huffman_lookup(16#b2, 16#0) -> {more, 16#90, 16#03}; +dec_huffman_lookup(16#b2, 16#1) -> {more, 16#90, 16#06}; +dec_huffman_lookup(16#b2, 16#2) -> {more, 16#90, 16#0a}; +dec_huffman_lookup(16#b2, 16#3) -> {more, 16#90, 16#0f}; +dec_huffman_lookup(16#b2, 16#4) -> {more, 16#90, 16#18}; +dec_huffman_lookup(16#b2, 16#5) -> {more, 16#90, 16#1f}; +dec_huffman_lookup(16#b2, 16#6) -> {more, 16#90, 16#29}; +dec_huffman_lookup(16#b2, 16#7) -> {ok, 16#90, 16#38}; +dec_huffman_lookup(16#b2, 16#8) -> {more, 16#91, 16#03}; +dec_huffman_lookup(16#b2, 16#9) -> {more, 16#91, 16#06}; +dec_huffman_lookup(16#b2, 16#a) -> {more, 16#91, 16#0a}; +dec_huffman_lookup(16#b2, 16#b) -> {more, 16#91, 16#0f}; +dec_huffman_lookup(16#b2, 16#c) -> {more, 16#91, 16#18}; +dec_huffman_lookup(16#b2, 16#d) -> {more, 16#91, 16#1f}; +dec_huffman_lookup(16#b2, 16#e) -> {more, 16#91, 16#29}; +dec_huffman_lookup(16#b2, 16#f) -> {ok, 16#91, 16#38}; +dec_huffman_lookup(16#b3, 16#0) -> {more, 16#94, 16#03}; +dec_huffman_lookup(16#b3, 16#1) -> {more, 16#94, 16#06}; +dec_huffman_lookup(16#b3, 16#2) -> {more, 16#94, 16#0a}; +dec_huffman_lookup(16#b3, 16#3) -> {more, 16#94, 16#0f}; +dec_huffman_lookup(16#b3, 16#4) -> {more, 16#94, 16#18}; +dec_huffman_lookup(16#b3, 16#5) -> {more, 16#94, 16#1f}; +dec_huffman_lookup(16#b3, 16#6) -> {more, 16#94, 16#29}; +dec_huffman_lookup(16#b3, 16#7) -> {ok, 16#94, 16#38}; +dec_huffman_lookup(16#b3, 16#8) -> {more, 16#9f, 16#03}; +dec_huffman_lookup(16#b3, 16#9) -> {more, 16#9f, 16#06}; +dec_huffman_lookup(16#b3, 16#a) -> {more, 16#9f, 16#0a}; +dec_huffman_lookup(16#b3, 16#b) -> {more, 16#9f, 16#0f}; +dec_huffman_lookup(16#b3, 16#c) -> {more, 16#9f, 16#18}; +dec_huffman_lookup(16#b3, 16#d) -> {more, 16#9f, 16#1f}; +dec_huffman_lookup(16#b3, 16#e) -> {more, 16#9f, 16#29}; +dec_huffman_lookup(16#b3, 16#f) -> {ok, 16#9f, 16#38}; 
+dec_huffman_lookup(16#b4, 16#0) -> {ok, 16#ab, 16#00}; +dec_huffman_lookup(16#b4, 16#1) -> {ok, 16#ce, 16#00}; +dec_huffman_lookup(16#b4, 16#2) -> {ok, 16#d7, 16#00}; +dec_huffman_lookup(16#b4, 16#3) -> {ok, 16#e1, 16#00}; +dec_huffman_lookup(16#b4, 16#4) -> {ok, 16#ec, 16#00}; +dec_huffman_lookup(16#b4, 16#5) -> {ok, 16#ed, 16#00}; +dec_huffman_lookup(16#b4, 16#6) -> {more, undefined, 16#bc}; +dec_huffman_lookup(16#b4, 16#7) -> {more, undefined, 16#bd}; +dec_huffman_lookup(16#b4, 16#8) -> {more, undefined, 16#c1}; +dec_huffman_lookup(16#b4, 16#9) -> {more, undefined, 16#c4}; +dec_huffman_lookup(16#b4, 16#a) -> {more, undefined, 16#c8}; +dec_huffman_lookup(16#b4, 16#b) -> {more, undefined, 16#cb}; +dec_huffman_lookup(16#b4, 16#c) -> {more, undefined, 16#d1}; +dec_huffman_lookup(16#b4, 16#d) -> {more, undefined, 16#d8}; +dec_huffman_lookup(16#b4, 16#e) -> {more, undefined, 16#e0}; +dec_huffman_lookup(16#b4, 16#f) -> {ok, undefined, 16#ee}; +dec_huffman_lookup(16#b5, 16#0) -> {more, 16#ab, 16#01}; +dec_huffman_lookup(16#b5, 16#1) -> {ok, 16#ab, 16#16}; +dec_huffman_lookup(16#b5, 16#2) -> {more, 16#ce, 16#01}; +dec_huffman_lookup(16#b5, 16#3) -> {ok, 16#ce, 16#16}; +dec_huffman_lookup(16#b5, 16#4) -> {more, 16#d7, 16#01}; +dec_huffman_lookup(16#b5, 16#5) -> {ok, 16#d7, 16#16}; +dec_huffman_lookup(16#b5, 16#6) -> {more, 16#e1, 16#01}; +dec_huffman_lookup(16#b5, 16#7) -> {ok, 16#e1, 16#16}; +dec_huffman_lookup(16#b5, 16#8) -> {more, 16#ec, 16#01}; +dec_huffman_lookup(16#b5, 16#9) -> {ok, 16#ec, 16#16}; +dec_huffman_lookup(16#b5, 16#a) -> {more, 16#ed, 16#01}; +dec_huffman_lookup(16#b5, 16#b) -> {ok, 16#ed, 16#16}; +dec_huffman_lookup(16#b5, 16#c) -> {ok, 16#c7, 16#00}; +dec_huffman_lookup(16#b5, 16#d) -> {ok, 16#cf, 16#00}; +dec_huffman_lookup(16#b5, 16#e) -> {ok, 16#ea, 16#00}; +dec_huffman_lookup(16#b5, 16#f) -> {ok, 16#eb, 16#00}; +dec_huffman_lookup(16#b6, 16#0) -> {more, 16#ab, 16#02}; +dec_huffman_lookup(16#b6, 16#1) -> {more, 16#ab, 16#09}; +dec_huffman_lookup(16#b6, 16#2) -> {more, 16#ab, 16#17}; +dec_huffman_lookup(16#b6, 16#3) -> {ok, 16#ab, 16#28}; +dec_huffman_lookup(16#b6, 16#4) -> {more, 16#ce, 16#02}; +dec_huffman_lookup(16#b6, 16#5) -> {more, 16#ce, 16#09}; +dec_huffman_lookup(16#b6, 16#6) -> {more, 16#ce, 16#17}; +dec_huffman_lookup(16#b6, 16#7) -> {ok, 16#ce, 16#28}; +dec_huffman_lookup(16#b6, 16#8) -> {more, 16#d7, 16#02}; +dec_huffman_lookup(16#b6, 16#9) -> {more, 16#d7, 16#09}; +dec_huffman_lookup(16#b6, 16#a) -> {more, 16#d7, 16#17}; +dec_huffman_lookup(16#b6, 16#b) -> {ok, 16#d7, 16#28}; +dec_huffman_lookup(16#b6, 16#c) -> {more, 16#e1, 16#02}; +dec_huffman_lookup(16#b6, 16#d) -> {more, 16#e1, 16#09}; +dec_huffman_lookup(16#b6, 16#e) -> {more, 16#e1, 16#17}; +dec_huffman_lookup(16#b6, 16#f) -> {ok, 16#e1, 16#28}; +dec_huffman_lookup(16#b7, 16#0) -> {more, 16#ab, 16#03}; +dec_huffman_lookup(16#b7, 16#1) -> {more, 16#ab, 16#06}; +dec_huffman_lookup(16#b7, 16#2) -> {more, 16#ab, 16#0a}; +dec_huffman_lookup(16#b7, 16#3) -> {more, 16#ab, 16#0f}; +dec_huffman_lookup(16#b7, 16#4) -> {more, 16#ab, 16#18}; +dec_huffman_lookup(16#b7, 16#5) -> {more, 16#ab, 16#1f}; +dec_huffman_lookup(16#b7, 16#6) -> {more, 16#ab, 16#29}; +dec_huffman_lookup(16#b7, 16#7) -> {ok, 16#ab, 16#38}; +dec_huffman_lookup(16#b7, 16#8) -> {more, 16#ce, 16#03}; +dec_huffman_lookup(16#b7, 16#9) -> {more, 16#ce, 16#06}; +dec_huffman_lookup(16#b7, 16#a) -> {more, 16#ce, 16#0a}; +dec_huffman_lookup(16#b7, 16#b) -> {more, 16#ce, 16#0f}; +dec_huffman_lookup(16#b7, 16#c) -> {more, 16#ce, 16#18}; 
+dec_huffman_lookup(16#b7, 16#d) -> {more, 16#ce, 16#1f}; +dec_huffman_lookup(16#b7, 16#e) -> {more, 16#ce, 16#29}; +dec_huffman_lookup(16#b7, 16#f) -> {ok, 16#ce, 16#38}; +dec_huffman_lookup(16#b8, 16#0) -> {more, 16#d7, 16#03}; +dec_huffman_lookup(16#b8, 16#1) -> {more, 16#d7, 16#06}; +dec_huffman_lookup(16#b8, 16#2) -> {more, 16#d7, 16#0a}; +dec_huffman_lookup(16#b8, 16#3) -> {more, 16#d7, 16#0f}; +dec_huffman_lookup(16#b8, 16#4) -> {more, 16#d7, 16#18}; +dec_huffman_lookup(16#b8, 16#5) -> {more, 16#d7, 16#1f}; +dec_huffman_lookup(16#b8, 16#6) -> {more, 16#d7, 16#29}; +dec_huffman_lookup(16#b8, 16#7) -> {ok, 16#d7, 16#38}; +dec_huffman_lookup(16#b8, 16#8) -> {more, 16#e1, 16#03}; +dec_huffman_lookup(16#b8, 16#9) -> {more, 16#e1, 16#06}; +dec_huffman_lookup(16#b8, 16#a) -> {more, 16#e1, 16#0a}; +dec_huffman_lookup(16#b8, 16#b) -> {more, 16#e1, 16#0f}; +dec_huffman_lookup(16#b8, 16#c) -> {more, 16#e1, 16#18}; +dec_huffman_lookup(16#b8, 16#d) -> {more, 16#e1, 16#1f}; +dec_huffman_lookup(16#b8, 16#e) -> {more, 16#e1, 16#29}; +dec_huffman_lookup(16#b8, 16#f) -> {ok, 16#e1, 16#38}; +dec_huffman_lookup(16#b9, 16#0) -> {more, 16#ec, 16#02}; +dec_huffman_lookup(16#b9, 16#1) -> {more, 16#ec, 16#09}; +dec_huffman_lookup(16#b9, 16#2) -> {more, 16#ec, 16#17}; +dec_huffman_lookup(16#b9, 16#3) -> {ok, 16#ec, 16#28}; +dec_huffman_lookup(16#b9, 16#4) -> {more, 16#ed, 16#02}; +dec_huffman_lookup(16#b9, 16#5) -> {more, 16#ed, 16#09}; +dec_huffman_lookup(16#b9, 16#6) -> {more, 16#ed, 16#17}; +dec_huffman_lookup(16#b9, 16#7) -> {ok, 16#ed, 16#28}; +dec_huffman_lookup(16#b9, 16#8) -> {more, 16#c7, 16#01}; +dec_huffman_lookup(16#b9, 16#9) -> {ok, 16#c7, 16#16}; +dec_huffman_lookup(16#b9, 16#a) -> {more, 16#cf, 16#01}; +dec_huffman_lookup(16#b9, 16#b) -> {ok, 16#cf, 16#16}; +dec_huffman_lookup(16#b9, 16#c) -> {more, 16#ea, 16#01}; +dec_huffman_lookup(16#b9, 16#d) -> {ok, 16#ea, 16#16}; +dec_huffman_lookup(16#b9, 16#e) -> {more, 16#eb, 16#01}; +dec_huffman_lookup(16#b9, 16#f) -> {ok, 16#eb, 16#16}; +dec_huffman_lookup(16#ba, 16#0) -> {more, 16#ec, 16#03}; +dec_huffman_lookup(16#ba, 16#1) -> {more, 16#ec, 16#06}; +dec_huffman_lookup(16#ba, 16#2) -> {more, 16#ec, 16#0a}; +dec_huffman_lookup(16#ba, 16#3) -> {more, 16#ec, 16#0f}; +dec_huffman_lookup(16#ba, 16#4) -> {more, 16#ec, 16#18}; +dec_huffman_lookup(16#ba, 16#5) -> {more, 16#ec, 16#1f}; +dec_huffman_lookup(16#ba, 16#6) -> {more, 16#ec, 16#29}; +dec_huffman_lookup(16#ba, 16#7) -> {ok, 16#ec, 16#38}; +dec_huffman_lookup(16#ba, 16#8) -> {more, 16#ed, 16#03}; +dec_huffman_lookup(16#ba, 16#9) -> {more, 16#ed, 16#06}; +dec_huffman_lookup(16#ba, 16#a) -> {more, 16#ed, 16#0a}; +dec_huffman_lookup(16#ba, 16#b) -> {more, 16#ed, 16#0f}; +dec_huffman_lookup(16#ba, 16#c) -> {more, 16#ed, 16#18}; +dec_huffman_lookup(16#ba, 16#d) -> {more, 16#ed, 16#1f}; +dec_huffman_lookup(16#ba, 16#e) -> {more, 16#ed, 16#29}; +dec_huffman_lookup(16#ba, 16#f) -> {ok, 16#ed, 16#38}; +dec_huffman_lookup(16#bb, 16#0) -> {more, 16#c7, 16#02}; +dec_huffman_lookup(16#bb, 16#1) -> {more, 16#c7, 16#09}; +dec_huffman_lookup(16#bb, 16#2) -> {more, 16#c7, 16#17}; +dec_huffman_lookup(16#bb, 16#3) -> {ok, 16#c7, 16#28}; +dec_huffman_lookup(16#bb, 16#4) -> {more, 16#cf, 16#02}; +dec_huffman_lookup(16#bb, 16#5) -> {more, 16#cf, 16#09}; +dec_huffman_lookup(16#bb, 16#6) -> {more, 16#cf, 16#17}; +dec_huffman_lookup(16#bb, 16#7) -> {ok, 16#cf, 16#28}; +dec_huffman_lookup(16#bb, 16#8) -> {more, 16#ea, 16#02}; +dec_huffman_lookup(16#bb, 16#9) -> {more, 16#ea, 16#09}; +dec_huffman_lookup(16#bb, 16#a) -> {more, 
16#ea, 16#17}; +dec_huffman_lookup(16#bb, 16#b) -> {ok, 16#ea, 16#28}; +dec_huffman_lookup(16#bb, 16#c) -> {more, 16#eb, 16#02}; +dec_huffman_lookup(16#bb, 16#d) -> {more, 16#eb, 16#09}; +dec_huffman_lookup(16#bb, 16#e) -> {more, 16#eb, 16#17}; +dec_huffman_lookup(16#bb, 16#f) -> {ok, 16#eb, 16#28}; +dec_huffman_lookup(16#bc, 16#0) -> {more, 16#c7, 16#03}; +dec_huffman_lookup(16#bc, 16#1) -> {more, 16#c7, 16#06}; +dec_huffman_lookup(16#bc, 16#2) -> {more, 16#c7, 16#0a}; +dec_huffman_lookup(16#bc, 16#3) -> {more, 16#c7, 16#0f}; +dec_huffman_lookup(16#bc, 16#4) -> {more, 16#c7, 16#18}; +dec_huffman_lookup(16#bc, 16#5) -> {more, 16#c7, 16#1f}; +dec_huffman_lookup(16#bc, 16#6) -> {more, 16#c7, 16#29}; +dec_huffman_lookup(16#bc, 16#7) -> {ok, 16#c7, 16#38}; +dec_huffman_lookup(16#bc, 16#8) -> {more, 16#cf, 16#03}; +dec_huffman_lookup(16#bc, 16#9) -> {more, 16#cf, 16#06}; +dec_huffman_lookup(16#bc, 16#a) -> {more, 16#cf, 16#0a}; +dec_huffman_lookup(16#bc, 16#b) -> {more, 16#cf, 16#0f}; +dec_huffman_lookup(16#bc, 16#c) -> {more, 16#cf, 16#18}; +dec_huffman_lookup(16#bc, 16#d) -> {more, 16#cf, 16#1f}; +dec_huffman_lookup(16#bc, 16#e) -> {more, 16#cf, 16#29}; +dec_huffman_lookup(16#bc, 16#f) -> {ok, 16#cf, 16#38}; +dec_huffman_lookup(16#bd, 16#0) -> {more, 16#ea, 16#03}; +dec_huffman_lookup(16#bd, 16#1) -> {more, 16#ea, 16#06}; +dec_huffman_lookup(16#bd, 16#2) -> {more, 16#ea, 16#0a}; +dec_huffman_lookup(16#bd, 16#3) -> {more, 16#ea, 16#0f}; +dec_huffman_lookup(16#bd, 16#4) -> {more, 16#ea, 16#18}; +dec_huffman_lookup(16#bd, 16#5) -> {more, 16#ea, 16#1f}; +dec_huffman_lookup(16#bd, 16#6) -> {more, 16#ea, 16#29}; +dec_huffman_lookup(16#bd, 16#7) -> {ok, 16#ea, 16#38}; +dec_huffman_lookup(16#bd, 16#8) -> {more, 16#eb, 16#03}; +dec_huffman_lookup(16#bd, 16#9) -> {more, 16#eb, 16#06}; +dec_huffman_lookup(16#bd, 16#a) -> {more, 16#eb, 16#0a}; +dec_huffman_lookup(16#bd, 16#b) -> {more, 16#eb, 16#0f}; +dec_huffman_lookup(16#bd, 16#c) -> {more, 16#eb, 16#18}; +dec_huffman_lookup(16#bd, 16#d) -> {more, 16#eb, 16#1f}; +dec_huffman_lookup(16#bd, 16#e) -> {more, 16#eb, 16#29}; +dec_huffman_lookup(16#bd, 16#f) -> {ok, 16#eb, 16#38}; +dec_huffman_lookup(16#be, 16#0) -> {more, undefined, 16#c2}; +dec_huffman_lookup(16#be, 16#1) -> {more, undefined, 16#c3}; +dec_huffman_lookup(16#be, 16#2) -> {more, undefined, 16#c5}; +dec_huffman_lookup(16#be, 16#3) -> {more, undefined, 16#c6}; +dec_huffman_lookup(16#be, 16#4) -> {more, undefined, 16#c9}; +dec_huffman_lookup(16#be, 16#5) -> {more, undefined, 16#ca}; +dec_huffman_lookup(16#be, 16#6) -> {more, undefined, 16#cc}; +dec_huffman_lookup(16#be, 16#7) -> {more, undefined, 16#cd}; +dec_huffman_lookup(16#be, 16#8) -> {more, undefined, 16#d2}; +dec_huffman_lookup(16#be, 16#9) -> {more, undefined, 16#d5}; +dec_huffman_lookup(16#be, 16#a) -> {more, undefined, 16#d9}; +dec_huffman_lookup(16#be, 16#b) -> {more, undefined, 16#dc}; +dec_huffman_lookup(16#be, 16#c) -> {more, undefined, 16#e1}; +dec_huffman_lookup(16#be, 16#d) -> {more, undefined, 16#e7}; +dec_huffman_lookup(16#be, 16#e) -> {more, undefined, 16#ef}; +dec_huffman_lookup(16#be, 16#f) -> {ok, undefined, 16#f6}; +dec_huffman_lookup(16#bf, 16#0) -> {ok, 16#c0, 16#00}; +dec_huffman_lookup(16#bf, 16#1) -> {ok, 16#c1, 16#00}; +dec_huffman_lookup(16#bf, 16#2) -> {ok, 16#c8, 16#00}; +dec_huffman_lookup(16#bf, 16#3) -> {ok, 16#c9, 16#00}; +dec_huffman_lookup(16#bf, 16#4) -> {ok, 16#ca, 16#00}; +dec_huffman_lookup(16#bf, 16#5) -> {ok, 16#cd, 16#00}; +dec_huffman_lookup(16#bf, 16#6) -> {ok, 16#d2, 16#00}; 
+dec_huffman_lookup(16#bf, 16#7) -> {ok, 16#d5, 16#00}; +dec_huffman_lookup(16#bf, 16#8) -> {ok, 16#da, 16#00}; +dec_huffman_lookup(16#bf, 16#9) -> {ok, 16#db, 16#00}; +dec_huffman_lookup(16#bf, 16#a) -> {ok, 16#ee, 16#00}; +dec_huffman_lookup(16#bf, 16#b) -> {ok, 16#f0, 16#00}; +dec_huffman_lookup(16#bf, 16#c) -> {ok, 16#f2, 16#00}; +dec_huffman_lookup(16#bf, 16#d) -> {ok, 16#f3, 16#00}; +dec_huffman_lookup(16#bf, 16#e) -> {ok, 16#ff, 16#00}; +dec_huffman_lookup(16#bf, 16#f) -> {more, undefined, 16#ce}; +dec_huffman_lookup(16#c0, 16#0) -> {more, 16#c0, 16#01}; +dec_huffman_lookup(16#c0, 16#1) -> {ok, 16#c0, 16#16}; +dec_huffman_lookup(16#c0, 16#2) -> {more, 16#c1, 16#01}; +dec_huffman_lookup(16#c0, 16#3) -> {ok, 16#c1, 16#16}; +dec_huffman_lookup(16#c0, 16#4) -> {more, 16#c8, 16#01}; +dec_huffman_lookup(16#c0, 16#5) -> {ok, 16#c8, 16#16}; +dec_huffman_lookup(16#c0, 16#6) -> {more, 16#c9, 16#01}; +dec_huffman_lookup(16#c0, 16#7) -> {ok, 16#c9, 16#16}; +dec_huffman_lookup(16#c0, 16#8) -> {more, 16#ca, 16#01}; +dec_huffman_lookup(16#c0, 16#9) -> {ok, 16#ca, 16#16}; +dec_huffman_lookup(16#c0, 16#a) -> {more, 16#cd, 16#01}; +dec_huffman_lookup(16#c0, 16#b) -> {ok, 16#cd, 16#16}; +dec_huffman_lookup(16#c0, 16#c) -> {more, 16#d2, 16#01}; +dec_huffman_lookup(16#c0, 16#d) -> {ok, 16#d2, 16#16}; +dec_huffman_lookup(16#c0, 16#e) -> {more, 16#d5, 16#01}; +dec_huffman_lookup(16#c0, 16#f) -> {ok, 16#d5, 16#16}; +dec_huffman_lookup(16#c1, 16#0) -> {more, 16#c0, 16#02}; +dec_huffman_lookup(16#c1, 16#1) -> {more, 16#c0, 16#09}; +dec_huffman_lookup(16#c1, 16#2) -> {more, 16#c0, 16#17}; +dec_huffman_lookup(16#c1, 16#3) -> {ok, 16#c0, 16#28}; +dec_huffman_lookup(16#c1, 16#4) -> {more, 16#c1, 16#02}; +dec_huffman_lookup(16#c1, 16#5) -> {more, 16#c1, 16#09}; +dec_huffman_lookup(16#c1, 16#6) -> {more, 16#c1, 16#17}; +dec_huffman_lookup(16#c1, 16#7) -> {ok, 16#c1, 16#28}; +dec_huffman_lookup(16#c1, 16#8) -> {more, 16#c8, 16#02}; +dec_huffman_lookup(16#c1, 16#9) -> {more, 16#c8, 16#09}; +dec_huffman_lookup(16#c1, 16#a) -> {more, 16#c8, 16#17}; +dec_huffman_lookup(16#c1, 16#b) -> {ok, 16#c8, 16#28}; +dec_huffman_lookup(16#c1, 16#c) -> {more, 16#c9, 16#02}; +dec_huffman_lookup(16#c1, 16#d) -> {more, 16#c9, 16#09}; +dec_huffman_lookup(16#c1, 16#e) -> {more, 16#c9, 16#17}; +dec_huffman_lookup(16#c1, 16#f) -> {ok, 16#c9, 16#28}; +dec_huffman_lookup(16#c2, 16#0) -> {more, 16#c0, 16#03}; +dec_huffman_lookup(16#c2, 16#1) -> {more, 16#c0, 16#06}; +dec_huffman_lookup(16#c2, 16#2) -> {more, 16#c0, 16#0a}; +dec_huffman_lookup(16#c2, 16#3) -> {more, 16#c0, 16#0f}; +dec_huffman_lookup(16#c2, 16#4) -> {more, 16#c0, 16#18}; +dec_huffman_lookup(16#c2, 16#5) -> {more, 16#c0, 16#1f}; +dec_huffman_lookup(16#c2, 16#6) -> {more, 16#c0, 16#29}; +dec_huffman_lookup(16#c2, 16#7) -> {ok, 16#c0, 16#38}; +dec_huffman_lookup(16#c2, 16#8) -> {more, 16#c1, 16#03}; +dec_huffman_lookup(16#c2, 16#9) -> {more, 16#c1, 16#06}; +dec_huffman_lookup(16#c2, 16#a) -> {more, 16#c1, 16#0a}; +dec_huffman_lookup(16#c2, 16#b) -> {more, 16#c1, 16#0f}; +dec_huffman_lookup(16#c2, 16#c) -> {more, 16#c1, 16#18}; +dec_huffman_lookup(16#c2, 16#d) -> {more, 16#c1, 16#1f}; +dec_huffman_lookup(16#c2, 16#e) -> {more, 16#c1, 16#29}; +dec_huffman_lookup(16#c2, 16#f) -> {ok, 16#c1, 16#38}; +dec_huffman_lookup(16#c3, 16#0) -> {more, 16#c8, 16#03}; +dec_huffman_lookup(16#c3, 16#1) -> {more, 16#c8, 16#06}; +dec_huffman_lookup(16#c3, 16#2) -> {more, 16#c8, 16#0a}; +dec_huffman_lookup(16#c3, 16#3) -> {more, 16#c8, 16#0f}; +dec_huffman_lookup(16#c3, 16#4) -> {more, 16#c8, 
16#18}; +dec_huffman_lookup(16#c3, 16#5) -> {more, 16#c8, 16#1f}; +dec_huffman_lookup(16#c3, 16#6) -> {more, 16#c8, 16#29}; +dec_huffman_lookup(16#c3, 16#7) -> {ok, 16#c8, 16#38}; +dec_huffman_lookup(16#c3, 16#8) -> {more, 16#c9, 16#03}; +dec_huffman_lookup(16#c3, 16#9) -> {more, 16#c9, 16#06}; +dec_huffman_lookup(16#c3, 16#a) -> {more, 16#c9, 16#0a}; +dec_huffman_lookup(16#c3, 16#b) -> {more, 16#c9, 16#0f}; +dec_huffman_lookup(16#c3, 16#c) -> {more, 16#c9, 16#18}; +dec_huffman_lookup(16#c3, 16#d) -> {more, 16#c9, 16#1f}; +dec_huffman_lookup(16#c3, 16#e) -> {more, 16#c9, 16#29}; +dec_huffman_lookup(16#c3, 16#f) -> {ok, 16#c9, 16#38}; +dec_huffman_lookup(16#c4, 16#0) -> {more, 16#ca, 16#02}; +dec_huffman_lookup(16#c4, 16#1) -> {more, 16#ca, 16#09}; +dec_huffman_lookup(16#c4, 16#2) -> {more, 16#ca, 16#17}; +dec_huffman_lookup(16#c4, 16#3) -> {ok, 16#ca, 16#28}; +dec_huffman_lookup(16#c4, 16#4) -> {more, 16#cd, 16#02}; +dec_huffman_lookup(16#c4, 16#5) -> {more, 16#cd, 16#09}; +dec_huffman_lookup(16#c4, 16#6) -> {more, 16#cd, 16#17}; +dec_huffman_lookup(16#c4, 16#7) -> {ok, 16#cd, 16#28}; +dec_huffman_lookup(16#c4, 16#8) -> {more, 16#d2, 16#02}; +dec_huffman_lookup(16#c4, 16#9) -> {more, 16#d2, 16#09}; +dec_huffman_lookup(16#c4, 16#a) -> {more, 16#d2, 16#17}; +dec_huffman_lookup(16#c4, 16#b) -> {ok, 16#d2, 16#28}; +dec_huffman_lookup(16#c4, 16#c) -> {more, 16#d5, 16#02}; +dec_huffman_lookup(16#c4, 16#d) -> {more, 16#d5, 16#09}; +dec_huffman_lookup(16#c4, 16#e) -> {more, 16#d5, 16#17}; +dec_huffman_lookup(16#c4, 16#f) -> {ok, 16#d5, 16#28}; +dec_huffman_lookup(16#c5, 16#0) -> {more, 16#ca, 16#03}; +dec_huffman_lookup(16#c5, 16#1) -> {more, 16#ca, 16#06}; +dec_huffman_lookup(16#c5, 16#2) -> {more, 16#ca, 16#0a}; +dec_huffman_lookup(16#c5, 16#3) -> {more, 16#ca, 16#0f}; +dec_huffman_lookup(16#c5, 16#4) -> {more, 16#ca, 16#18}; +dec_huffman_lookup(16#c5, 16#5) -> {more, 16#ca, 16#1f}; +dec_huffman_lookup(16#c5, 16#6) -> {more, 16#ca, 16#29}; +dec_huffman_lookup(16#c5, 16#7) -> {ok, 16#ca, 16#38}; +dec_huffman_lookup(16#c5, 16#8) -> {more, 16#cd, 16#03}; +dec_huffman_lookup(16#c5, 16#9) -> {more, 16#cd, 16#06}; +dec_huffman_lookup(16#c5, 16#a) -> {more, 16#cd, 16#0a}; +dec_huffman_lookup(16#c5, 16#b) -> {more, 16#cd, 16#0f}; +dec_huffman_lookup(16#c5, 16#c) -> {more, 16#cd, 16#18}; +dec_huffman_lookup(16#c5, 16#d) -> {more, 16#cd, 16#1f}; +dec_huffman_lookup(16#c5, 16#e) -> {more, 16#cd, 16#29}; +dec_huffman_lookup(16#c5, 16#f) -> {ok, 16#cd, 16#38}; +dec_huffman_lookup(16#c6, 16#0) -> {more, 16#d2, 16#03}; +dec_huffman_lookup(16#c6, 16#1) -> {more, 16#d2, 16#06}; +dec_huffman_lookup(16#c6, 16#2) -> {more, 16#d2, 16#0a}; +dec_huffman_lookup(16#c6, 16#3) -> {more, 16#d2, 16#0f}; +dec_huffman_lookup(16#c6, 16#4) -> {more, 16#d2, 16#18}; +dec_huffman_lookup(16#c6, 16#5) -> {more, 16#d2, 16#1f}; +dec_huffman_lookup(16#c6, 16#6) -> {more, 16#d2, 16#29}; +dec_huffman_lookup(16#c6, 16#7) -> {ok, 16#d2, 16#38}; +dec_huffman_lookup(16#c6, 16#8) -> {more, 16#d5, 16#03}; +dec_huffman_lookup(16#c6, 16#9) -> {more, 16#d5, 16#06}; +dec_huffman_lookup(16#c6, 16#a) -> {more, 16#d5, 16#0a}; +dec_huffman_lookup(16#c6, 16#b) -> {more, 16#d5, 16#0f}; +dec_huffman_lookup(16#c6, 16#c) -> {more, 16#d5, 16#18}; +dec_huffman_lookup(16#c6, 16#d) -> {more, 16#d5, 16#1f}; +dec_huffman_lookup(16#c6, 16#e) -> {more, 16#d5, 16#29}; +dec_huffman_lookup(16#c6, 16#f) -> {ok, 16#d5, 16#38}; +dec_huffman_lookup(16#c7, 16#0) -> {more, 16#da, 16#01}; +dec_huffman_lookup(16#c7, 16#1) -> {ok, 16#da, 16#16}; +dec_huffman_lookup(16#c7, 
16#2) -> {more, 16#db, 16#01}; +dec_huffman_lookup(16#c7, 16#3) -> {ok, 16#db, 16#16}; +dec_huffman_lookup(16#c7, 16#4) -> {more, 16#ee, 16#01}; +dec_huffman_lookup(16#c7, 16#5) -> {ok, 16#ee, 16#16}; +dec_huffman_lookup(16#c7, 16#6) -> {more, 16#f0, 16#01}; +dec_huffman_lookup(16#c7, 16#7) -> {ok, 16#f0, 16#16}; +dec_huffman_lookup(16#c7, 16#8) -> {more, 16#f2, 16#01}; +dec_huffman_lookup(16#c7, 16#9) -> {ok, 16#f2, 16#16}; +dec_huffman_lookup(16#c7, 16#a) -> {more, 16#f3, 16#01}; +dec_huffman_lookup(16#c7, 16#b) -> {ok, 16#f3, 16#16}; +dec_huffman_lookup(16#c7, 16#c) -> {more, 16#ff, 16#01}; +dec_huffman_lookup(16#c7, 16#d) -> {ok, 16#ff, 16#16}; +dec_huffman_lookup(16#c7, 16#e) -> {ok, 16#cb, 16#00}; +dec_huffman_lookup(16#c7, 16#f) -> {ok, 16#cc, 16#00}; +dec_huffman_lookup(16#c8, 16#0) -> {more, 16#da, 16#02}; +dec_huffman_lookup(16#c8, 16#1) -> {more, 16#da, 16#09}; +dec_huffman_lookup(16#c8, 16#2) -> {more, 16#da, 16#17}; +dec_huffman_lookup(16#c8, 16#3) -> {ok, 16#da, 16#28}; +dec_huffman_lookup(16#c8, 16#4) -> {more, 16#db, 16#02}; +dec_huffman_lookup(16#c8, 16#5) -> {more, 16#db, 16#09}; +dec_huffman_lookup(16#c8, 16#6) -> {more, 16#db, 16#17}; +dec_huffman_lookup(16#c8, 16#7) -> {ok, 16#db, 16#28}; +dec_huffman_lookup(16#c8, 16#8) -> {more, 16#ee, 16#02}; +dec_huffman_lookup(16#c8, 16#9) -> {more, 16#ee, 16#09}; +dec_huffman_lookup(16#c8, 16#a) -> {more, 16#ee, 16#17}; +dec_huffman_lookup(16#c8, 16#b) -> {ok, 16#ee, 16#28}; +dec_huffman_lookup(16#c8, 16#c) -> {more, 16#f0, 16#02}; +dec_huffman_lookup(16#c8, 16#d) -> {more, 16#f0, 16#09}; +dec_huffman_lookup(16#c8, 16#e) -> {more, 16#f0, 16#17}; +dec_huffman_lookup(16#c8, 16#f) -> {ok, 16#f0, 16#28}; +dec_huffman_lookup(16#c9, 16#0) -> {more, 16#da, 16#03}; +dec_huffman_lookup(16#c9, 16#1) -> {more, 16#da, 16#06}; +dec_huffman_lookup(16#c9, 16#2) -> {more, 16#da, 16#0a}; +dec_huffman_lookup(16#c9, 16#3) -> {more, 16#da, 16#0f}; +dec_huffman_lookup(16#c9, 16#4) -> {more, 16#da, 16#18}; +dec_huffman_lookup(16#c9, 16#5) -> {more, 16#da, 16#1f}; +dec_huffman_lookup(16#c9, 16#6) -> {more, 16#da, 16#29}; +dec_huffman_lookup(16#c9, 16#7) -> {ok, 16#da, 16#38}; +dec_huffman_lookup(16#c9, 16#8) -> {more, 16#db, 16#03}; +dec_huffman_lookup(16#c9, 16#9) -> {more, 16#db, 16#06}; +dec_huffman_lookup(16#c9, 16#a) -> {more, 16#db, 16#0a}; +dec_huffman_lookup(16#c9, 16#b) -> {more, 16#db, 16#0f}; +dec_huffman_lookup(16#c9, 16#c) -> {more, 16#db, 16#18}; +dec_huffman_lookup(16#c9, 16#d) -> {more, 16#db, 16#1f}; +dec_huffman_lookup(16#c9, 16#e) -> {more, 16#db, 16#29}; +dec_huffman_lookup(16#c9, 16#f) -> {ok, 16#db, 16#38}; +dec_huffman_lookup(16#ca, 16#0) -> {more, 16#ee, 16#03}; +dec_huffman_lookup(16#ca, 16#1) -> {more, 16#ee, 16#06}; +dec_huffman_lookup(16#ca, 16#2) -> {more, 16#ee, 16#0a}; +dec_huffman_lookup(16#ca, 16#3) -> {more, 16#ee, 16#0f}; +dec_huffman_lookup(16#ca, 16#4) -> {more, 16#ee, 16#18}; +dec_huffman_lookup(16#ca, 16#5) -> {more, 16#ee, 16#1f}; +dec_huffman_lookup(16#ca, 16#6) -> {more, 16#ee, 16#29}; +dec_huffman_lookup(16#ca, 16#7) -> {ok, 16#ee, 16#38}; +dec_huffman_lookup(16#ca, 16#8) -> {more, 16#f0, 16#03}; +dec_huffman_lookup(16#ca, 16#9) -> {more, 16#f0, 16#06}; +dec_huffman_lookup(16#ca, 16#a) -> {more, 16#f0, 16#0a}; +dec_huffman_lookup(16#ca, 16#b) -> {more, 16#f0, 16#0f}; +dec_huffman_lookup(16#ca, 16#c) -> {more, 16#f0, 16#18}; +dec_huffman_lookup(16#ca, 16#d) -> {more, 16#f0, 16#1f}; +dec_huffman_lookup(16#ca, 16#e) -> {more, 16#f0, 16#29}; +dec_huffman_lookup(16#ca, 16#f) -> {ok, 16#f0, 16#38}; 
+dec_huffman_lookup(16#cb, 16#0) -> {more, 16#f2, 16#02}; +dec_huffman_lookup(16#cb, 16#1) -> {more, 16#f2, 16#09}; +dec_huffman_lookup(16#cb, 16#2) -> {more, 16#f2, 16#17}; +dec_huffman_lookup(16#cb, 16#3) -> {ok, 16#f2, 16#28}; +dec_huffman_lookup(16#cb, 16#4) -> {more, 16#f3, 16#02}; +dec_huffman_lookup(16#cb, 16#5) -> {more, 16#f3, 16#09}; +dec_huffman_lookup(16#cb, 16#6) -> {more, 16#f3, 16#17}; +dec_huffman_lookup(16#cb, 16#7) -> {ok, 16#f3, 16#28}; +dec_huffman_lookup(16#cb, 16#8) -> {more, 16#ff, 16#02}; +dec_huffman_lookup(16#cb, 16#9) -> {more, 16#ff, 16#09}; +dec_huffman_lookup(16#cb, 16#a) -> {more, 16#ff, 16#17}; +dec_huffman_lookup(16#cb, 16#b) -> {ok, 16#ff, 16#28}; +dec_huffman_lookup(16#cb, 16#c) -> {more, 16#cb, 16#01}; +dec_huffman_lookup(16#cb, 16#d) -> {ok, 16#cb, 16#16}; +dec_huffman_lookup(16#cb, 16#e) -> {more, 16#cc, 16#01}; +dec_huffman_lookup(16#cb, 16#f) -> {ok, 16#cc, 16#16}; +dec_huffman_lookup(16#cc, 16#0) -> {more, 16#f2, 16#03}; +dec_huffman_lookup(16#cc, 16#1) -> {more, 16#f2, 16#06}; +dec_huffman_lookup(16#cc, 16#2) -> {more, 16#f2, 16#0a}; +dec_huffman_lookup(16#cc, 16#3) -> {more, 16#f2, 16#0f}; +dec_huffman_lookup(16#cc, 16#4) -> {more, 16#f2, 16#18}; +dec_huffman_lookup(16#cc, 16#5) -> {more, 16#f2, 16#1f}; +dec_huffman_lookup(16#cc, 16#6) -> {more, 16#f2, 16#29}; +dec_huffman_lookup(16#cc, 16#7) -> {ok, 16#f2, 16#38}; +dec_huffman_lookup(16#cc, 16#8) -> {more, 16#f3, 16#03}; +dec_huffman_lookup(16#cc, 16#9) -> {more, 16#f3, 16#06}; +dec_huffman_lookup(16#cc, 16#a) -> {more, 16#f3, 16#0a}; +dec_huffman_lookup(16#cc, 16#b) -> {more, 16#f3, 16#0f}; +dec_huffman_lookup(16#cc, 16#c) -> {more, 16#f3, 16#18}; +dec_huffman_lookup(16#cc, 16#d) -> {more, 16#f3, 16#1f}; +dec_huffman_lookup(16#cc, 16#e) -> {more, 16#f3, 16#29}; +dec_huffman_lookup(16#cc, 16#f) -> {ok, 16#f3, 16#38}; +dec_huffman_lookup(16#cd, 16#0) -> {more, 16#ff, 16#03}; +dec_huffman_lookup(16#cd, 16#1) -> {more, 16#ff, 16#06}; +dec_huffman_lookup(16#cd, 16#2) -> {more, 16#ff, 16#0a}; +dec_huffman_lookup(16#cd, 16#3) -> {more, 16#ff, 16#0f}; +dec_huffman_lookup(16#cd, 16#4) -> {more, 16#ff, 16#18}; +dec_huffman_lookup(16#cd, 16#5) -> {more, 16#ff, 16#1f}; +dec_huffman_lookup(16#cd, 16#6) -> {more, 16#ff, 16#29}; +dec_huffman_lookup(16#cd, 16#7) -> {ok, 16#ff, 16#38}; +dec_huffman_lookup(16#cd, 16#8) -> {more, 16#cb, 16#02}; +dec_huffman_lookup(16#cd, 16#9) -> {more, 16#cb, 16#09}; +dec_huffman_lookup(16#cd, 16#a) -> {more, 16#cb, 16#17}; +dec_huffman_lookup(16#cd, 16#b) -> {ok, 16#cb, 16#28}; +dec_huffman_lookup(16#cd, 16#c) -> {more, 16#cc, 16#02}; +dec_huffman_lookup(16#cd, 16#d) -> {more, 16#cc, 16#09}; +dec_huffman_lookup(16#cd, 16#e) -> {more, 16#cc, 16#17}; +dec_huffman_lookup(16#cd, 16#f) -> {ok, 16#cc, 16#28}; +dec_huffman_lookup(16#ce, 16#0) -> {more, 16#cb, 16#03}; +dec_huffman_lookup(16#ce, 16#1) -> {more, 16#cb, 16#06}; +dec_huffman_lookup(16#ce, 16#2) -> {more, 16#cb, 16#0a}; +dec_huffman_lookup(16#ce, 16#3) -> {more, 16#cb, 16#0f}; +dec_huffman_lookup(16#ce, 16#4) -> {more, 16#cb, 16#18}; +dec_huffman_lookup(16#ce, 16#5) -> {more, 16#cb, 16#1f}; +dec_huffman_lookup(16#ce, 16#6) -> {more, 16#cb, 16#29}; +dec_huffman_lookup(16#ce, 16#7) -> {ok, 16#cb, 16#38}; +dec_huffman_lookup(16#ce, 16#8) -> {more, 16#cc, 16#03}; +dec_huffman_lookup(16#ce, 16#9) -> {more, 16#cc, 16#06}; +dec_huffman_lookup(16#ce, 16#a) -> {more, 16#cc, 16#0a}; +dec_huffman_lookup(16#ce, 16#b) -> {more, 16#cc, 16#0f}; +dec_huffman_lookup(16#ce, 16#c) -> {more, 16#cc, 16#18}; +dec_huffman_lookup(16#ce, 16#d) -> 
{more, 16#cc, 16#1f}; +dec_huffman_lookup(16#ce, 16#e) -> {more, 16#cc, 16#29}; +dec_huffman_lookup(16#ce, 16#f) -> {ok, 16#cc, 16#38}; +dec_huffman_lookup(16#cf, 16#0) -> {more, undefined, 16#d3}; +dec_huffman_lookup(16#cf, 16#1) -> {more, undefined, 16#d4}; +dec_huffman_lookup(16#cf, 16#2) -> {more, undefined, 16#d6}; +dec_huffman_lookup(16#cf, 16#3) -> {more, undefined, 16#d7}; +dec_huffman_lookup(16#cf, 16#4) -> {more, undefined, 16#da}; +dec_huffman_lookup(16#cf, 16#5) -> {more, undefined, 16#db}; +dec_huffman_lookup(16#cf, 16#6) -> {more, undefined, 16#dd}; +dec_huffman_lookup(16#cf, 16#7) -> {more, undefined, 16#de}; +dec_huffman_lookup(16#cf, 16#8) -> {more, undefined, 16#e2}; +dec_huffman_lookup(16#cf, 16#9) -> {more, undefined, 16#e4}; +dec_huffman_lookup(16#cf, 16#a) -> {more, undefined, 16#e8}; +dec_huffman_lookup(16#cf, 16#b) -> {more, undefined, 16#eb}; +dec_huffman_lookup(16#cf, 16#c) -> {more, undefined, 16#f0}; +dec_huffman_lookup(16#cf, 16#d) -> {more, undefined, 16#f3}; +dec_huffman_lookup(16#cf, 16#e) -> {more, undefined, 16#f7}; +dec_huffman_lookup(16#cf, 16#f) -> {ok, undefined, 16#fa}; +dec_huffman_lookup(16#d0, 16#0) -> {ok, 16#d3, 16#00}; +dec_huffman_lookup(16#d0, 16#1) -> {ok, 16#d4, 16#00}; +dec_huffman_lookup(16#d0, 16#2) -> {ok, 16#d6, 16#00}; +dec_huffman_lookup(16#d0, 16#3) -> {ok, 16#dd, 16#00}; +dec_huffman_lookup(16#d0, 16#4) -> {ok, 16#de, 16#00}; +dec_huffman_lookup(16#d0, 16#5) -> {ok, 16#df, 16#00}; +dec_huffman_lookup(16#d0, 16#6) -> {ok, 16#f1, 16#00}; +dec_huffman_lookup(16#d0, 16#7) -> {ok, 16#f4, 16#00}; +dec_huffman_lookup(16#d0, 16#8) -> {ok, 16#f5, 16#00}; +dec_huffman_lookup(16#d0, 16#9) -> {ok, 16#f6, 16#00}; +dec_huffman_lookup(16#d0, 16#a) -> {ok, 16#f7, 16#00}; +dec_huffman_lookup(16#d0, 16#b) -> {ok, 16#f8, 16#00}; +dec_huffman_lookup(16#d0, 16#c) -> {ok, 16#fa, 16#00}; +dec_huffman_lookup(16#d0, 16#d) -> {ok, 16#fb, 16#00}; +dec_huffman_lookup(16#d0, 16#e) -> {ok, 16#fc, 16#00}; +dec_huffman_lookup(16#d0, 16#f) -> {ok, 16#fd, 16#00}; +dec_huffman_lookup(16#d1, 16#0) -> {more, 16#d3, 16#01}; +dec_huffman_lookup(16#d1, 16#1) -> {ok, 16#d3, 16#16}; +dec_huffman_lookup(16#d1, 16#2) -> {more, 16#d4, 16#01}; +dec_huffman_lookup(16#d1, 16#3) -> {ok, 16#d4, 16#16}; +dec_huffman_lookup(16#d1, 16#4) -> {more, 16#d6, 16#01}; +dec_huffman_lookup(16#d1, 16#5) -> {ok, 16#d6, 16#16}; +dec_huffman_lookup(16#d1, 16#6) -> {more, 16#dd, 16#01}; +dec_huffman_lookup(16#d1, 16#7) -> {ok, 16#dd, 16#16}; +dec_huffman_lookup(16#d1, 16#8) -> {more, 16#de, 16#01}; +dec_huffman_lookup(16#d1, 16#9) -> {ok, 16#de, 16#16}; +dec_huffman_lookup(16#d1, 16#a) -> {more, 16#df, 16#01}; +dec_huffman_lookup(16#d1, 16#b) -> {ok, 16#df, 16#16}; +dec_huffman_lookup(16#d1, 16#c) -> {more, 16#f1, 16#01}; +dec_huffman_lookup(16#d1, 16#d) -> {ok, 16#f1, 16#16}; +dec_huffman_lookup(16#d1, 16#e) -> {more, 16#f4, 16#01}; +dec_huffman_lookup(16#d1, 16#f) -> {ok, 16#f4, 16#16}; +dec_huffman_lookup(16#d2, 16#0) -> {more, 16#d3, 16#02}; +dec_huffman_lookup(16#d2, 16#1) -> {more, 16#d3, 16#09}; +dec_huffman_lookup(16#d2, 16#2) -> {more, 16#d3, 16#17}; +dec_huffman_lookup(16#d2, 16#3) -> {ok, 16#d3, 16#28}; +dec_huffman_lookup(16#d2, 16#4) -> {more, 16#d4, 16#02}; +dec_huffman_lookup(16#d2, 16#5) -> {more, 16#d4, 16#09}; +dec_huffman_lookup(16#d2, 16#6) -> {more, 16#d4, 16#17}; +dec_huffman_lookup(16#d2, 16#7) -> {ok, 16#d4, 16#28}; +dec_huffman_lookup(16#d2, 16#8) -> {more, 16#d6, 16#02}; +dec_huffman_lookup(16#d2, 16#9) -> {more, 16#d6, 16#09}; +dec_huffman_lookup(16#d2, 16#a) -> {more, 
16#d6, 16#17}; +dec_huffman_lookup(16#d2, 16#b) -> {ok, 16#d6, 16#28}; +dec_huffman_lookup(16#d2, 16#c) -> {more, 16#dd, 16#02}; +dec_huffman_lookup(16#d2, 16#d) -> {more, 16#dd, 16#09}; +dec_huffman_lookup(16#d2, 16#e) -> {more, 16#dd, 16#17}; +dec_huffman_lookup(16#d2, 16#f) -> {ok, 16#dd, 16#28}; +dec_huffman_lookup(16#d3, 16#0) -> {more, 16#d3, 16#03}; +dec_huffman_lookup(16#d3, 16#1) -> {more, 16#d3, 16#06}; +dec_huffman_lookup(16#d3, 16#2) -> {more, 16#d3, 16#0a}; +dec_huffman_lookup(16#d3, 16#3) -> {more, 16#d3, 16#0f}; +dec_huffman_lookup(16#d3, 16#4) -> {more, 16#d3, 16#18}; +dec_huffman_lookup(16#d3, 16#5) -> {more, 16#d3, 16#1f}; +dec_huffman_lookup(16#d3, 16#6) -> {more, 16#d3, 16#29}; +dec_huffman_lookup(16#d3, 16#7) -> {ok, 16#d3, 16#38}; +dec_huffman_lookup(16#d3, 16#8) -> {more, 16#d4, 16#03}; +dec_huffman_lookup(16#d3, 16#9) -> {more, 16#d4, 16#06}; +dec_huffman_lookup(16#d3, 16#a) -> {more, 16#d4, 16#0a}; +dec_huffman_lookup(16#d3, 16#b) -> {more, 16#d4, 16#0f}; +dec_huffman_lookup(16#d3, 16#c) -> {more, 16#d4, 16#18}; +dec_huffman_lookup(16#d3, 16#d) -> {more, 16#d4, 16#1f}; +dec_huffman_lookup(16#d3, 16#e) -> {more, 16#d4, 16#29}; +dec_huffman_lookup(16#d3, 16#f) -> {ok, 16#d4, 16#38}; +dec_huffman_lookup(16#d4, 16#0) -> {more, 16#d6, 16#03}; +dec_huffman_lookup(16#d4, 16#1) -> {more, 16#d6, 16#06}; +dec_huffman_lookup(16#d4, 16#2) -> {more, 16#d6, 16#0a}; +dec_huffman_lookup(16#d4, 16#3) -> {more, 16#d6, 16#0f}; +dec_huffman_lookup(16#d4, 16#4) -> {more, 16#d6, 16#18}; +dec_huffman_lookup(16#d4, 16#5) -> {more, 16#d6, 16#1f}; +dec_huffman_lookup(16#d4, 16#6) -> {more, 16#d6, 16#29}; +dec_huffman_lookup(16#d4, 16#7) -> {ok, 16#d6, 16#38}; +dec_huffman_lookup(16#d4, 16#8) -> {more, 16#dd, 16#03}; +dec_huffman_lookup(16#d4, 16#9) -> {more, 16#dd, 16#06}; +dec_huffman_lookup(16#d4, 16#a) -> {more, 16#dd, 16#0a}; +dec_huffman_lookup(16#d4, 16#b) -> {more, 16#dd, 16#0f}; +dec_huffman_lookup(16#d4, 16#c) -> {more, 16#dd, 16#18}; +dec_huffman_lookup(16#d4, 16#d) -> {more, 16#dd, 16#1f}; +dec_huffman_lookup(16#d4, 16#e) -> {more, 16#dd, 16#29}; +dec_huffman_lookup(16#d4, 16#f) -> {ok, 16#dd, 16#38}; +dec_huffman_lookup(16#d5, 16#0) -> {more, 16#de, 16#02}; +dec_huffman_lookup(16#d5, 16#1) -> {more, 16#de, 16#09}; +dec_huffman_lookup(16#d5, 16#2) -> {more, 16#de, 16#17}; +dec_huffman_lookup(16#d5, 16#3) -> {ok, 16#de, 16#28}; +dec_huffman_lookup(16#d5, 16#4) -> {more, 16#df, 16#02}; +dec_huffman_lookup(16#d5, 16#5) -> {more, 16#df, 16#09}; +dec_huffman_lookup(16#d5, 16#6) -> {more, 16#df, 16#17}; +dec_huffman_lookup(16#d5, 16#7) -> {ok, 16#df, 16#28}; +dec_huffman_lookup(16#d5, 16#8) -> {more, 16#f1, 16#02}; +dec_huffman_lookup(16#d5, 16#9) -> {more, 16#f1, 16#09}; +dec_huffman_lookup(16#d5, 16#a) -> {more, 16#f1, 16#17}; +dec_huffman_lookup(16#d5, 16#b) -> {ok, 16#f1, 16#28}; +dec_huffman_lookup(16#d5, 16#c) -> {more, 16#f4, 16#02}; +dec_huffman_lookup(16#d5, 16#d) -> {more, 16#f4, 16#09}; +dec_huffman_lookup(16#d5, 16#e) -> {more, 16#f4, 16#17}; +dec_huffman_lookup(16#d5, 16#f) -> {ok, 16#f4, 16#28}; +dec_huffman_lookup(16#d6, 16#0) -> {more, 16#de, 16#03}; +dec_huffman_lookup(16#d6, 16#1) -> {more, 16#de, 16#06}; +dec_huffman_lookup(16#d6, 16#2) -> {more, 16#de, 16#0a}; +dec_huffman_lookup(16#d6, 16#3) -> {more, 16#de, 16#0f}; +dec_huffman_lookup(16#d6, 16#4) -> {more, 16#de, 16#18}; +dec_huffman_lookup(16#d6, 16#5) -> {more, 16#de, 16#1f}; +dec_huffman_lookup(16#d6, 16#6) -> {more, 16#de, 16#29}; +dec_huffman_lookup(16#d6, 16#7) -> {ok, 16#de, 16#38}; 
+dec_huffman_lookup(16#d6, 16#8) -> {more, 16#df, 16#03}; +dec_huffman_lookup(16#d6, 16#9) -> {more, 16#df, 16#06}; +dec_huffman_lookup(16#d6, 16#a) -> {more, 16#df, 16#0a}; +dec_huffman_lookup(16#d6, 16#b) -> {more, 16#df, 16#0f}; +dec_huffman_lookup(16#d6, 16#c) -> {more, 16#df, 16#18}; +dec_huffman_lookup(16#d6, 16#d) -> {more, 16#df, 16#1f}; +dec_huffman_lookup(16#d6, 16#e) -> {more, 16#df, 16#29}; +dec_huffman_lookup(16#d6, 16#f) -> {ok, 16#df, 16#38}; +dec_huffman_lookup(16#d7, 16#0) -> {more, 16#f1, 16#03}; +dec_huffman_lookup(16#d7, 16#1) -> {more, 16#f1, 16#06}; +dec_huffman_lookup(16#d7, 16#2) -> {more, 16#f1, 16#0a}; +dec_huffman_lookup(16#d7, 16#3) -> {more, 16#f1, 16#0f}; +dec_huffman_lookup(16#d7, 16#4) -> {more, 16#f1, 16#18}; +dec_huffman_lookup(16#d7, 16#5) -> {more, 16#f1, 16#1f}; +dec_huffman_lookup(16#d7, 16#6) -> {more, 16#f1, 16#29}; +dec_huffman_lookup(16#d7, 16#7) -> {ok, 16#f1, 16#38}; +dec_huffman_lookup(16#d7, 16#8) -> {more, 16#f4, 16#03}; +dec_huffman_lookup(16#d7, 16#9) -> {more, 16#f4, 16#06}; +dec_huffman_lookup(16#d7, 16#a) -> {more, 16#f4, 16#0a}; +dec_huffman_lookup(16#d7, 16#b) -> {more, 16#f4, 16#0f}; +dec_huffman_lookup(16#d7, 16#c) -> {more, 16#f4, 16#18}; +dec_huffman_lookup(16#d7, 16#d) -> {more, 16#f4, 16#1f}; +dec_huffman_lookup(16#d7, 16#e) -> {more, 16#f4, 16#29}; +dec_huffman_lookup(16#d7, 16#f) -> {ok, 16#f4, 16#38}; +dec_huffman_lookup(16#d8, 16#0) -> {more, 16#f5, 16#01}; +dec_huffman_lookup(16#d8, 16#1) -> {ok, 16#f5, 16#16}; +dec_huffman_lookup(16#d8, 16#2) -> {more, 16#f6, 16#01}; +dec_huffman_lookup(16#d8, 16#3) -> {ok, 16#f6, 16#16}; +dec_huffman_lookup(16#d8, 16#4) -> {more, 16#f7, 16#01}; +dec_huffman_lookup(16#d8, 16#5) -> {ok, 16#f7, 16#16}; +dec_huffman_lookup(16#d8, 16#6) -> {more, 16#f8, 16#01}; +dec_huffman_lookup(16#d8, 16#7) -> {ok, 16#f8, 16#16}; +dec_huffman_lookup(16#d8, 16#8) -> {more, 16#fa, 16#01}; +dec_huffman_lookup(16#d8, 16#9) -> {ok, 16#fa, 16#16}; +dec_huffman_lookup(16#d8, 16#a) -> {more, 16#fb, 16#01}; +dec_huffman_lookup(16#d8, 16#b) -> {ok, 16#fb, 16#16}; +dec_huffman_lookup(16#d8, 16#c) -> {more, 16#fc, 16#01}; +dec_huffman_lookup(16#d8, 16#d) -> {ok, 16#fc, 16#16}; +dec_huffman_lookup(16#d8, 16#e) -> {more, 16#fd, 16#01}; +dec_huffman_lookup(16#d8, 16#f) -> {ok, 16#fd, 16#16}; +dec_huffman_lookup(16#d9, 16#0) -> {more, 16#f5, 16#02}; +dec_huffman_lookup(16#d9, 16#1) -> {more, 16#f5, 16#09}; +dec_huffman_lookup(16#d9, 16#2) -> {more, 16#f5, 16#17}; +dec_huffman_lookup(16#d9, 16#3) -> {ok, 16#f5, 16#28}; +dec_huffman_lookup(16#d9, 16#4) -> {more, 16#f6, 16#02}; +dec_huffman_lookup(16#d9, 16#5) -> {more, 16#f6, 16#09}; +dec_huffman_lookup(16#d9, 16#6) -> {more, 16#f6, 16#17}; +dec_huffman_lookup(16#d9, 16#7) -> {ok, 16#f6, 16#28}; +dec_huffman_lookup(16#d9, 16#8) -> {more, 16#f7, 16#02}; +dec_huffman_lookup(16#d9, 16#9) -> {more, 16#f7, 16#09}; +dec_huffman_lookup(16#d9, 16#a) -> {more, 16#f7, 16#17}; +dec_huffman_lookup(16#d9, 16#b) -> {ok, 16#f7, 16#28}; +dec_huffman_lookup(16#d9, 16#c) -> {more, 16#f8, 16#02}; +dec_huffman_lookup(16#d9, 16#d) -> {more, 16#f8, 16#09}; +dec_huffman_lookup(16#d9, 16#e) -> {more, 16#f8, 16#17}; +dec_huffman_lookup(16#d9, 16#f) -> {ok, 16#f8, 16#28}; +dec_huffman_lookup(16#da, 16#0) -> {more, 16#f5, 16#03}; +dec_huffman_lookup(16#da, 16#1) -> {more, 16#f5, 16#06}; +dec_huffman_lookup(16#da, 16#2) -> {more, 16#f5, 16#0a}; +dec_huffman_lookup(16#da, 16#3) -> {more, 16#f5, 16#0f}; +dec_huffman_lookup(16#da, 16#4) -> {more, 16#f5, 16#18}; +dec_huffman_lookup(16#da, 16#5) -> {more, 
16#f5, 16#1f}; +dec_huffman_lookup(16#da, 16#6) -> {more, 16#f5, 16#29}; +dec_huffman_lookup(16#da, 16#7) -> {ok, 16#f5, 16#38}; +dec_huffman_lookup(16#da, 16#8) -> {more, 16#f6, 16#03}; +dec_huffman_lookup(16#da, 16#9) -> {more, 16#f6, 16#06}; +dec_huffman_lookup(16#da, 16#a) -> {more, 16#f6, 16#0a}; +dec_huffman_lookup(16#da, 16#b) -> {more, 16#f6, 16#0f}; +dec_huffman_lookup(16#da, 16#c) -> {more, 16#f6, 16#18}; +dec_huffman_lookup(16#da, 16#d) -> {more, 16#f6, 16#1f}; +dec_huffman_lookup(16#da, 16#e) -> {more, 16#f6, 16#29}; +dec_huffman_lookup(16#da, 16#f) -> {ok, 16#f6, 16#38}; +dec_huffman_lookup(16#db, 16#0) -> {more, 16#f7, 16#03}; +dec_huffman_lookup(16#db, 16#1) -> {more, 16#f7, 16#06}; +dec_huffman_lookup(16#db, 16#2) -> {more, 16#f7, 16#0a}; +dec_huffman_lookup(16#db, 16#3) -> {more, 16#f7, 16#0f}; +dec_huffman_lookup(16#db, 16#4) -> {more, 16#f7, 16#18}; +dec_huffman_lookup(16#db, 16#5) -> {more, 16#f7, 16#1f}; +dec_huffman_lookup(16#db, 16#6) -> {more, 16#f7, 16#29}; +dec_huffman_lookup(16#db, 16#7) -> {ok, 16#f7, 16#38}; +dec_huffman_lookup(16#db, 16#8) -> {more, 16#f8, 16#03}; +dec_huffman_lookup(16#db, 16#9) -> {more, 16#f8, 16#06}; +dec_huffman_lookup(16#db, 16#a) -> {more, 16#f8, 16#0a}; +dec_huffman_lookup(16#db, 16#b) -> {more, 16#f8, 16#0f}; +dec_huffman_lookup(16#db, 16#c) -> {more, 16#f8, 16#18}; +dec_huffman_lookup(16#db, 16#d) -> {more, 16#f8, 16#1f}; +dec_huffman_lookup(16#db, 16#e) -> {more, 16#f8, 16#29}; +dec_huffman_lookup(16#db, 16#f) -> {ok, 16#f8, 16#38}; +dec_huffman_lookup(16#dc, 16#0) -> {more, 16#fa, 16#02}; +dec_huffman_lookup(16#dc, 16#1) -> {more, 16#fa, 16#09}; +dec_huffman_lookup(16#dc, 16#2) -> {more, 16#fa, 16#17}; +dec_huffman_lookup(16#dc, 16#3) -> {ok, 16#fa, 16#28}; +dec_huffman_lookup(16#dc, 16#4) -> {more, 16#fb, 16#02}; +dec_huffman_lookup(16#dc, 16#5) -> {more, 16#fb, 16#09}; +dec_huffman_lookup(16#dc, 16#6) -> {more, 16#fb, 16#17}; +dec_huffman_lookup(16#dc, 16#7) -> {ok, 16#fb, 16#28}; +dec_huffman_lookup(16#dc, 16#8) -> {more, 16#fc, 16#02}; +dec_huffman_lookup(16#dc, 16#9) -> {more, 16#fc, 16#09}; +dec_huffman_lookup(16#dc, 16#a) -> {more, 16#fc, 16#17}; +dec_huffman_lookup(16#dc, 16#b) -> {ok, 16#fc, 16#28}; +dec_huffman_lookup(16#dc, 16#c) -> {more, 16#fd, 16#02}; +dec_huffman_lookup(16#dc, 16#d) -> {more, 16#fd, 16#09}; +dec_huffman_lookup(16#dc, 16#e) -> {more, 16#fd, 16#17}; +dec_huffman_lookup(16#dc, 16#f) -> {ok, 16#fd, 16#28}; +dec_huffman_lookup(16#dd, 16#0) -> {more, 16#fa, 16#03}; +dec_huffman_lookup(16#dd, 16#1) -> {more, 16#fa, 16#06}; +dec_huffman_lookup(16#dd, 16#2) -> {more, 16#fa, 16#0a}; +dec_huffman_lookup(16#dd, 16#3) -> {more, 16#fa, 16#0f}; +dec_huffman_lookup(16#dd, 16#4) -> {more, 16#fa, 16#18}; +dec_huffman_lookup(16#dd, 16#5) -> {more, 16#fa, 16#1f}; +dec_huffman_lookup(16#dd, 16#6) -> {more, 16#fa, 16#29}; +dec_huffman_lookup(16#dd, 16#7) -> {ok, 16#fa, 16#38}; +dec_huffman_lookup(16#dd, 16#8) -> {more, 16#fb, 16#03}; +dec_huffman_lookup(16#dd, 16#9) -> {more, 16#fb, 16#06}; +dec_huffman_lookup(16#dd, 16#a) -> {more, 16#fb, 16#0a}; +dec_huffman_lookup(16#dd, 16#b) -> {more, 16#fb, 16#0f}; +dec_huffman_lookup(16#dd, 16#c) -> {more, 16#fb, 16#18}; +dec_huffman_lookup(16#dd, 16#d) -> {more, 16#fb, 16#1f}; +dec_huffman_lookup(16#dd, 16#e) -> {more, 16#fb, 16#29}; +dec_huffman_lookup(16#dd, 16#f) -> {ok, 16#fb, 16#38}; +dec_huffman_lookup(16#de, 16#0) -> {more, 16#fc, 16#03}; +dec_huffman_lookup(16#de, 16#1) -> {more, 16#fc, 16#06}; +dec_huffman_lookup(16#de, 16#2) -> {more, 16#fc, 16#0a}; 
+dec_huffman_lookup(16#de, 16#3) -> {more, 16#fc, 16#0f}; +dec_huffman_lookup(16#de, 16#4) -> {more, 16#fc, 16#18}; +dec_huffman_lookup(16#de, 16#5) -> {more, 16#fc, 16#1f}; +dec_huffman_lookup(16#de, 16#6) -> {more, 16#fc, 16#29}; +dec_huffman_lookup(16#de, 16#7) -> {ok, 16#fc, 16#38}; +dec_huffman_lookup(16#de, 16#8) -> {more, 16#fd, 16#03}; +dec_huffman_lookup(16#de, 16#9) -> {more, 16#fd, 16#06}; +dec_huffman_lookup(16#de, 16#a) -> {more, 16#fd, 16#0a}; +dec_huffman_lookup(16#de, 16#b) -> {more, 16#fd, 16#0f}; +dec_huffman_lookup(16#de, 16#c) -> {more, 16#fd, 16#18}; +dec_huffman_lookup(16#de, 16#d) -> {more, 16#fd, 16#1f}; +dec_huffman_lookup(16#de, 16#e) -> {more, 16#fd, 16#29}; +dec_huffman_lookup(16#de, 16#f) -> {ok, 16#fd, 16#38}; +dec_huffman_lookup(16#df, 16#0) -> {ok, 16#fe, 16#00}; +dec_huffman_lookup(16#df, 16#1) -> {more, undefined, 16#e3}; +dec_huffman_lookup(16#df, 16#2) -> {more, undefined, 16#e5}; +dec_huffman_lookup(16#df, 16#3) -> {more, undefined, 16#e6}; +dec_huffman_lookup(16#df, 16#4) -> {more, undefined, 16#e9}; +dec_huffman_lookup(16#df, 16#5) -> {more, undefined, 16#ea}; +dec_huffman_lookup(16#df, 16#6) -> {more, undefined, 16#ec}; +dec_huffman_lookup(16#df, 16#7) -> {more, undefined, 16#ed}; +dec_huffman_lookup(16#df, 16#8) -> {more, undefined, 16#f1}; +dec_huffman_lookup(16#df, 16#9) -> {more, undefined, 16#f2}; +dec_huffman_lookup(16#df, 16#a) -> {more, undefined, 16#f4}; +dec_huffman_lookup(16#df, 16#b) -> {more, undefined, 16#f5}; +dec_huffman_lookup(16#df, 16#c) -> {more, undefined, 16#f8}; +dec_huffman_lookup(16#df, 16#d) -> {more, undefined, 16#f9}; +dec_huffman_lookup(16#df, 16#e) -> {more, undefined, 16#fb}; +dec_huffman_lookup(16#df, 16#f) -> {ok, undefined, 16#fc}; +dec_huffman_lookup(16#e0, 16#0) -> {more, 16#fe, 16#01}; +dec_huffman_lookup(16#e0, 16#1) -> {ok, 16#fe, 16#16}; +dec_huffman_lookup(16#e0, 16#2) -> {ok, 16#02, 16#00}; +dec_huffman_lookup(16#e0, 16#3) -> {ok, 16#03, 16#00}; +dec_huffman_lookup(16#e0, 16#4) -> {ok, 16#04, 16#00}; +dec_huffman_lookup(16#e0, 16#5) -> {ok, 16#05, 16#00}; +dec_huffman_lookup(16#e0, 16#6) -> {ok, 16#06, 16#00}; +dec_huffman_lookup(16#e0, 16#7) -> {ok, 16#07, 16#00}; +dec_huffman_lookup(16#e0, 16#8) -> {ok, 16#08, 16#00}; +dec_huffman_lookup(16#e0, 16#9) -> {ok, 16#0b, 16#00}; +dec_huffman_lookup(16#e0, 16#a) -> {ok, 16#0c, 16#00}; +dec_huffman_lookup(16#e0, 16#b) -> {ok, 16#0e, 16#00}; +dec_huffman_lookup(16#e0, 16#c) -> {ok, 16#0f, 16#00}; +dec_huffman_lookup(16#e0, 16#d) -> {ok, 16#10, 16#00}; +dec_huffman_lookup(16#e0, 16#e) -> {ok, 16#11, 16#00}; +dec_huffman_lookup(16#e0, 16#f) -> {ok, 16#12, 16#00}; +dec_huffman_lookup(16#e1, 16#0) -> {more, 16#fe, 16#02}; +dec_huffman_lookup(16#e1, 16#1) -> {more, 16#fe, 16#09}; +dec_huffman_lookup(16#e1, 16#2) -> {more, 16#fe, 16#17}; +dec_huffman_lookup(16#e1, 16#3) -> {ok, 16#fe, 16#28}; +dec_huffman_lookup(16#e1, 16#4) -> {more, 16#02, 16#01}; +dec_huffman_lookup(16#e1, 16#5) -> {ok, 16#02, 16#16}; +dec_huffman_lookup(16#e1, 16#6) -> {more, 16#03, 16#01}; +dec_huffman_lookup(16#e1, 16#7) -> {ok, 16#03, 16#16}; +dec_huffman_lookup(16#e1, 16#8) -> {more, 16#04, 16#01}; +dec_huffman_lookup(16#e1, 16#9) -> {ok, 16#04, 16#16}; +dec_huffman_lookup(16#e1, 16#a) -> {more, 16#05, 16#01}; +dec_huffman_lookup(16#e1, 16#b) -> {ok, 16#05, 16#16}; +dec_huffman_lookup(16#e1, 16#c) -> {more, 16#06, 16#01}; +dec_huffman_lookup(16#e1, 16#d) -> {ok, 16#06, 16#16}; +dec_huffman_lookup(16#e1, 16#e) -> {more, 16#07, 16#01}; +dec_huffman_lookup(16#e1, 16#f) -> {ok, 16#07, 16#16}; 
+dec_huffman_lookup(16#e2, 16#0) -> {more, 16#fe, 16#03}; +dec_huffman_lookup(16#e2, 16#1) -> {more, 16#fe, 16#06}; +dec_huffman_lookup(16#e2, 16#2) -> {more, 16#fe, 16#0a}; +dec_huffman_lookup(16#e2, 16#3) -> {more, 16#fe, 16#0f}; +dec_huffman_lookup(16#e2, 16#4) -> {more, 16#fe, 16#18}; +dec_huffman_lookup(16#e2, 16#5) -> {more, 16#fe, 16#1f}; +dec_huffman_lookup(16#e2, 16#6) -> {more, 16#fe, 16#29}; +dec_huffman_lookup(16#e2, 16#7) -> {ok, 16#fe, 16#38}; +dec_huffman_lookup(16#e2, 16#8) -> {more, 16#02, 16#02}; +dec_huffman_lookup(16#e2, 16#9) -> {more, 16#02, 16#09}; +dec_huffman_lookup(16#e2, 16#a) -> {more, 16#02, 16#17}; +dec_huffman_lookup(16#e2, 16#b) -> {ok, 16#02, 16#28}; +dec_huffman_lookup(16#e2, 16#c) -> {more, 16#03, 16#02}; +dec_huffman_lookup(16#e2, 16#d) -> {more, 16#03, 16#09}; +dec_huffman_lookup(16#e2, 16#e) -> {more, 16#03, 16#17}; +dec_huffman_lookup(16#e2, 16#f) -> {ok, 16#03, 16#28}; +dec_huffman_lookup(16#e3, 16#0) -> {more, 16#02, 16#03}; +dec_huffman_lookup(16#e3, 16#1) -> {more, 16#02, 16#06}; +dec_huffman_lookup(16#e3, 16#2) -> {more, 16#02, 16#0a}; +dec_huffman_lookup(16#e3, 16#3) -> {more, 16#02, 16#0f}; +dec_huffman_lookup(16#e3, 16#4) -> {more, 16#02, 16#18}; +dec_huffman_lookup(16#e3, 16#5) -> {more, 16#02, 16#1f}; +dec_huffman_lookup(16#e3, 16#6) -> {more, 16#02, 16#29}; +dec_huffman_lookup(16#e3, 16#7) -> {ok, 16#02, 16#38}; +dec_huffman_lookup(16#e3, 16#8) -> {more, 16#03, 16#03}; +dec_huffman_lookup(16#e3, 16#9) -> {more, 16#03, 16#06}; +dec_huffman_lookup(16#e3, 16#a) -> {more, 16#03, 16#0a}; +dec_huffman_lookup(16#e3, 16#b) -> {more, 16#03, 16#0f}; +dec_huffman_lookup(16#e3, 16#c) -> {more, 16#03, 16#18}; +dec_huffman_lookup(16#e3, 16#d) -> {more, 16#03, 16#1f}; +dec_huffman_lookup(16#e3, 16#e) -> {more, 16#03, 16#29}; +dec_huffman_lookup(16#e3, 16#f) -> {ok, 16#03, 16#38}; +dec_huffman_lookup(16#e4, 16#0) -> {more, 16#04, 16#02}; +dec_huffman_lookup(16#e4, 16#1) -> {more, 16#04, 16#09}; +dec_huffman_lookup(16#e4, 16#2) -> {more, 16#04, 16#17}; +dec_huffman_lookup(16#e4, 16#3) -> {ok, 16#04, 16#28}; +dec_huffman_lookup(16#e4, 16#4) -> {more, 16#05, 16#02}; +dec_huffman_lookup(16#e4, 16#5) -> {more, 16#05, 16#09}; +dec_huffman_lookup(16#e4, 16#6) -> {more, 16#05, 16#17}; +dec_huffman_lookup(16#e4, 16#7) -> {ok, 16#05, 16#28}; +dec_huffman_lookup(16#e4, 16#8) -> {more, 16#06, 16#02}; +dec_huffman_lookup(16#e4, 16#9) -> {more, 16#06, 16#09}; +dec_huffman_lookup(16#e4, 16#a) -> {more, 16#06, 16#17}; +dec_huffman_lookup(16#e4, 16#b) -> {ok, 16#06, 16#28}; +dec_huffman_lookup(16#e4, 16#c) -> {more, 16#07, 16#02}; +dec_huffman_lookup(16#e4, 16#d) -> {more, 16#07, 16#09}; +dec_huffman_lookup(16#e4, 16#e) -> {more, 16#07, 16#17}; +dec_huffman_lookup(16#e4, 16#f) -> {ok, 16#07, 16#28}; +dec_huffman_lookup(16#e5, 16#0) -> {more, 16#04, 16#03}; +dec_huffman_lookup(16#e5, 16#1) -> {more, 16#04, 16#06}; +dec_huffman_lookup(16#e5, 16#2) -> {more, 16#04, 16#0a}; +dec_huffman_lookup(16#e5, 16#3) -> {more, 16#04, 16#0f}; +dec_huffman_lookup(16#e5, 16#4) -> {more, 16#04, 16#18}; +dec_huffman_lookup(16#e5, 16#5) -> {more, 16#04, 16#1f}; +dec_huffman_lookup(16#e5, 16#6) -> {more, 16#04, 16#29}; +dec_huffman_lookup(16#e5, 16#7) -> {ok, 16#04, 16#38}; +dec_huffman_lookup(16#e5, 16#8) -> {more, 16#05, 16#03}; +dec_huffman_lookup(16#e5, 16#9) -> {more, 16#05, 16#06}; +dec_huffman_lookup(16#e5, 16#a) -> {more, 16#05, 16#0a}; +dec_huffman_lookup(16#e5, 16#b) -> {more, 16#05, 16#0f}; +dec_huffman_lookup(16#e5, 16#c) -> {more, 16#05, 16#18}; +dec_huffman_lookup(16#e5, 16#d) -> 
{more, 16#05, 16#1f}; +dec_huffman_lookup(16#e5, 16#e) -> {more, 16#05, 16#29}; +dec_huffman_lookup(16#e5, 16#f) -> {ok, 16#05, 16#38}; +dec_huffman_lookup(16#e6, 16#0) -> {more, 16#06, 16#03}; +dec_huffman_lookup(16#e6, 16#1) -> {more, 16#06, 16#06}; +dec_huffman_lookup(16#e6, 16#2) -> {more, 16#06, 16#0a}; +dec_huffman_lookup(16#e6, 16#3) -> {more, 16#06, 16#0f}; +dec_huffman_lookup(16#e6, 16#4) -> {more, 16#06, 16#18}; +dec_huffman_lookup(16#e6, 16#5) -> {more, 16#06, 16#1f}; +dec_huffman_lookup(16#e6, 16#6) -> {more, 16#06, 16#29}; +dec_huffman_lookup(16#e6, 16#7) -> {ok, 16#06, 16#38}; +dec_huffman_lookup(16#e6, 16#8) -> {more, 16#07, 16#03}; +dec_huffman_lookup(16#e6, 16#9) -> {more, 16#07, 16#06}; +dec_huffman_lookup(16#e6, 16#a) -> {more, 16#07, 16#0a}; +dec_huffman_lookup(16#e6, 16#b) -> {more, 16#07, 16#0f}; +dec_huffman_lookup(16#e6, 16#c) -> {more, 16#07, 16#18}; +dec_huffman_lookup(16#e6, 16#d) -> {more, 16#07, 16#1f}; +dec_huffman_lookup(16#e6, 16#e) -> {more, 16#07, 16#29}; +dec_huffman_lookup(16#e6, 16#f) -> {ok, 16#07, 16#38}; +dec_huffman_lookup(16#e7, 16#0) -> {more, 16#08, 16#01}; +dec_huffman_lookup(16#e7, 16#1) -> {ok, 16#08, 16#16}; +dec_huffman_lookup(16#e7, 16#2) -> {more, 16#0b, 16#01}; +dec_huffman_lookup(16#e7, 16#3) -> {ok, 16#0b, 16#16}; +dec_huffman_lookup(16#e7, 16#4) -> {more, 16#0c, 16#01}; +dec_huffman_lookup(16#e7, 16#5) -> {ok, 16#0c, 16#16}; +dec_huffman_lookup(16#e7, 16#6) -> {more, 16#0e, 16#01}; +dec_huffman_lookup(16#e7, 16#7) -> {ok, 16#0e, 16#16}; +dec_huffman_lookup(16#e7, 16#8) -> {more, 16#0f, 16#01}; +dec_huffman_lookup(16#e7, 16#9) -> {ok, 16#0f, 16#16}; +dec_huffman_lookup(16#e7, 16#a) -> {more, 16#10, 16#01}; +dec_huffman_lookup(16#e7, 16#b) -> {ok, 16#10, 16#16}; +dec_huffman_lookup(16#e7, 16#c) -> {more, 16#11, 16#01}; +dec_huffman_lookup(16#e7, 16#d) -> {ok, 16#11, 16#16}; +dec_huffman_lookup(16#e7, 16#e) -> {more, 16#12, 16#01}; +dec_huffman_lookup(16#e7, 16#f) -> {ok, 16#12, 16#16}; +dec_huffman_lookup(16#e8, 16#0) -> {more, 16#08, 16#02}; +dec_huffman_lookup(16#e8, 16#1) -> {more, 16#08, 16#09}; +dec_huffman_lookup(16#e8, 16#2) -> {more, 16#08, 16#17}; +dec_huffman_lookup(16#e8, 16#3) -> {ok, 16#08, 16#28}; +dec_huffman_lookup(16#e8, 16#4) -> {more, 16#0b, 16#02}; +dec_huffman_lookup(16#e8, 16#5) -> {more, 16#0b, 16#09}; +dec_huffman_lookup(16#e8, 16#6) -> {more, 16#0b, 16#17}; +dec_huffman_lookup(16#e8, 16#7) -> {ok, 16#0b, 16#28}; +dec_huffman_lookup(16#e8, 16#8) -> {more, 16#0c, 16#02}; +dec_huffman_lookup(16#e8, 16#9) -> {more, 16#0c, 16#09}; +dec_huffman_lookup(16#e8, 16#a) -> {more, 16#0c, 16#17}; +dec_huffman_lookup(16#e8, 16#b) -> {ok, 16#0c, 16#28}; +dec_huffman_lookup(16#e8, 16#c) -> {more, 16#0e, 16#02}; +dec_huffman_lookup(16#e8, 16#d) -> {more, 16#0e, 16#09}; +dec_huffman_lookup(16#e8, 16#e) -> {more, 16#0e, 16#17}; +dec_huffman_lookup(16#e8, 16#f) -> {ok, 16#0e, 16#28}; +dec_huffman_lookup(16#e9, 16#0) -> {more, 16#08, 16#03}; +dec_huffman_lookup(16#e9, 16#1) -> {more, 16#08, 16#06}; +dec_huffman_lookup(16#e9, 16#2) -> {more, 16#08, 16#0a}; +dec_huffman_lookup(16#e9, 16#3) -> {more, 16#08, 16#0f}; +dec_huffman_lookup(16#e9, 16#4) -> {more, 16#08, 16#18}; +dec_huffman_lookup(16#e9, 16#5) -> {more, 16#08, 16#1f}; +dec_huffman_lookup(16#e9, 16#6) -> {more, 16#08, 16#29}; +dec_huffman_lookup(16#e9, 16#7) -> {ok, 16#08, 16#38}; +dec_huffman_lookup(16#e9, 16#8) -> {more, 16#0b, 16#03}; +dec_huffman_lookup(16#e9, 16#9) -> {more, 16#0b, 16#06}; +dec_huffman_lookup(16#e9, 16#a) -> {more, 16#0b, 16#0a}; +dec_huffman_lookup(16#e9, 
16#b) -> {more, 16#0b, 16#0f}; +dec_huffman_lookup(16#e9, 16#c) -> {more, 16#0b, 16#18}; +dec_huffman_lookup(16#e9, 16#d) -> {more, 16#0b, 16#1f}; +dec_huffman_lookup(16#e9, 16#e) -> {more, 16#0b, 16#29}; +dec_huffman_lookup(16#e9, 16#f) -> {ok, 16#0b, 16#38}; +dec_huffman_lookup(16#ea, 16#0) -> {more, 16#0c, 16#03}; +dec_huffman_lookup(16#ea, 16#1) -> {more, 16#0c, 16#06}; +dec_huffman_lookup(16#ea, 16#2) -> {more, 16#0c, 16#0a}; +dec_huffman_lookup(16#ea, 16#3) -> {more, 16#0c, 16#0f}; +dec_huffman_lookup(16#ea, 16#4) -> {more, 16#0c, 16#18}; +dec_huffman_lookup(16#ea, 16#5) -> {more, 16#0c, 16#1f}; +dec_huffman_lookup(16#ea, 16#6) -> {more, 16#0c, 16#29}; +dec_huffman_lookup(16#ea, 16#7) -> {ok, 16#0c, 16#38}; +dec_huffman_lookup(16#ea, 16#8) -> {more, 16#0e, 16#03}; +dec_huffman_lookup(16#ea, 16#9) -> {more, 16#0e, 16#06}; +dec_huffman_lookup(16#ea, 16#a) -> {more, 16#0e, 16#0a}; +dec_huffman_lookup(16#ea, 16#b) -> {more, 16#0e, 16#0f}; +dec_huffman_lookup(16#ea, 16#c) -> {more, 16#0e, 16#18}; +dec_huffman_lookup(16#ea, 16#d) -> {more, 16#0e, 16#1f}; +dec_huffman_lookup(16#ea, 16#e) -> {more, 16#0e, 16#29}; +dec_huffman_lookup(16#ea, 16#f) -> {ok, 16#0e, 16#38}; +dec_huffman_lookup(16#eb, 16#0) -> {more, 16#0f, 16#02}; +dec_huffman_lookup(16#eb, 16#1) -> {more, 16#0f, 16#09}; +dec_huffman_lookup(16#eb, 16#2) -> {more, 16#0f, 16#17}; +dec_huffman_lookup(16#eb, 16#3) -> {ok, 16#0f, 16#28}; +dec_huffman_lookup(16#eb, 16#4) -> {more, 16#10, 16#02}; +dec_huffman_lookup(16#eb, 16#5) -> {more, 16#10, 16#09}; +dec_huffman_lookup(16#eb, 16#6) -> {more, 16#10, 16#17}; +dec_huffman_lookup(16#eb, 16#7) -> {ok, 16#10, 16#28}; +dec_huffman_lookup(16#eb, 16#8) -> {more, 16#11, 16#02}; +dec_huffman_lookup(16#eb, 16#9) -> {more, 16#11, 16#09}; +dec_huffman_lookup(16#eb, 16#a) -> {more, 16#11, 16#17}; +dec_huffman_lookup(16#eb, 16#b) -> {ok, 16#11, 16#28}; +dec_huffman_lookup(16#eb, 16#c) -> {more, 16#12, 16#02}; +dec_huffman_lookup(16#eb, 16#d) -> {more, 16#12, 16#09}; +dec_huffman_lookup(16#eb, 16#e) -> {more, 16#12, 16#17}; +dec_huffman_lookup(16#eb, 16#f) -> {ok, 16#12, 16#28}; +dec_huffman_lookup(16#ec, 16#0) -> {more, 16#0f, 16#03}; +dec_huffman_lookup(16#ec, 16#1) -> {more, 16#0f, 16#06}; +dec_huffman_lookup(16#ec, 16#2) -> {more, 16#0f, 16#0a}; +dec_huffman_lookup(16#ec, 16#3) -> {more, 16#0f, 16#0f}; +dec_huffman_lookup(16#ec, 16#4) -> {more, 16#0f, 16#18}; +dec_huffman_lookup(16#ec, 16#5) -> {more, 16#0f, 16#1f}; +dec_huffman_lookup(16#ec, 16#6) -> {more, 16#0f, 16#29}; +dec_huffman_lookup(16#ec, 16#7) -> {ok, 16#0f, 16#38}; +dec_huffman_lookup(16#ec, 16#8) -> {more, 16#10, 16#03}; +dec_huffman_lookup(16#ec, 16#9) -> {more, 16#10, 16#06}; +dec_huffman_lookup(16#ec, 16#a) -> {more, 16#10, 16#0a}; +dec_huffman_lookup(16#ec, 16#b) -> {more, 16#10, 16#0f}; +dec_huffman_lookup(16#ec, 16#c) -> {more, 16#10, 16#18}; +dec_huffman_lookup(16#ec, 16#d) -> {more, 16#10, 16#1f}; +dec_huffman_lookup(16#ec, 16#e) -> {more, 16#10, 16#29}; +dec_huffman_lookup(16#ec, 16#f) -> {ok, 16#10, 16#38}; +dec_huffman_lookup(16#ed, 16#0) -> {more, 16#11, 16#03}; +dec_huffman_lookup(16#ed, 16#1) -> {more, 16#11, 16#06}; +dec_huffman_lookup(16#ed, 16#2) -> {more, 16#11, 16#0a}; +dec_huffman_lookup(16#ed, 16#3) -> {more, 16#11, 16#0f}; +dec_huffman_lookup(16#ed, 16#4) -> {more, 16#11, 16#18}; +dec_huffman_lookup(16#ed, 16#5) -> {more, 16#11, 16#1f}; +dec_huffman_lookup(16#ed, 16#6) -> {more, 16#11, 16#29}; +dec_huffman_lookup(16#ed, 16#7) -> {ok, 16#11, 16#38}; +dec_huffman_lookup(16#ed, 16#8) -> {more, 16#12, 16#03}; 
+dec_huffman_lookup(16#ed, 16#9) -> {more, 16#12, 16#06}; +dec_huffman_lookup(16#ed, 16#a) -> {more, 16#12, 16#0a}; +dec_huffman_lookup(16#ed, 16#b) -> {more, 16#12, 16#0f}; +dec_huffman_lookup(16#ed, 16#c) -> {more, 16#12, 16#18}; +dec_huffman_lookup(16#ed, 16#d) -> {more, 16#12, 16#1f}; +dec_huffman_lookup(16#ed, 16#e) -> {more, 16#12, 16#29}; +dec_huffman_lookup(16#ed, 16#f) -> {ok, 16#12, 16#38}; +dec_huffman_lookup(16#ee, 16#0) -> {ok, 16#13, 16#00}; +dec_huffman_lookup(16#ee, 16#1) -> {ok, 16#14, 16#00}; +dec_huffman_lookup(16#ee, 16#2) -> {ok, 16#15, 16#00}; +dec_huffman_lookup(16#ee, 16#3) -> {ok, 16#17, 16#00}; +dec_huffman_lookup(16#ee, 16#4) -> {ok, 16#18, 16#00}; +dec_huffman_lookup(16#ee, 16#5) -> {ok, 16#19, 16#00}; +dec_huffman_lookup(16#ee, 16#6) -> {ok, 16#1a, 16#00}; +dec_huffman_lookup(16#ee, 16#7) -> {ok, 16#1b, 16#00}; +dec_huffman_lookup(16#ee, 16#8) -> {ok, 16#1c, 16#00}; +dec_huffman_lookup(16#ee, 16#9) -> {ok, 16#1d, 16#00}; +dec_huffman_lookup(16#ee, 16#a) -> {ok, 16#1e, 16#00}; +dec_huffman_lookup(16#ee, 16#b) -> {ok, 16#1f, 16#00}; +dec_huffman_lookup(16#ee, 16#c) -> {ok, 16#7f, 16#00}; +dec_huffman_lookup(16#ee, 16#d) -> {ok, 16#dc, 16#00}; +dec_huffman_lookup(16#ee, 16#e) -> {ok, 16#f9, 16#00}; +dec_huffman_lookup(16#ee, 16#f) -> {ok, undefined, 16#fd}; +dec_huffman_lookup(16#ef, 16#0) -> {more, 16#13, 16#01}; +dec_huffman_lookup(16#ef, 16#1) -> {ok, 16#13, 16#16}; +dec_huffman_lookup(16#ef, 16#2) -> {more, 16#14, 16#01}; +dec_huffman_lookup(16#ef, 16#3) -> {ok, 16#14, 16#16}; +dec_huffman_lookup(16#ef, 16#4) -> {more, 16#15, 16#01}; +dec_huffman_lookup(16#ef, 16#5) -> {ok, 16#15, 16#16}; +dec_huffman_lookup(16#ef, 16#6) -> {more, 16#17, 16#01}; +dec_huffman_lookup(16#ef, 16#7) -> {ok, 16#17, 16#16}; +dec_huffman_lookup(16#ef, 16#8) -> {more, 16#18, 16#01}; +dec_huffman_lookup(16#ef, 16#9) -> {ok, 16#18, 16#16}; +dec_huffman_lookup(16#ef, 16#a) -> {more, 16#19, 16#01}; +dec_huffman_lookup(16#ef, 16#b) -> {ok, 16#19, 16#16}; +dec_huffman_lookup(16#ef, 16#c) -> {more, 16#1a, 16#01}; +dec_huffman_lookup(16#ef, 16#d) -> {ok, 16#1a, 16#16}; +dec_huffman_lookup(16#ef, 16#e) -> {more, 16#1b, 16#01}; +dec_huffman_lookup(16#ef, 16#f) -> {ok, 16#1b, 16#16}; +dec_huffman_lookup(16#f0, 16#0) -> {more, 16#13, 16#02}; +dec_huffman_lookup(16#f0, 16#1) -> {more, 16#13, 16#09}; +dec_huffman_lookup(16#f0, 16#2) -> {more, 16#13, 16#17}; +dec_huffman_lookup(16#f0, 16#3) -> {ok, 16#13, 16#28}; +dec_huffman_lookup(16#f0, 16#4) -> {more, 16#14, 16#02}; +dec_huffman_lookup(16#f0, 16#5) -> {more, 16#14, 16#09}; +dec_huffman_lookup(16#f0, 16#6) -> {more, 16#14, 16#17}; +dec_huffman_lookup(16#f0, 16#7) -> {ok, 16#14, 16#28}; +dec_huffman_lookup(16#f0, 16#8) -> {more, 16#15, 16#02}; +dec_huffman_lookup(16#f0, 16#9) -> {more, 16#15, 16#09}; +dec_huffman_lookup(16#f0, 16#a) -> {more, 16#15, 16#17}; +dec_huffman_lookup(16#f0, 16#b) -> {ok, 16#15, 16#28}; +dec_huffman_lookup(16#f0, 16#c) -> {more, 16#17, 16#02}; +dec_huffman_lookup(16#f0, 16#d) -> {more, 16#17, 16#09}; +dec_huffman_lookup(16#f0, 16#e) -> {more, 16#17, 16#17}; +dec_huffman_lookup(16#f0, 16#f) -> {ok, 16#17, 16#28}; +dec_huffman_lookup(16#f1, 16#0) -> {more, 16#13, 16#03}; +dec_huffman_lookup(16#f1, 16#1) -> {more, 16#13, 16#06}; +dec_huffman_lookup(16#f1, 16#2) -> {more, 16#13, 16#0a}; +dec_huffman_lookup(16#f1, 16#3) -> {more, 16#13, 16#0f}; +dec_huffman_lookup(16#f1, 16#4) -> {more, 16#13, 16#18}; +dec_huffman_lookup(16#f1, 16#5) -> {more, 16#13, 16#1f}; +dec_huffman_lookup(16#f1, 16#6) -> {more, 16#13, 16#29}; 
+dec_huffman_lookup(16#f1, 16#7) -> {ok, 16#13, 16#38}; +dec_huffman_lookup(16#f1, 16#8) -> {more, 16#14, 16#03}; +dec_huffman_lookup(16#f1, 16#9) -> {more, 16#14, 16#06}; +dec_huffman_lookup(16#f1, 16#a) -> {more, 16#14, 16#0a}; +dec_huffman_lookup(16#f1, 16#b) -> {more, 16#14, 16#0f}; +dec_huffman_lookup(16#f1, 16#c) -> {more, 16#14, 16#18}; +dec_huffman_lookup(16#f1, 16#d) -> {more, 16#14, 16#1f}; +dec_huffman_lookup(16#f1, 16#e) -> {more, 16#14, 16#29}; +dec_huffman_lookup(16#f1, 16#f) -> {ok, 16#14, 16#38}; +dec_huffman_lookup(16#f2, 16#0) -> {more, 16#15, 16#03}; +dec_huffman_lookup(16#f2, 16#1) -> {more, 16#15, 16#06}; +dec_huffman_lookup(16#f2, 16#2) -> {more, 16#15, 16#0a}; +dec_huffman_lookup(16#f2, 16#3) -> {more, 16#15, 16#0f}; +dec_huffman_lookup(16#f2, 16#4) -> {more, 16#15, 16#18}; +dec_huffman_lookup(16#f2, 16#5) -> {more, 16#15, 16#1f}; +dec_huffman_lookup(16#f2, 16#6) -> {more, 16#15, 16#29}; +dec_huffman_lookup(16#f2, 16#7) -> {ok, 16#15, 16#38}; +dec_huffman_lookup(16#f2, 16#8) -> {more, 16#17, 16#03}; +dec_huffman_lookup(16#f2, 16#9) -> {more, 16#17, 16#06}; +dec_huffman_lookup(16#f2, 16#a) -> {more, 16#17, 16#0a}; +dec_huffman_lookup(16#f2, 16#b) -> {more, 16#17, 16#0f}; +dec_huffman_lookup(16#f2, 16#c) -> {more, 16#17, 16#18}; +dec_huffman_lookup(16#f2, 16#d) -> {more, 16#17, 16#1f}; +dec_huffman_lookup(16#f2, 16#e) -> {more, 16#17, 16#29}; +dec_huffman_lookup(16#f2, 16#f) -> {ok, 16#17, 16#38}; +dec_huffman_lookup(16#f3, 16#0) -> {more, 16#18, 16#02}; +dec_huffman_lookup(16#f3, 16#1) -> {more, 16#18, 16#09}; +dec_huffman_lookup(16#f3, 16#2) -> {more, 16#18, 16#17}; +dec_huffman_lookup(16#f3, 16#3) -> {ok, 16#18, 16#28}; +dec_huffman_lookup(16#f3, 16#4) -> {more, 16#19, 16#02}; +dec_huffman_lookup(16#f3, 16#5) -> {more, 16#19, 16#09}; +dec_huffman_lookup(16#f3, 16#6) -> {more, 16#19, 16#17}; +dec_huffman_lookup(16#f3, 16#7) -> {ok, 16#19, 16#28}; +dec_huffman_lookup(16#f3, 16#8) -> {more, 16#1a, 16#02}; +dec_huffman_lookup(16#f3, 16#9) -> {more, 16#1a, 16#09}; +dec_huffman_lookup(16#f3, 16#a) -> {more, 16#1a, 16#17}; +dec_huffman_lookup(16#f3, 16#b) -> {ok, 16#1a, 16#28}; +dec_huffman_lookup(16#f3, 16#c) -> {more, 16#1b, 16#02}; +dec_huffman_lookup(16#f3, 16#d) -> {more, 16#1b, 16#09}; +dec_huffman_lookup(16#f3, 16#e) -> {more, 16#1b, 16#17}; +dec_huffman_lookup(16#f3, 16#f) -> {ok, 16#1b, 16#28}; +dec_huffman_lookup(16#f4, 16#0) -> {more, 16#18, 16#03}; +dec_huffman_lookup(16#f4, 16#1) -> {more, 16#18, 16#06}; +dec_huffman_lookup(16#f4, 16#2) -> {more, 16#18, 16#0a}; +dec_huffman_lookup(16#f4, 16#3) -> {more, 16#18, 16#0f}; +dec_huffman_lookup(16#f4, 16#4) -> {more, 16#18, 16#18}; +dec_huffman_lookup(16#f4, 16#5) -> {more, 16#18, 16#1f}; +dec_huffman_lookup(16#f4, 16#6) -> {more, 16#18, 16#29}; +dec_huffman_lookup(16#f4, 16#7) -> {ok, 16#18, 16#38}; +dec_huffman_lookup(16#f4, 16#8) -> {more, 16#19, 16#03}; +dec_huffman_lookup(16#f4, 16#9) -> {more, 16#19, 16#06}; +dec_huffman_lookup(16#f4, 16#a) -> {more, 16#19, 16#0a}; +dec_huffman_lookup(16#f4, 16#b) -> {more, 16#19, 16#0f}; +dec_huffman_lookup(16#f4, 16#c) -> {more, 16#19, 16#18}; +dec_huffman_lookup(16#f4, 16#d) -> {more, 16#19, 16#1f}; +dec_huffman_lookup(16#f4, 16#e) -> {more, 16#19, 16#29}; +dec_huffman_lookup(16#f4, 16#f) -> {ok, 16#19, 16#38}; +dec_huffman_lookup(16#f5, 16#0) -> {more, 16#1a, 16#03}; +dec_huffman_lookup(16#f5, 16#1) -> {more, 16#1a, 16#06}; +dec_huffman_lookup(16#f5, 16#2) -> {more, 16#1a, 16#0a}; +dec_huffman_lookup(16#f5, 16#3) -> {more, 16#1a, 16#0f}; +dec_huffman_lookup(16#f5, 16#4) -> 
{more, 16#1a, 16#18}; +dec_huffman_lookup(16#f5, 16#5) -> {more, 16#1a, 16#1f}; +dec_huffman_lookup(16#f5, 16#6) -> {more, 16#1a, 16#29}; +dec_huffman_lookup(16#f5, 16#7) -> {ok, 16#1a, 16#38}; +dec_huffman_lookup(16#f5, 16#8) -> {more, 16#1b, 16#03}; +dec_huffman_lookup(16#f5, 16#9) -> {more, 16#1b, 16#06}; +dec_huffman_lookup(16#f5, 16#a) -> {more, 16#1b, 16#0a}; +dec_huffman_lookup(16#f5, 16#b) -> {more, 16#1b, 16#0f}; +dec_huffman_lookup(16#f5, 16#c) -> {more, 16#1b, 16#18}; +dec_huffman_lookup(16#f5, 16#d) -> {more, 16#1b, 16#1f}; +dec_huffman_lookup(16#f5, 16#e) -> {more, 16#1b, 16#29}; +dec_huffman_lookup(16#f5, 16#f) -> {ok, 16#1b, 16#38}; +dec_huffman_lookup(16#f6, 16#0) -> {more, 16#1c, 16#01}; +dec_huffman_lookup(16#f6, 16#1) -> {ok, 16#1c, 16#16}; +dec_huffman_lookup(16#f6, 16#2) -> {more, 16#1d, 16#01}; +dec_huffman_lookup(16#f6, 16#3) -> {ok, 16#1d, 16#16}; +dec_huffman_lookup(16#f6, 16#4) -> {more, 16#1e, 16#01}; +dec_huffman_lookup(16#f6, 16#5) -> {ok, 16#1e, 16#16}; +dec_huffman_lookup(16#f6, 16#6) -> {more, 16#1f, 16#01}; +dec_huffman_lookup(16#f6, 16#7) -> {ok, 16#1f, 16#16}; +dec_huffman_lookup(16#f6, 16#8) -> {more, 16#7f, 16#01}; +dec_huffman_lookup(16#f6, 16#9) -> {ok, 16#7f, 16#16}; +dec_huffman_lookup(16#f6, 16#a) -> {more, 16#dc, 16#01}; +dec_huffman_lookup(16#f6, 16#b) -> {ok, 16#dc, 16#16}; +dec_huffman_lookup(16#f6, 16#c) -> {more, 16#f9, 16#01}; +dec_huffman_lookup(16#f6, 16#d) -> {ok, 16#f9, 16#16}; +dec_huffman_lookup(16#f6, 16#e) -> {more, undefined, 16#fe}; +dec_huffman_lookup(16#f6, 16#f) -> {ok, undefined, 16#ff}; +dec_huffman_lookup(16#f7, 16#0) -> {more, 16#1c, 16#02}; +dec_huffman_lookup(16#f7, 16#1) -> {more, 16#1c, 16#09}; +dec_huffman_lookup(16#f7, 16#2) -> {more, 16#1c, 16#17}; +dec_huffman_lookup(16#f7, 16#3) -> {ok, 16#1c, 16#28}; +dec_huffman_lookup(16#f7, 16#4) -> {more, 16#1d, 16#02}; +dec_huffman_lookup(16#f7, 16#5) -> {more, 16#1d, 16#09}; +dec_huffman_lookup(16#f7, 16#6) -> {more, 16#1d, 16#17}; +dec_huffman_lookup(16#f7, 16#7) -> {ok, 16#1d, 16#28}; +dec_huffman_lookup(16#f7, 16#8) -> {more, 16#1e, 16#02}; +dec_huffman_lookup(16#f7, 16#9) -> {more, 16#1e, 16#09}; +dec_huffman_lookup(16#f7, 16#a) -> {more, 16#1e, 16#17}; +dec_huffman_lookup(16#f7, 16#b) -> {ok, 16#1e, 16#28}; +dec_huffman_lookup(16#f7, 16#c) -> {more, 16#1f, 16#02}; +dec_huffman_lookup(16#f7, 16#d) -> {more, 16#1f, 16#09}; +dec_huffman_lookup(16#f7, 16#e) -> {more, 16#1f, 16#17}; +dec_huffman_lookup(16#f7, 16#f) -> {ok, 16#1f, 16#28}; +dec_huffman_lookup(16#f8, 16#0) -> {more, 16#1c, 16#03}; +dec_huffman_lookup(16#f8, 16#1) -> {more, 16#1c, 16#06}; +dec_huffman_lookup(16#f8, 16#2) -> {more, 16#1c, 16#0a}; +dec_huffman_lookup(16#f8, 16#3) -> {more, 16#1c, 16#0f}; +dec_huffman_lookup(16#f8, 16#4) -> {more, 16#1c, 16#18}; +dec_huffman_lookup(16#f8, 16#5) -> {more, 16#1c, 16#1f}; +dec_huffman_lookup(16#f8, 16#6) -> {more, 16#1c, 16#29}; +dec_huffman_lookup(16#f8, 16#7) -> {ok, 16#1c, 16#38}; +dec_huffman_lookup(16#f8, 16#8) -> {more, 16#1d, 16#03}; +dec_huffman_lookup(16#f8, 16#9) -> {more, 16#1d, 16#06}; +dec_huffman_lookup(16#f8, 16#a) -> {more, 16#1d, 16#0a}; +dec_huffman_lookup(16#f8, 16#b) -> {more, 16#1d, 16#0f}; +dec_huffman_lookup(16#f8, 16#c) -> {more, 16#1d, 16#18}; +dec_huffman_lookup(16#f8, 16#d) -> {more, 16#1d, 16#1f}; +dec_huffman_lookup(16#f8, 16#e) -> {more, 16#1d, 16#29}; +dec_huffman_lookup(16#f8, 16#f) -> {ok, 16#1d, 16#38}; +dec_huffman_lookup(16#f9, 16#0) -> {more, 16#1e, 16#03}; +dec_huffman_lookup(16#f9, 16#1) -> {more, 16#1e, 16#06}; 
+dec_huffman_lookup(16#f9, 16#2) -> {more, 16#1e, 16#0a}; +dec_huffman_lookup(16#f9, 16#3) -> {more, 16#1e, 16#0f}; +dec_huffman_lookup(16#f9, 16#4) -> {more, 16#1e, 16#18}; +dec_huffman_lookup(16#f9, 16#5) -> {more, 16#1e, 16#1f}; +dec_huffman_lookup(16#f9, 16#6) -> {more, 16#1e, 16#29}; +dec_huffman_lookup(16#f9, 16#7) -> {ok, 16#1e, 16#38}; +dec_huffman_lookup(16#f9, 16#8) -> {more, 16#1f, 16#03}; +dec_huffman_lookup(16#f9, 16#9) -> {more, 16#1f, 16#06}; +dec_huffman_lookup(16#f9, 16#a) -> {more, 16#1f, 16#0a}; +dec_huffman_lookup(16#f9, 16#b) -> {more, 16#1f, 16#0f}; +dec_huffman_lookup(16#f9, 16#c) -> {more, 16#1f, 16#18}; +dec_huffman_lookup(16#f9, 16#d) -> {more, 16#1f, 16#1f}; +dec_huffman_lookup(16#f9, 16#e) -> {more, 16#1f, 16#29}; +dec_huffman_lookup(16#f9, 16#f) -> {ok, 16#1f, 16#38}; +dec_huffman_lookup(16#fa, 16#0) -> {more, 16#7f, 16#02}; +dec_huffman_lookup(16#fa, 16#1) -> {more, 16#7f, 16#09}; +dec_huffman_lookup(16#fa, 16#2) -> {more, 16#7f, 16#17}; +dec_huffman_lookup(16#fa, 16#3) -> {ok, 16#7f, 16#28}; +dec_huffman_lookup(16#fa, 16#4) -> {more, 16#dc, 16#02}; +dec_huffman_lookup(16#fa, 16#5) -> {more, 16#dc, 16#09}; +dec_huffman_lookup(16#fa, 16#6) -> {more, 16#dc, 16#17}; +dec_huffman_lookup(16#fa, 16#7) -> {ok, 16#dc, 16#28}; +dec_huffman_lookup(16#fa, 16#8) -> {more, 16#f9, 16#02}; +dec_huffman_lookup(16#fa, 16#9) -> {more, 16#f9, 16#09}; +dec_huffman_lookup(16#fa, 16#a) -> {more, 16#f9, 16#17}; +dec_huffman_lookup(16#fa, 16#b) -> {ok, 16#f9, 16#28}; +dec_huffman_lookup(16#fa, 16#c) -> {ok, 16#0a, 16#00}; +dec_huffman_lookup(16#fa, 16#d) -> {ok, 16#0d, 16#00}; +dec_huffman_lookup(16#fa, 16#e) -> {ok, 16#16, 16#00}; +dec_huffman_lookup(16#fa, 16#f) -> error; +dec_huffman_lookup(16#fb, 16#0) -> {more, 16#7f, 16#03}; +dec_huffman_lookup(16#fb, 16#1) -> {more, 16#7f, 16#06}; +dec_huffman_lookup(16#fb, 16#2) -> {more, 16#7f, 16#0a}; +dec_huffman_lookup(16#fb, 16#3) -> {more, 16#7f, 16#0f}; +dec_huffman_lookup(16#fb, 16#4) -> {more, 16#7f, 16#18}; +dec_huffman_lookup(16#fb, 16#5) -> {more, 16#7f, 16#1f}; +dec_huffman_lookup(16#fb, 16#6) -> {more, 16#7f, 16#29}; +dec_huffman_lookup(16#fb, 16#7) -> {ok, 16#7f, 16#38}; +dec_huffman_lookup(16#fb, 16#8) -> {more, 16#dc, 16#03}; +dec_huffman_lookup(16#fb, 16#9) -> {more, 16#dc, 16#06}; +dec_huffman_lookup(16#fb, 16#a) -> {more, 16#dc, 16#0a}; +dec_huffman_lookup(16#fb, 16#b) -> {more, 16#dc, 16#0f}; +dec_huffman_lookup(16#fb, 16#c) -> {more, 16#dc, 16#18}; +dec_huffman_lookup(16#fb, 16#d) -> {more, 16#dc, 16#1f}; +dec_huffman_lookup(16#fb, 16#e) -> {more, 16#dc, 16#29}; +dec_huffman_lookup(16#fb, 16#f) -> {ok, 16#dc, 16#38}; +dec_huffman_lookup(16#fc, 16#0) -> {more, 16#f9, 16#03}; +dec_huffman_lookup(16#fc, 16#1) -> {more, 16#f9, 16#06}; +dec_huffman_lookup(16#fc, 16#2) -> {more, 16#f9, 16#0a}; +dec_huffman_lookup(16#fc, 16#3) -> {more, 16#f9, 16#0f}; +dec_huffman_lookup(16#fc, 16#4) -> {more, 16#f9, 16#18}; +dec_huffman_lookup(16#fc, 16#5) -> {more, 16#f9, 16#1f}; +dec_huffman_lookup(16#fc, 16#6) -> {more, 16#f9, 16#29}; +dec_huffman_lookup(16#fc, 16#7) -> {ok, 16#f9, 16#38}; +dec_huffman_lookup(16#fc, 16#8) -> {more, 16#0a, 16#01}; +dec_huffman_lookup(16#fc, 16#9) -> {ok, 16#0a, 16#16}; +dec_huffman_lookup(16#fc, 16#a) -> {more, 16#0d, 16#01}; +dec_huffman_lookup(16#fc, 16#b) -> {ok, 16#0d, 16#16}; +dec_huffman_lookup(16#fc, 16#c) -> {more, 16#16, 16#01}; +dec_huffman_lookup(16#fc, 16#d) -> {ok, 16#16, 16#16}; +dec_huffman_lookup(16#fc, 16#e) -> error; +dec_huffman_lookup(16#fc, 16#f) -> error; +dec_huffman_lookup(16#fd, 
16#0) -> {more, 16#0a, 16#02}; +dec_huffman_lookup(16#fd, 16#1) -> {more, 16#0a, 16#09}; +dec_huffman_lookup(16#fd, 16#2) -> {more, 16#0a, 16#17}; +dec_huffman_lookup(16#fd, 16#3) -> {ok, 16#0a, 16#28}; +dec_huffman_lookup(16#fd, 16#4) -> {more, 16#0d, 16#02}; +dec_huffman_lookup(16#fd, 16#5) -> {more, 16#0d, 16#09}; +dec_huffman_lookup(16#fd, 16#6) -> {more, 16#0d, 16#17}; +dec_huffman_lookup(16#fd, 16#7) -> {ok, 16#0d, 16#28}; +dec_huffman_lookup(16#fd, 16#8) -> {more, 16#16, 16#02}; +dec_huffman_lookup(16#fd, 16#9) -> {more, 16#16, 16#09}; +dec_huffman_lookup(16#fd, 16#a) -> {more, 16#16, 16#17}; +dec_huffman_lookup(16#fd, 16#b) -> {ok, 16#16, 16#28}; +dec_huffman_lookup(16#fd, 16#c) -> error; +dec_huffman_lookup(16#fd, 16#d) -> error; +dec_huffman_lookup(16#fd, 16#e) -> error; +dec_huffman_lookup(16#fd, 16#f) -> error; +dec_huffman_lookup(16#fe, 16#0) -> {more, 16#0a, 16#03}; +dec_huffman_lookup(16#fe, 16#1) -> {more, 16#0a, 16#06}; +dec_huffman_lookup(16#fe, 16#2) -> {more, 16#0a, 16#0a}; +dec_huffman_lookup(16#fe, 16#3) -> {more, 16#0a, 16#0f}; +dec_huffman_lookup(16#fe, 16#4) -> {more, 16#0a, 16#18}; +dec_huffman_lookup(16#fe, 16#5) -> {more, 16#0a, 16#1f}; +dec_huffman_lookup(16#fe, 16#6) -> {more, 16#0a, 16#29}; +dec_huffman_lookup(16#fe, 16#7) -> {ok, 16#0a, 16#38}; +dec_huffman_lookup(16#fe, 16#8) -> {more, 16#0d, 16#03}; +dec_huffman_lookup(16#fe, 16#9) -> {more, 16#0d, 16#06}; +dec_huffman_lookup(16#fe, 16#a) -> {more, 16#0d, 16#0a}; +dec_huffman_lookup(16#fe, 16#b) -> {more, 16#0d, 16#0f}; +dec_huffman_lookup(16#fe, 16#c) -> {more, 16#0d, 16#18}; +dec_huffman_lookup(16#fe, 16#d) -> {more, 16#0d, 16#1f}; +dec_huffman_lookup(16#fe, 16#e) -> {more, 16#0d, 16#29}; +dec_huffman_lookup(16#fe, 16#f) -> {ok, 16#0d, 16#38}; +dec_huffman_lookup(16#ff, 16#0) -> {more, 16#16, 16#03}; +dec_huffman_lookup(16#ff, 16#1) -> {more, 16#16, 16#06}; +dec_huffman_lookup(16#ff, 16#2) -> {more, 16#16, 16#0a}; +dec_huffman_lookup(16#ff, 16#3) -> {more, 16#16, 16#0f}; +dec_huffman_lookup(16#ff, 16#4) -> {more, 16#16, 16#18}; +dec_huffman_lookup(16#ff, 16#5) -> {more, 16#16, 16#1f}; +dec_huffman_lookup(16#ff, 16#6) -> {more, 16#16, 16#29}; +dec_huffman_lookup(16#ff, 16#7) -> {ok, 16#16, 16#38}; +dec_huffman_lookup(16#ff, 16#8) -> error; +dec_huffman_lookup(16#ff, 16#9) -> error; +dec_huffman_lookup(16#ff, 16#a) -> error; +dec_huffman_lookup(16#ff, 16#b) -> error; +dec_huffman_lookup(16#ff, 16#c) -> error; +dec_huffman_lookup(16#ff, 16#d) -> error; +dec_huffman_lookup(16#ff, 16#e) -> error; +dec_huffman_lookup(16#ff, 16#f) -> error. diff --git a/src/wsLib/cow_http.erl b/src/wsLib/cow_http.erl new file mode 100644 index 0000000..bfaace3 --- /dev/null +++ b/src/wsLib/cow_http.erl @@ -0,0 +1,426 @@ +%% Copyright (c) 2013-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_http). 
+ +-export([parse_request_line/1]). +-export([parse_status_line/1]). +-export([status_to_integer/1]). +-export([parse_headers/1]). + +-export([parse_fullpath/1]). +-export([parse_version/1]). + +-export([request/4]). +-export([response/3]). +-export([headers/1]). +-export([version/1]). + +-type version() :: 'HTTP/1.0' | 'HTTP/1.1'. +-export_type([version/0]). + +-type status() :: 100..999. +-export_type([status/0]). + +-type headers() :: [{binary(), iodata()}]. +-export_type([headers/0]). + +-include("cow_inline.hrl"). + +%% @doc Parse the request line. + +-spec parse_request_line(binary()) -> {binary(), binary(), version(), binary()}. +parse_request_line(Data) -> + {Pos, _} = binary:match(Data, <<"\r">>), + <<RequestLine:Pos/binary, "\r\n", Rest/bits>> = Data, + [Method, Target, Version0] = binary:split(RequestLine, <<$\s>>, [trim_all, global]), + Version = case Version0 of + <<"HTTP/1.1">> -> 'HTTP/1.1'; + <<"HTTP/1.0">> -> 'HTTP/1.0' + end, + {Method, Target, Version, Rest}. + +-ifdef(TEST). +parse_request_line_test_() -> + Tests = [ + {<<"GET /path HTTP/1.0\r\nRest">>, + {<<"GET">>, <<"/path">>, 'HTTP/1.0', <<"Rest">>}}, + {<<"GET /path HTTP/1.1\r\nRest">>, + {<<"GET">>, <<"/path">>, 'HTTP/1.1', <<"Rest">>}}, + {<<"CONNECT proxy.example.org:1080 HTTP/1.1\r\nRest">>, + {<<"CONNECT">>, <<"proxy.example.org:1080">>, 'HTTP/1.1', <<"Rest">>}} + ], + [{V, fun() -> R = parse_request_line(V) end} + || {V, R} <- Tests]. + +parse_request_line_error_test_() -> + Tests = [ + <<>>, + <<"GET">>, + <<"GET /path\r\n">>, + <<"GET /path HTTP/1.1">>, + <<"GET /path HTTP/1.1\r">>, + <<"GET /path HTTP/1.1\n">>, + <<"GET /path HTTP/0.9\r\n">>, + <<"content-type: text/plain\r\n">>, + <<0:80, "\r\n">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_request_line(V)) end} + || V <- Tests]. + +horse_parse_request_line_get_path() -> + horse:repeat(200000, + parse_request_line(<<"GET /path HTTP/1.1\r\n">>) + ). +-endif. + +%% @doc Parse the status line. + +-spec parse_status_line(binary()) -> {version(), status(), binary(), binary()}. +parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) -> + {'HTTP/1.1', 200, <<"OK">>, Rest}; +parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) -> + {'HTTP/1.1', 404, <<"Not Found">>, Rest}; +parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) -> + {'HTTP/1.1', 500, <<"Internal Server Error">>, Rest}; +parse_status_line(<< "HTTP/1.1 ", Status/bits >>) -> + parse_status_line(Status, 'HTTP/1.1'); +parse_status_line(<< "HTTP/1.0 ", Status/bits >>) -> + parse_status_line(Status, 'HTTP/1.0'). + +parse_status_line(<<H, T, U, " ", Rest/bits>>, Version) -> + Status = status_to_integer(H, T, U), + {Pos, _} = binary:match(Rest, <<"\r">>), + << StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest, + {Version, Status, StatusStr, Rest2}. + +-spec status_to_integer(status() | binary()) -> status(). +status_to_integer(Status) when is_integer(Status) -> + Status; +status_to_integer(Status) -> + case Status of + <<H, T, U>> -> + status_to_integer(H, T, U); + <<H, T, U, " ", _/bits>> -> + status_to_integer(H, T, U) + end. + +status_to_integer(H, T, U) + when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 -> + (H - $0) * 100 + (T - $0) * 10 + (U - $0). + +-ifdef(TEST).
+parse_status_line_test_() -> + Tests = [ + {<<"HTTP/1.1 200 OK\r\nRest">>, + {'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}}, + {<<"HTTP/1.0 404 Not Found\r\nRest">>, + {'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}}, + {<<"HTTP/1.1 500 Something very funny here\r\nRest">>, + {'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}}, + {<<"HTTP/1.1 200 \r\nRest">>, + {'HTTP/1.1', 200, <<>>, <<"Rest">>}} + ], + [{V, fun() -> R = parse_status_line(V) end} + || {V, R} <- Tests]. + +parse_status_line_error_test_() -> + Tests = [ + <<>>, + <<"HTTP/1.1">>, + <<"HTTP/1.1 200\r\n">>, + <<"HTTP/1.1 200 OK">>, + <<"HTTP/1.1 200 OK\r">>, + <<"HTTP/1.1 200 OK\n">>, + <<"HTTP/0.9 200 OK\r\n">>, + <<"HTTP/1.1 42 Answer\r\n">>, + <<"HTTP/1.1 999999999 More than OK\r\n">>, + <<"content-type: text/plain\r\n">>, + <<0:80, "\r\n">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end} + || V <- Tests]. + +horse_parse_status_line_200() -> + horse:repeat(200000, + parse_status_line(<<"HTTP/1.1 200 OK\r\n">>) + ). + +horse_parse_status_line_404() -> + horse:repeat(200000, + parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>) + ). + +horse_parse_status_line_500() -> + horse:repeat(200000, + parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>) + ). + +horse_parse_status_line_other() -> + horse:repeat(200000, + parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>) + ). +-endif. + +%% @doc Parse the list of headers. + +-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}. +parse_headers(Data) -> + parse_header(Data, []). + +parse_header(<< $\r, $\n, Rest/bits >>, Acc) -> + {lists:reverse(Acc), Rest}; +parse_header(Data, Acc) -> + parse_hd_name(Data, Acc, <<>>). + +parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) -> + case C of + $: -> parse_hd_before_value(Rest, Acc, SoFar); + $\s -> parse_hd_name_ws(Rest, Acc, SoFar); + $\t -> parse_hd_name_ws(Rest, Acc, SoFar); + _ -> ?LOWER(parse_hd_name, Rest, Acc, SoFar) + end. + +parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) -> + case C of + $: -> parse_hd_before_value(Rest, Acc, Name); + $\s -> parse_hd_name_ws(Rest, Acc, Name); + $\t -> parse_hd_name_ws(Rest, Acc, Name) + end. + +parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) -> + parse_hd_before_value(Rest, Acc, Name); +parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) -> + parse_hd_before_value(Rest, Acc, Name); +parse_hd_before_value(Data, Acc, Name) -> + parse_hd_value(Data, Acc, Name, <<>>). + +parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) -> + case Rest of + << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t -> + parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>); + << $\n, Rest2/bits >> -> + Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1), + parse_header(Rest2, [{Name, Value}|Acc]) + end; +parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) -> + parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>). + +%% This function has been copied from cowboy_http. +clean_value_ws_end(_, -1) -> + <<>>; +clean_value_ws_end(Value, N) -> + case binary:at(Value, N) of + $\s -> clean_value_ws_end(Value, N - 1); + $\t -> clean_value_ws_end(Value, N - 1); + _ -> + S = N + 1, + << Value2:S/binary, _/bits >> = Value, + Value2 + end. + +-ifdef(TEST). 
+parse_headers_test_() -> + Tests = [ + {<<"\r\nRest">>, + {[], <<"Rest">>}}, + {<<"Server: Erlang/R17 \r\n\r\n">>, + {[{<<"server">>, <<"Erlang/R17">>}], <<>>}}, + {<<"Server: Erlang/R17\r\n" + "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n" + "Multiline-Header: why hello!\r\n" + " I didn't see you all the way over there!\r\n" + "Content-Length: 12\r\n" + "Content-Type: text/plain\r\n" + "\r\nRest">>, + {[{<<"server">>, <<"Erlang/R17">>}, + {<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>}, + {<<"multiline-header">>, + <<"why hello! I didn't see you all the way over there!">>}, + {<<"content-length">>, <<"12">>}, + {<<"content-type">>, <<"text/plain">>}], + <<"Rest">>}} + ], + [{V, fun() -> R = parse_headers(V) end} + || {V, R} <- Tests]. + +parse_headers_error_test_() -> + Tests = [ + <<>>, + <<"\r">>, + <<"Malformed\r\n\r\n">>, + <<"content-type: text/plain\r\nMalformed\r\n\r\n">>, + <<"HTTP/1.1 200 OK\r\n\r\n">>, + <<0:80, "\r\n\r\n">>, + <<"content-type: text/plain\r\ncontent-length: 12\r\n">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end} + || V <- Tests]. + +horse_parse_headers() -> + horse:repeat(50000, + parse_headers(<<"Server: Erlang/R17\r\n" + "Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n" + "Multiline-Header: why hello!\r\n" + " I didn't see you all the way over there!\r\n" + "Content-Length: 12\r\n" + "Content-Type: text/plain\r\n" + "\r\nRest">>) + ). +-endif. + +%% @doc Extract path and query string from a binary, +%% removing any fragment component. + +-spec parse_fullpath(binary()) -> {binary(), binary()}. +parse_fullpath(Fullpath) -> + parse_fullpath(Fullpath, <<>>). + +parse_fullpath(<<>>, Path) -> {Path, <<>>}; +parse_fullpath(<< $#, _/bits >>, Path) -> {Path, <<>>}; +parse_fullpath(<< $?, Qs/bits >>, Path) -> parse_fullpath_query(Qs, Path, <<>>); +parse_fullpath(<< C, Rest/bits >>, SoFar) -> parse_fullpath(Rest, << SoFar/binary, C >>). + +parse_fullpath_query(<<>>, Path, Query) -> {Path, Query}; +parse_fullpath_query(<< $#, _/bits >>, Path, Query) -> {Path, Query}; +parse_fullpath_query(<< C, Rest/bits >>, Path, SoFar) -> + parse_fullpath_query(Rest, Path, << SoFar/binary, C >>). + +-ifdef(TEST). +parse_fullpath_test() -> + {<<"*">>, <<>>} = parse_fullpath(<<"*">>), + {<<"/">>, <<>>} = parse_fullpath(<<"/">>), + {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource#fragment">>), + {<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>), + {<<"/">>, <<>>} = parse_fullpath(<<"/?">>), + {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy#fragment">>), + {<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>), + {<<"/path/to/resource">>, <<"q=cowboy">>} + = parse_fullpath(<<"/path/to/resource?q=cowboy">>), + ok. +-endif. + +%% @doc Convert an HTTP version to atom. + +-spec parse_version(binary()) -> version(). +parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1'; +parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'. + +-ifdef(TEST). +parse_version_test() -> + 'HTTP/1.1' = parse_version(<<"HTTP/1.1">>), + 'HTTP/1.0' = parse_version(<<"HTTP/1.0">>), + {'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)), + ok. +-endif. + +%% @doc Return formatted request-line and headers. +%% @todo Add tests when the corresponding reverse functions are added. + +-spec request(binary(), iodata(), version(), headers()) -> iodata(). +request(Method, Path, Version, Headers) -> + [Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>, + [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers], + <<"\r\n">>]. 
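The request/4 builder above is the inverse of parse_request_line/1 and parse_headers/1 from this same file, so the three can be exercised together. The sketch below is illustrative only and is not part of the patch: the function name and the host header value are arbitrary, and it simply assumes cow_http compiles as shown.

request_roundtrip_example() ->
    %% Build a request head as iodata, then flatten it so the parsers can consume it.
    Req = iolist_to_binary(cow_http:request(<<"GET">>, <<"/">>, 'HTTP/1.1',
        [{<<"host">>, <<"example.org">>}])),
    <<"GET / HTTP/1.1\r\nhost: example.org\r\n\r\n">> = Req,
    %% Parse the request line back, then the header block that follows it.
    {<<"GET">>, <<"/">>, 'HTTP/1.1', Rest} = cow_http:parse_request_line(Req),
    {[{<<"host">>, <<"example.org">>}], <<>>} = cow_http:parse_headers(Rest),
    ok.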
+ +-spec response(status() | binary(), version(), headers()) -> iodata(). +response(Status, Version, Headers) -> + [version(Version), <<" ">>, status(Status), <<"\r\n">>, + headers(Headers), <<"\r\n">>]. + +-spec headers(headers()) -> iodata(). +headers(Headers) -> + [[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers]. + +%% @doc Return the version as a binary. + +-spec version(version()) -> binary(). +version('HTTP/1.1') -> <<"HTTP/1.1">>; +version('HTTP/1.0') -> <<"HTTP/1.0">>. + +-ifdef(TEST). +version_test() -> + <<"HTTP/1.1">> = version('HTTP/1.1'), + <<"HTTP/1.0">> = version('HTTP/1.0'), + {'EXIT', _} = (catch version('HTTP/1.2')), + ok. +-endif. + +%% @doc Return the status code and string as binary. + +-spec status(status() | binary()) -> binary(). +status(100) -> <<"100 Continue">>; +status(101) -> <<"101 Switching Protocols">>; +status(102) -> <<"102 Processing">>; +status(103) -> <<"103 Early Hints">>; +status(200) -> <<"200 OK">>; +status(201) -> <<"201 Created">>; +status(202) -> <<"202 Accepted">>; +status(203) -> <<"203 Non-Authoritative Information">>; +status(204) -> <<"204 No Content">>; +status(205) -> <<"205 Reset Content">>; +status(206) -> <<"206 Partial Content">>; +status(207) -> <<"207 Multi-Status">>; +status(208) -> <<"208 Already Reported">>; +status(226) -> <<"226 IM Used">>; +status(300) -> <<"300 Multiple Choices">>; +status(301) -> <<"301 Moved Permanently">>; +status(302) -> <<"302 Found">>; +status(303) -> <<"303 See Other">>; +status(304) -> <<"304 Not Modified">>; +status(305) -> <<"305 Use Proxy">>; +status(306) -> <<"306 Switch Proxy">>; +status(307) -> <<"307 Temporary Redirect">>; +status(308) -> <<"308 Permanent Redirect">>; +status(400) -> <<"400 Bad Request">>; +status(401) -> <<"401 Unauthorized">>; +status(402) -> <<"402 Payment Required">>; +status(403) -> <<"403 Forbidden">>; +status(404) -> <<"404 Not Found">>; +status(405) -> <<"405 Method Not Allowed">>; +status(406) -> <<"406 Not Acceptable">>; +status(407) -> <<"407 Proxy Authentication Required">>; +status(408) -> <<"408 Request Timeout">>; +status(409) -> <<"409 Conflict">>; +status(410) -> <<"410 Gone">>; +status(411) -> <<"411 Length Required">>; +status(412) -> <<"412 Precondition Failed">>; +status(413) -> <<"413 Request Entity Too Large">>; +status(414) -> <<"414 Request-URI Too Long">>; +status(415) -> <<"415 Unsupported Media Type">>; +status(416) -> <<"416 Requested Range Not Satisfiable">>; +status(417) -> <<"417 Expectation Failed">>; +status(418) -> <<"418 I'm a teapot">>; +status(421) -> <<"421 Misdirected Request">>; +status(422) -> <<"422 Unprocessable Entity">>; +status(423) -> <<"423 Locked">>; +status(424) -> <<"424 Failed Dependency">>; +status(425) -> <<"425 Unordered Collection">>; +status(426) -> <<"426 Upgrade Required">>; +status(428) -> <<"428 Precondition Required">>; +status(429) -> <<"429 Too Many Requests">>; +status(431) -> <<"431 Request Header Fields Too Large">>; +status(451) -> <<"451 Unavailable For Legal Reasons">>; +status(500) -> <<"500 Internal Server Error">>; +status(501) -> <<"501 Not Implemented">>; +status(502) -> <<"502 Bad Gateway">>; +status(503) -> <<"503 Service Unavailable">>; +status(504) -> <<"504 Gateway Timeout">>; +status(505) -> <<"505 HTTP Version Not Supported">>; +status(506) -> <<"506 Variant Also Negotiates">>; +status(507) -> <<"507 Insufficient Storage">>; +status(508) -> <<"508 Loop Detected">>; +status(510) -> <<"510 Not Extended">>; +status(511) -> <<"511 Network Authentication Required">>; +status(B) when is_binary(B) -> 
B. diff --git a/src/wsLib/cow_http2.erl b/src/wsLib/cow_http2.erl new file mode 100644 index 0000000..225d2ec --- /dev/null +++ b/src/wsLib/cow_http2.erl @@ -0,0 +1,483 @@ +%% Copyright (c) 2015-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_http2). + +%% Parsing. +-export([parse_sequence/1]). +-export([parse/1]). +-export([parse/2]). +-export([parse_settings_payload/1]). + +%% Building. +-export([data/3]). +-export([data_header/3]). +-export([headers/3]). +-export([priority/4]). +-export([rst_stream/2]). +-export([settings/1]). +-export([settings_payload/1]). +-export([settings_ack/0]). +-export([push_promise/3]). +-export([ping/1]). +-export([ping_ack/1]). +-export([goaway/3]). +-export([window_update/1]). +-export([window_update/2]). + +-type streamid() :: pos_integer(). +-export_type([streamid/0]). + +-type fin() :: fin | nofin. +-export_type([fin/0]). + +-type head_fin() :: head_fin | head_nofin. +-export_type([head_fin/0]). + +-type exclusive() :: exclusive | shared. +-type weight() :: 1..256. +-type settings() :: map(). + +-type error() :: no_error + | protocol_error + | internal_error + | flow_control_error + | settings_timeout + | stream_closed + | frame_size_error + | refused_stream + | cancel + | compression_error + | connect_error + | enhance_your_calm + | inadequate_security + | http_1_1_required + | unknown_error. +-export_type([error/0]). + +-type frame() :: {data, streamid(), fin(), binary()} + | {headers, streamid(), fin(), head_fin(), binary()} + | {headers, streamid(), fin(), head_fin(), exclusive(), streamid(), weight(), binary()} + | {priority, streamid(), exclusive(), streamid(), weight()} + | {rst_stream, streamid(), error()} + | {settings, settings()} + | settings_ack + | {push_promise, streamid(), head_fin(), streamid(), binary()} + | {ping, integer()} + | {ping_ack, integer()} + | {goaway, streamid(), error(), binary()} + | {window_update, non_neg_integer()} + | {window_update, streamid(), non_neg_integer()} + | {continuation, streamid(), head_fin(), binary()}. +-export_type([frame/0]). + +%% Parsing. + +-spec parse_sequence(binary()) + -> {ok, binary()} | more | {connection_error, error(), atom()}. +parse_sequence(<<"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n", Rest/bits>>) -> + {ok, Rest}; +parse_sequence(Data) when byte_size(Data) >= 24 -> + {connection_error, protocol_error, + 'The connection preface was invalid. (RFC7540 3.5)'}; +parse_sequence(Data) -> + Len = byte_size(Data), + <<Preface:Len/binary, _/bits>> = <<"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n">>, + case Data of + Preface -> + more; + _ -> + {connection_error, protocol_error, + 'The connection preface was invalid. (RFC7540 3.5)'} + end. + +parse(<< Len:24, _/bits >>, MaxFrameSize) when Len > MaxFrameSize -> + {connection_error, frame_size_error, 'The frame size exceeded SETTINGS_MAX_FRAME_SIZE. 
(RFC7540 4.2)'}; +parse(Data, _) -> + parse(Data). + +%% +%% DATA frames. +%% +parse(<< _:24, 0:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'DATA frames MUST be associated with a stream. (RFC7540 6.1)'}; +parse(<< 0:24, 0:8, _:4, 1:1, _:35, _/bits >>) -> + {connection_error, frame_size_error, 'DATA frames with padding flag MUST have a length > 0. (RFC7540 6.1)'}; +parse(<< Len0:24, 0:8, _:4, 1:1, _:35, PadLen:8, _/bits >>) when PadLen >= Len0 -> + {connection_error, protocol_error, 'Length of padding MUST be less than length of payload. (RFC7540 6.1)'}; +%% No padding. +parse(<< Len:24, 0:8, _:4, 0:1, _:2, FlagEndStream:1, _:1, StreamID:31, Data:Len/binary, Rest/bits >>) -> + {ok, {data, StreamID, parse_fin(FlagEndStream), Data}, Rest}; +%% Padding. +parse(<< Len0:24, 0:8, _:4, 1:1, _:2, FlagEndStream:1, _:1, StreamID:31, PadLen:8, Rest0/bits >>) + when byte_size(Rest0) >= Len0 - 1 -> + Len = Len0 - PadLen - 1, + case Rest0 of + << Data:Len/binary, 0:PadLen/unit:8, Rest/bits >> -> + {ok, {data, StreamID, parse_fin(FlagEndStream), Data}, Rest}; + _ -> + {connection_error, protocol_error, 'Padding octets MUST be set to zero. (RFC7540 6.1)'} + end; +%% +%% HEADERS frames. +%% +parse(<< _:24, 1:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'HEADERS frames MUST be associated with a stream. (RFC7540 6.2)'}; +parse(<< 0:24, 1:8, _:4, 1:1, _:35, _/bits >>) -> + {connection_error, frame_size_error, 'HEADERS frames with padding flag MUST have a length > 0. (RFC7540 6.1)'}; +parse(<< Len:24, 1:8, _:2, 1:1, _:37, _/bits >>) when Len < 5 -> + {connection_error, frame_size_error, 'HEADERS frames with priority flag MUST have a length >= 5. (RFC7540 6.1)'}; +parse(<< Len:24, 1:8, _:2, 1:1, _:1, 1:1, _:35, _/bits >>) when Len < 6 -> + {connection_error, frame_size_error, 'HEADERS frames with padding and priority flags MUST have a length >= 6. (RFC7540 6.1)'}; +parse(<< Len0:24, 1:8, _:4, 1:1, _:35, PadLen:8, _/bits >>) when PadLen >= Len0 -> + {connection_error, protocol_error, 'Length of padding MUST be less than length of payload. (RFC7540 6.2)'}; +parse(<< Len0:24, 1:8, _:2, 1:1, _:1, 1:1, _:35, PadLen:8, _/bits >>) when PadLen >= Len0 - 5 -> + {connection_error, protocol_error, 'Length of padding MUST be less than length of payload. (RFC7540 6.2)'}; +%% No padding, no priority. +parse(<< Len:24, 1:8, _:2, 0:1, _:1, 0:1, FlagEndHeaders:1, _:1, FlagEndStream:1, _:1, StreamID:31, + HeaderBlockFragment:Len/binary, Rest/bits >>) -> + {ok, {headers, StreamID, parse_fin(FlagEndStream), parse_head_fin(FlagEndHeaders), HeaderBlockFragment}, Rest}; +%% Padding, no priority. +parse(<< Len0:24, 1:8, _:2, 0:1, _:1, 1:1, FlagEndHeaders:1, _:1, FlagEndStream:1, _:1, StreamID:31, + PadLen:8, Rest0/bits >>) when byte_size(Rest0) >= Len0 - 1 -> + Len = Len0 - PadLen - 1, + case Rest0 of + << HeaderBlockFragment:Len/binary, 0:PadLen/unit:8, Rest/bits >> -> + {ok, {headers, StreamID, parse_fin(FlagEndStream), parse_head_fin(FlagEndHeaders), HeaderBlockFragment}, Rest}; + _ -> + {connection_error, protocol_error, 'Padding octets MUST be set to zero. (RFC7540 6.2)'} + end; +%% No padding, priority. +parse(<< _:24, 1:8, _:2, 1:1, _:1, 0:1, _:4, StreamID:31, _:1, StreamID:31, _/bits >>) -> + {connection_error, protocol_error, + 'HEADERS frames cannot define a stream that depends on itself. 
(RFC7540 5.3.1)'}; +parse(<< Len0:24, 1:8, _:2, 1:1, _:1, 0:1, FlagEndHeaders:1, _:1, FlagEndStream:1, _:1, StreamID:31, + E:1, DepStreamID:31, Weight:8, Rest0/bits >>) when byte_size(Rest0) >= Len0 - 5 -> + Len = Len0 - 5, + << HeaderBlockFragment:Len/binary, Rest/bits >> = Rest0, + {ok, {headers, StreamID, parse_fin(FlagEndStream), parse_head_fin(FlagEndHeaders), + parse_exclusive(E), DepStreamID, Weight + 1, HeaderBlockFragment}, Rest}; +%% Padding, priority. +parse(<< _:24, 1:8, _:2, 1:1, _:1, 1:1, _:4, StreamID:31, _:9, StreamID:31, _/bits >>) -> + {connection_error, protocol_error, + 'HEADERS frames cannot define a stream that depends on itself. (RFC7540 5.3.1)'}; +parse(<< Len0:24, 1:8, _:2, 1:1, _:1, 1:1, FlagEndHeaders:1, _:1, FlagEndStream:1, _:1, StreamID:31, + PadLen:8, E:1, DepStreamID:31, Weight:8, Rest0/bits >>) when byte_size(Rest0) >= Len0 - 6 -> + Len = Len0 - PadLen - 6, + case Rest0 of + << HeaderBlockFragment:Len/binary, 0:PadLen/unit:8, Rest/bits >> -> + {ok, {headers, StreamID, parse_fin(FlagEndStream), parse_head_fin(FlagEndHeaders), + parse_exclusive(E), DepStreamID, Weight + 1, HeaderBlockFragment}, Rest}; + _ -> + {connection_error, protocol_error, 'Padding octets MUST be set to zero. (RFC7540 6.2)'} + end; +%% +%% PRIORITY frames. +%% +parse(<< 5:24, 2:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'PRIORITY frames MUST be associated with a stream. (RFC7540 6.3)'}; +parse(<< 5:24, 2:8, _:9, StreamID:31, _:1, StreamID:31, _:8, Rest/bits >>) -> + {stream_error, StreamID, protocol_error, + 'PRIORITY frames cannot make a stream depend on itself. (RFC7540 5.3.1)', Rest}; +parse(<< 5:24, 2:8, _:9, StreamID:31, E:1, DepStreamID:31, Weight:8, Rest/bits >>) -> + {ok, {priority, StreamID, parse_exclusive(E), DepStreamID, Weight + 1}, Rest}; +%% @todo figure out how to best deal with frame size errors; if we have everything fine +%% if not we might want to inform the caller how much he should expect so that it can +%% decide if it should just close the connection +parse(<< BadLen:24, 2:8, _:9, StreamID:31, _:BadLen/binary, Rest/bits >>) -> + {stream_error, StreamID, frame_size_error, 'PRIORITY frames MUST be 5 bytes wide. (RFC7540 6.3)', Rest}; +%% +%% RST_STREAM frames. +%% +parse(<< 4:24, 3:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'RST_STREAM frames MUST be associated with a stream. (RFC7540 6.4)'}; +parse(<< 4:24, 3:8, _:9, StreamID:31, ErrorCode:32, Rest/bits >>) -> + {ok, {rst_stream, StreamID, parse_error_code(ErrorCode)}, Rest}; +%% @todo same as priority +parse(<< _:24, 3:8, _:9, _:31, _/bits >>) -> + {connection_error, frame_size_error, 'RST_STREAM frames MUST be 4 bytes wide. (RFC7540 6.4)'}; +%% +%% SETTINGS frames. +%% +parse(<< 0:24, 4:8, _:7, 1:1, _:1, 0:31, Rest/bits >>) -> + {ok, settings_ack, Rest}; +parse(<< _:24, 4:8, _:7, 1:1, _:1, 0:31, _/bits >>) -> + {connection_error, frame_size_error, 'SETTINGS frames with the ACK flag set MUST have a length of 0. (RFC7540 6.5)'}; +parse(<< Len:24, 4:8, _:7, 0:1, _:1, 0:31, _/bits >>) when Len rem 6 =/= 0 -> + {connection_error, frame_size_error, 'SETTINGS frames MUST have a length multiple of 6. (RFC7540 6.5)'}; +parse(<< Len:24, 4:8, _:7, 0:1, _:1, 0:31, Rest/bits >>) when byte_size(Rest) >= Len -> + parse_settings_payload(Rest, Len, #{}); +parse(<< _:24, 4:8, _:8, _:1, StreamID:31, _/bits >>) when StreamID =/= 0 -> + {connection_error, protocol_error, 'SETTINGS frames MUST NOT be associated with a stream. (RFC7540 6.5)'}; +%% +%% PUSH_PROMISE frames. 
+%% +parse(<< Len:24, 5:8, _:40, _/bits >>) when Len < 4 -> + {connection_error, frame_size_error, 'PUSH_PROMISE frames MUST have a length >= 4. (RFC7540 4.2, RFC7540 6.6)'}; +parse(<< Len:24, 5:8, _:4, 1:1, _:35, _/bits >>) when Len < 5 -> + {connection_error, frame_size_error, 'PUSH_PROMISE frames with padding flag MUST have a length >= 5. (RFC7540 4.2, RFC7540 6.6)'}; +parse(<< _:24, 5:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'PUSH_PROMISE frames MUST be associated with a stream. (RFC7540 6.6)'}; +parse(<< Len0:24, 5:8, _:4, 1:1, _:35, PadLen:8, _/bits >>) when PadLen >= Len0 - 4 -> + {connection_error, protocol_error, 'Length of padding MUST be less than length of payload. (RFC7540 6.6)'}; +parse(<< Len0:24, 5:8, _:4, 0:1, FlagEndHeaders:1, _:3, StreamID:31, _:1, PromisedStreamID:31, Rest0/bits >>) + when byte_size(Rest0) >= Len0 - 4 -> + Len = Len0 - 4, + << HeaderBlockFragment:Len/binary, Rest/bits >> = Rest0, + {ok, {push_promise, StreamID, parse_head_fin(FlagEndHeaders), PromisedStreamID, HeaderBlockFragment}, Rest}; +parse(<< Len0:24, 5:8, _:4, 1:1, FlagEndHeaders:1, _:2, StreamID:31, PadLen:8, _:1, PromisedStreamID:31, Rest0/bits >>) + when byte_size(Rest0) >= Len0 - 5 -> + Len = Len0 - 5, + case Rest0 of + << HeaderBlockFragment:Len/binary, 0:PadLen/unit:8, Rest/bits >> -> + {ok, {push_promise, StreamID, parse_head_fin(FlagEndHeaders), PromisedStreamID, HeaderBlockFragment}, Rest}; + _ -> + {connection_error, protocol_error, 'Padding octets MUST be set to zero. (RFC7540 6.6)'} + end; +%% +%% PING frames. +%% +parse(<< 8:24, 6:8, _:7, 1:1, _:1, 0:31, Opaque:64, Rest/bits >>) -> + {ok, {ping_ack, Opaque}, Rest}; +parse(<< 8:24, 6:8, _:7, 0:1, _:1, 0:31, Opaque:64, Rest/bits >>) -> + {ok, {ping, Opaque}, Rest}; +parse(<< 8:24, 6:8, _:104, _/bits >>) -> + {connection_error, protocol_error, 'PING frames MUST NOT be associated with a stream. (RFC7540 6.7)'}; +parse(<< Len:24, 6:8, _/bits >>) when Len =/= 8 -> + {connection_error, frame_size_error, 'PING frames MUST be 8 bytes wide. (RFC7540 6.7)'}; +%% +%% GOAWAY frames. +%% +parse(<< Len0:24, 7:8, _:9, 0:31, _:1, LastStreamID:31, ErrorCode:32, Rest0/bits >>) when byte_size(Rest0) >= Len0 - 8 -> + Len = Len0 - 8, + << DebugData:Len/binary, Rest/bits >> = Rest0, + {ok, {goaway, LastStreamID, parse_error_code(ErrorCode), DebugData}, Rest}; +parse(<< Len:24, 7:8, _:40, _/bits >>) when Len < 8 -> + {connection_error, frame_size_error, 'GOAWAY frames MUST have a length >= 8. (RFC7540 4.2, RFC7540 6.8)'}; +parse(<< _:24, 7:8, _:40, _/bits >>) -> + {connection_error, protocol_error, 'GOAWAY frames MUST NOT be associated with a stream. (RFC7540 6.8)'}; +%% +%% WINDOW_UPDATE frames. +%% +parse(<< 4:24, 8:8, _:9, 0:31, _:1, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'WINDOW_UPDATE frames MUST have a non-zero increment. (RFC7540 6.9)'}; +parse(<< 4:24, 8:8, _:9, 0:31, _:1, Increment:31, Rest/bits >>) -> + {ok, {window_update, Increment}, Rest}; +parse(<< 4:24, 8:8, _:9, StreamID:31, _:1, 0:31, Rest/bits >>) -> + {stream_error, StreamID, protocol_error, 'WINDOW_UPDATE frames MUST have a non-zero increment. (RFC7540 6.9)', Rest}; +parse(<< 4:24, 8:8, _:9, StreamID:31, _:1, Increment:31, Rest/bits >>) -> + {ok, {window_update, StreamID, Increment}, Rest}; +parse(<< Len:24, 8:8, _/bits >>) when Len =/= 4-> + {connection_error, frame_size_error, 'WINDOW_UPDATE frames MUST be 4 bytes wide. (RFC7540 6.9)'}; +%% +%% CONTINUATION frames. 
+%% +parse(<< _:24, 9:8, _:9, 0:31, _/bits >>) -> + {connection_error, protocol_error, 'CONTINUATION frames MUST be associated with a stream. (RFC7540 6.10)'}; +parse(<< Len:24, 9:8, _:5, FlagEndHeaders:1, _:3, StreamID:31, HeaderBlockFragment:Len/binary, Rest/bits >>) -> + {ok, {continuation, StreamID, parse_head_fin(FlagEndHeaders), HeaderBlockFragment}, Rest}; +%% +%% Unknown frames are ignored. +%% +parse(<< Len:24, Type:8, _:40, _:Len/binary, Rest/bits >>) when Type > 9 -> + {ignore, Rest}; +%% +%% Incomplete frames. +%% +parse(_) -> + more. + +-ifdef(TEST). +parse_ping_test() -> + Ping = ping(1234567890), + _ = [more = parse(binary:part(Ping, 0, I)) || I <- lists:seq(1, byte_size(Ping) - 1)], + {ok, {ping, 1234567890}, <<>>} = parse(Ping), + {ok, {ping, 1234567890}, << 42 >>} = parse(<< Ping/binary, 42 >>), + ok. + +parse_windows_update_test() -> + WindowUpdate = << 4:24, 8:8, 0:9, 0:31, 0:1, 12345:31 >>, + _ = [more = parse(binary:part(WindowUpdate, 0, I)) || I <- lists:seq(1, byte_size(WindowUpdate) - 1)], + {ok, {window_update, 12345}, <<>>} = parse(WindowUpdate), + {ok, {window_update, 12345}, << 42 >>} = parse(<< WindowUpdate/binary, 42 >>), + ok. + +parse_settings_test() -> + more = parse(<< 0:24, 4:8, 1:8, 0:8 >>), + {ok, settings_ack, <<>>} = parse(<< 0:24, 4:8, 1:8, 0:32 >>), + {connection_error, protocol_error, _} = parse(<< 0:24, 4:8, 1:8, 0:1, 1:31 >>), + ok. +-endif. + +parse_fin(0) -> nofin; +parse_fin(1) -> fin. + +parse_head_fin(0) -> head_nofin; +parse_head_fin(1) -> head_fin. + +parse_exclusive(0) -> shared; +parse_exclusive(1) -> exclusive. + +parse_error_code( 0) -> no_error; +parse_error_code( 1) -> protocol_error; +parse_error_code( 2) -> internal_error; +parse_error_code( 3) -> flow_control_error; +parse_error_code( 4) -> settings_timeout; +parse_error_code( 5) -> stream_closed; +parse_error_code( 6) -> frame_size_error; +parse_error_code( 7) -> refused_stream; +parse_error_code( 8) -> cancel; +parse_error_code( 9) -> compression_error; +parse_error_code(10) -> connect_error; +parse_error_code(11) -> enhance_your_calm; +parse_error_code(12) -> inadequate_security; +parse_error_code(13) -> http_1_1_required; +parse_error_code(_) -> unknown_error. + +parse_settings_payload(SettingsPayload) -> + {ok, {settings, Settings}, <<>>} + = parse_settings_payload(SettingsPayload, byte_size(SettingsPayload), #{}), + Settings. + +parse_settings_payload(Rest, 0, Settings) -> + {ok, {settings, Settings}, Rest}; +%% SETTINGS_HEADER_TABLE_SIZE. +parse_settings_payload(<< 1:16, Value:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{header_table_size => Value}); +%% SETTINGS_ENABLE_PUSH. +parse_settings_payload(<< 2:16, 0:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{enable_push => false}); +parse_settings_payload(<< 2:16, 1:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{enable_push => true}); +parse_settings_payload(<< 2:16, _:32, _/bits >>, _, _) -> + {connection_error, protocol_error, 'The SETTINGS_ENABLE_PUSH value MUST be 0 or 1. (RFC7540 6.5.2)'}; +%% SETTINGS_MAX_CONCURRENT_STREAMS. +parse_settings_payload(<< 3:16, Value:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{max_concurrent_streams => Value}); +%% SETTINGS_INITIAL_WINDOW_SIZE. 
+parse_settings_payload(<< 4:16, Value:32, _/bits >>, _, _) when Value > 16#7fffffff -> + {connection_error, flow_control_error, 'The maximum SETTINGS_INITIAL_WINDOW_SIZE value is 0x7fffffff. (RFC7540 6.5.2)'}; +parse_settings_payload(<< 4:16, Value:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{initial_window_size => Value}); +%% SETTINGS_MAX_FRAME_SIZE. +parse_settings_payload(<< 5:16, Value:32, _/bits >>, _, _) when Value =< 16#3fff -> + {connection_error, protocol_error, 'The SETTINGS_MAX_FRAME_SIZE value must be > 0x3fff. (RFC7540 6.5.2)'}; +parse_settings_payload(<< 5:16, Value:32, Rest/bits >>, Len, Settings) when Value =< 16#ffffff -> + parse_settings_payload(Rest, Len - 6, Settings#{max_frame_size => Value}); +parse_settings_payload(<< 5:16, _:32, _/bits >>, _, _) -> + {connection_error, protocol_error, 'The SETTINGS_MAX_FRAME_SIZE value must be =< 0xffffff. (RFC7540 6.5.2)'}; +%% SETTINGS_MAX_HEADER_LIST_SIZE. +parse_settings_payload(<< 6:16, Value:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{max_header_list_size => Value}); +%% SETTINGS_ENABLE_CONNECT_PROTOCOL. +parse_settings_payload(<< 8:16, 0:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{enable_connect_protocol => false}); +parse_settings_payload(<< 8:16, 1:32, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings#{enable_connect_protocol => true}); +parse_settings_payload(<< 8:16, _:32, _/bits >>, _, _) -> + {connection_error, protocol_error, 'The SETTINGS_ENABLE_CONNECT_PROTOCOL value MUST be 0 or 1. (draft-h2-websockets-01 3)'}; +%% Ignore unknown settings. +parse_settings_payload(<< _:48, Rest/bits >>, Len, Settings) -> + parse_settings_payload(Rest, Len - 6, Settings). + +%% Building. + +data(StreamID, IsFin, Data) -> + [data_header(StreamID, IsFin, iolist_size(Data)), Data]. + +data_header(StreamID, IsFin, Len) -> + FlagEndStream = flag_fin(IsFin), + << Len:24, 0:15, FlagEndStream:1, 0:1, StreamID:31 >>. + +%% @todo Check size of HeaderBlock and use CONTINUATION frames if needed. +headers(StreamID, IsFin, HeaderBlock) -> + Len = iolist_size(HeaderBlock), + FlagEndStream = flag_fin(IsFin), + FlagEndHeaders = 1, + [<< Len:24, 1:8, 0:5, FlagEndHeaders:1, 0:1, FlagEndStream:1, 0:1, StreamID:31 >>, HeaderBlock]. + +priority(StreamID, E, DepStreamID, Weight) -> + FlagExclusive = exclusive(E), + << 5:24, 2:8, 0:9, StreamID:31, FlagExclusive:1, DepStreamID:31, Weight:8 >>. + +rst_stream(StreamID, Reason) -> + ErrorCode = error_code(Reason), + << 4:24, 3:8, 0:9, StreamID:31, ErrorCode:32 >>. + +settings(Settings) -> + Payload = settings_payload(Settings), + Len = iolist_size(Payload), + [<< Len:24, 4:8, 0:40 >>, Payload]. + +settings_payload(Settings) -> + [case Key of + header_table_size -> <<1:16, Value:32>>; + enable_push when Value -> <<2:16, 1:32>>; + enable_push -> <<2:16, 0:32>>; + max_concurrent_streams when Value =:= infinity -> <<>>; + max_concurrent_streams -> <<3:16, Value:32>>; + initial_window_size -> <<4:16, Value:32>>; + max_frame_size -> <<5:16, Value:32>>; + max_header_list_size when Value =:= infinity -> <<>>; + max_header_list_size -> <<6:16, Value:32>>; + enable_connect_protocol when Value -> <<8:16, 1:32>>; + enable_connect_protocol -> <<8:16, 0:32>> + end || {Key, Value} <- maps:to_list(Settings)]. + +settings_ack() -> + << 0:24, 4:8, 1:8, 0:32 >>. + +%% @todo Check size of HeaderBlock and use CONTINUATION frames if needed. 
+push_promise(StreamID, PromisedStreamID, HeaderBlock) -> + Len = iolist_size(HeaderBlock) + 4, + FlagEndHeaders = 1, + [<< Len:24, 5:8, 0:5, FlagEndHeaders:1, 0:3, StreamID:31, 0:1, PromisedStreamID:31 >>, HeaderBlock]. + +ping(Opaque) -> + << 8:24, 6:8, 0:40, Opaque:64 >>. + +ping_ack(Opaque) -> + << 8:24, 6:8, 0:7, 1:1, 0:32, Opaque:64 >>. + +goaway(LastStreamID, Reason, DebugData) -> + ErrorCode = error_code(Reason), + Len = iolist_size(DebugData) + 8, + [<< Len:24, 7:8, 0:41, LastStreamID:31, ErrorCode:32 >>, DebugData]. + +window_update(Increment) -> + window_update(0, Increment). + +window_update(StreamID, Increment) when Increment =< 16#7fffffff -> + << 4:24, 8:8, 0:8, StreamID:32, 0:1, Increment:31 >>. + +flag_fin(nofin) -> 0; +flag_fin(fin) -> 1. + +exclusive(shared) -> 0; +exclusive(exclusive) -> 1. + +error_code(no_error) -> 0; +error_code(protocol_error) -> 1; +error_code(internal_error) -> 2; +error_code(flow_control_error) -> 3; +error_code(settings_timeout) -> 4; +error_code(stream_closed) -> 5; +error_code(frame_size_error) -> 6; +error_code(refused_stream) -> 7; +error_code(cancel) -> 8; +error_code(compression_error) -> 9; +error_code(connect_error) -> 10; +error_code(enhance_your_calm) -> 11; +error_code(inadequate_security) -> 12; +error_code(http_1_1_required) -> 13. diff --git a/src/wsLib/cow_http2_machine.erl b/src/wsLib/cow_http2_machine.erl new file mode 100644 index 0000000..35eb72e --- /dev/null +++ b/src/wsLib/cow_http2_machine.erl @@ -0,0 +1,1647 @@ +%% Copyright (c) 2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_http2_machine). + +-export([init/2]). +-export([init_stream/2]). +-export([init_upgrade_stream/2]). +-export([frame/2]). +-export([ignored_frame/1]). +-export([timeout/3]). +-export([prepare_headers/5]). +-export([prepare_push_promise/4]). +-export([prepare_trailers/3]). +-export([send_or_queue_data/4]). +-export([ensure_window/2]). +-export([ensure_window/3]). +-export([update_window/2]). +-export([update_window/3]). +-export([reset_stream/2]). +-export([get_connection_local_buffer_size/1]). +-export([get_local_setting/2]). +-export([get_remote_settings/1]). +-export([get_last_streamid/1]). +-export([set_last_streamid/1]). +-export([get_stream_local_buffer_size/2]). +-export([get_stream_local_state/2]). +-export([get_stream_remote_state/2]). +-export([is_lingering_stream/2]). 
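+
+%% Illustrative usage sketch (the `Transport', `Socket', `loop', `handle_request'
+%% and `terminate' names below are assumptions for the example, not part of this
+%% module). A server typically sends the preface returned by init/2 and then
+%% feeds every frame decoded by cow_http2:parse/1 into frame/2:
+%%
+%%   {ok, Preface, HTTP2Machine0} = cow_http2_machine:init(server, #{}),
+%%   ok = Transport:send(Socket, Preface),
+%%   %% ...then, for each complete frame returned by cow_http2:parse/1:
+%%   case cow_http2_machine:frame(Frame, HTTP2Machine0) of
+%%       {ok, HTTP2Machine} ->
+%%           loop(HTTP2Machine);
+%%       {ok, {headers, StreamID, IsFin, Headers, PseudoHeaders, BodyLen}, HTTP2Machine} ->
+%%           handle_request(StreamID, IsFin, Headers, PseudoHeaders, BodyLen, HTTP2Machine);
+%%       {error, {connection_error, Reason, HumanReadable}, _HTTP2Machine} ->
+%%           terminate(Reason, HumanReadable)
+%%       %% ...plus the other return values documented in the frame/2 spec below.
+%%   end.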
+ +-type opts() :: #{ + connection_window_margin_size => 0..16#7fffffff, + connection_window_update_threshold => 0..16#7fffffff, + enable_connect_protocol => boolean(), + initial_connection_window_size => 65535..16#7fffffff, + initial_stream_window_size => 0..16#7fffffff, + max_connection_window_size => 0..16#7fffffff, + max_concurrent_streams => non_neg_integer() | infinity, + max_decode_table_size => non_neg_integer(), + max_encode_table_size => non_neg_integer(), + max_frame_size_received => 16384..16777215, + max_frame_size_sent => 16384..16777215 | infinity, + max_stream_window_size => 0..16#7fffffff, + message_tag => any(), + preface_timeout => timeout(), + settings_timeout => timeout(), + stream_window_data_threshold => 0..16#7fffffff, + stream_window_margin_size => 0..16#7fffffff, + stream_window_update_threshold => 0..16#7fffffff +}. +-export_type([opts/0]). + +%% The order of the fields is significant. +-record(sendfile, { + offset :: non_neg_integer(), + bytes :: pos_integer(), + path :: file:name_all() +}). + +-record(stream, { + id = undefined :: cow_http2:streamid(), + + %% Request method. + method = undefined :: binary(), + + %% Whether we finished sending data. + local = idle :: idle | cow_http2:fin(), + + %% Local flow control window (how much we can send). + local_window :: integer(), + + %% Buffered data waiting for the flow control window to increase. + local_buffer = queue:new() :: + queue:queue({cow_http2:fin(), non_neg_integer(), {data, iodata()} | #sendfile{}}), + local_buffer_size = 0 :: non_neg_integer(), + local_trailers = undefined :: undefined | cow_http:headers(), + + %% Whether we finished receiving data. + remote = idle :: idle | cow_http2:fin(), + + %% Remote flow control window (how much we accept to receive). + remote_window :: integer(), + + %% Size expected and read from the request body. + remote_expected_size = undefined :: undefined | non_neg_integer(), + remote_read_size = 0 :: non_neg_integer(), + + %% Unparsed te header. Used to know if we can send trailers. + %% Note that we can always send trailers to the server. + te :: undefined | binary() +}). + +-type stream() :: #stream{}. + +-type continued_frame() :: + {headers, cow_http2:streamid(), cow_http2:fin(), cow_http2:head_fin(), binary()} | + {push_promise, cow_http2:streamid(), cow_http2:head_fin(), cow_http2:streamid(), binary()}. + +-record(http2_machine, { + %% Whether the HTTP/2 endpoint is a client or a server. + mode :: client | server, + + %% HTTP/2 SETTINGS customization. + opts = #{} :: opts(), + + %% Connection-wide frame processing state. + state = settings :: settings | normal + | {continuation, request | response | trailers | push_promise, continued_frame()}, + + %% Timer for the connection preface. + preface_timer = undefined :: undefined | reference(), + + %% Timer for the ack for a SETTINGS frame we sent. + settings_timer = undefined :: undefined | reference(), + + %% Settings are separate for each endpoint. In addition, settings + %% must be acknowledged before they can be expected to be applied. + local_settings = #{ +% header_table_size => 4096, +% enable_push => true, +% max_concurrent_streams => infinity, + initial_window_size => 65535 +% max_frame_size => 16384 +% max_header_list_size => infinity + } :: map(), + next_settings = undefined :: undefined | map(), + remote_settings = #{ + initial_window_size => 65535 + } :: map(), + + %% Connection-wide flow control window. + local_window = 65535 :: integer(), %% How much we can send. 
+ remote_window = 65535 :: integer(), %% How much we accept to receive. + + %% Stream identifiers. + local_streamid :: pos_integer(), %% The next streamid to be used. + remote_streamid = 0 :: non_neg_integer(), %% The last streamid received. + last_remote_streamid = 16#7fffffff :: non_neg_integer(), %% Used in GOAWAY. + + %% Currently active HTTP/2 streams. Streams may be initiated either + %% by the client or by the server through PUSH_PROMISE frames. + streams = #{} :: #{cow_http2:streamid() => stream()}, + + %% HTTP/2 streams that have recently been reset locally. + %% We are expected to keep receiving additional frames after + %% sending an RST_STREAM. + local_lingering_streams = [] :: [cow_http2:streamid()], + + %% HTTP/2 streams that have recently been reset remotely. + %% We keep a few of these around in order to reject subsequent + %% frames on these streams. + remote_lingering_streams = [] :: [cow_http2:streamid()], + + %% HPACK decoding and encoding state. + decode_state = cow_hpack:init() :: cow_hpack:state(), + encode_state = cow_hpack:init() :: cow_hpack:state() +}). + +-opaque http2_machine() :: #http2_machine{}. +-export_type([http2_machine/0]). + +-type pseudo_headers() :: #{} %% Trailers + | #{ %% Responses. + status := cow_http:status() + } | #{ %% Normal CONNECT requests. + method := binary(), + authority := binary() + } | #{ %% Other requests and extended CONNECT requests. + method := binary(), + scheme := binary(), + authority := binary(), + path := binary(), + protocol => binary() + }. + +%% Returns true when the given StreamID is for a local-initiated stream. +-define(IS_SERVER_LOCAL(StreamID), ((StreamID rem 2) =:= 0)). +-define(IS_CLIENT_LOCAL(StreamID), ((StreamID rem 2) =:= 1)). +-define(IS_LOCAL(Mode, StreamID), ( + ((Mode =:= server) andalso ?IS_SERVER_LOCAL(StreamID)) + orelse + ((Mode =:= client) andalso ?IS_CLIENT_LOCAL(StreamID)) +)). + +-spec init(client | server, opts()) -> {ok, iodata(), http2_machine()}. +init(client, Opts) -> + NextSettings = settings_init(Opts), + client_preface(#http2_machine{ + mode=client, + opts=Opts, + preface_timer=start_timer(preface_timeout, Opts), + settings_timer=start_timer(settings_timeout, Opts), + next_settings=NextSettings, + local_streamid=1 + }); +init(server, Opts) -> + NextSettings = settings_init(Opts), + common_preface(#http2_machine{ + mode=server, + opts=Opts, + preface_timer=start_timer(preface_timeout, Opts), + settings_timer=start_timer(settings_timeout, Opts), + next_settings=NextSettings, + local_streamid=2 + }). + +%% @todo In Cowlib 3.0 we should always include MessageTag in the message. +%% It can be set to 'undefined' if the option is missing. +start_timer(Name, Opts=#{message_tag := MessageTag}) -> + case maps:get(Name, Opts, 5000) of + infinity -> undefined; + Timeout -> erlang:start_timer(Timeout, self(), {?MODULE, MessageTag, Name}) + end; +start_timer(Name, Opts) -> + case maps:get(Name, Opts, 5000) of + infinity -> undefined; + Timeout -> erlang:start_timer(Timeout, self(), {?MODULE, Name}) + end. + +client_preface(State0) -> + {ok, CommonPreface, State} = common_preface(State0), + {ok, [ + <<"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n">>, + CommonPreface + ], State}. + +%% We send next_settings and use defaults until we get an ack. +%% +%% We also send a WINDOW_UPDATE frame for the connection when +%% the user specified an initial_connection_window_size. 
+common_preface(State=#http2_machine{opts=Opts, next_settings=NextSettings}) -> + case maps:get(initial_connection_window_size, Opts, 65535) of + 65535 -> + {ok, cow_http2:settings(NextSettings), State}; + Size -> + {ok, [ + cow_http2:settings(NextSettings), + cow_http2:window_update(Size - 65535) + ], update_window(Size - 65535, State)} + end. + +settings_init(Opts) -> + S0 = setting_from_opt(#{}, Opts, max_decode_table_size, + header_table_size, 4096), + S1 = setting_from_opt(S0, Opts, max_concurrent_streams, + max_concurrent_streams, infinity), + S2 = setting_from_opt(S1, Opts, initial_stream_window_size, + initial_window_size, 65535), + S3 = setting_from_opt(S2, Opts, max_frame_size_received, + max_frame_size, 16384), + %% @todo max_header_list_size + setting_from_opt(S3, Opts, enable_connect_protocol, + enable_connect_protocol, false). + +setting_from_opt(Settings, Opts, OptName, SettingName, Default) -> + case maps:get(OptName, Opts, Default) of + Default -> Settings; + Value -> Settings#{SettingName => Value} + end. + +-spec init_stream(binary(), State) + -> {ok, cow_http2:streamid(), State} when State::http2_machine(). +init_stream(Method, State=#http2_machine{mode=client, local_streamid=LocalStreamID, + local_settings=#{initial_window_size := RemoteWindow}, + remote_settings=#{initial_window_size := LocalWindow}}) -> + Stream = #stream{id=LocalStreamID, method=Method, + local_window=LocalWindow, remote_window=RemoteWindow}, + {ok, LocalStreamID, stream_store(Stream, State#http2_machine{ + local_streamid=LocalStreamID + 2})}. + +-spec init_upgrade_stream(binary(), State) + -> {ok, cow_http2:streamid(), State} when State::http2_machine(). +init_upgrade_stream(Method, State=#http2_machine{mode=server, remote_streamid=0, + local_settings=#{initial_window_size := RemoteWindow}, + remote_settings=#{initial_window_size := LocalWindow}}) -> + Stream = #stream{id=1, method=Method, + remote=fin, remote_expected_size=0, + local_window=LocalWindow, remote_window=RemoteWindow, te=undefined}, + {ok, 1, stream_store(Stream, State#http2_machine{remote_streamid=1})}. + +-spec frame(cow_http2:frame(), State) + -> {ok, State} + | {ok, {data, cow_http2:streamid(), cow_http2:fin(), binary()}, State} + | {ok, {headers, cow_http2:streamid(), cow_http2:fin(), + cow_http:headers(), pseudo_headers(), non_neg_integer() | undefined}, State} + | {ok, {trailers, cow_http2:streamid(), cow_http:headers()}, State} + | {ok, {rst_stream, cow_http2:streamid(), cow_http2:error()}, State} + | {ok, {push_promise, cow_http2:streamid(), cow_http2:streamid(), + cow_http:headers(), pseudo_headers()}, State} + | {ok, {goaway, cow_http2:streamid(), cow_http2:error(), binary()}, State} + | {send, [{cow_http2:streamid(), cow_http2:fin(), + [{data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}]}], State} + | {error, {stream_error, cow_http2:streamid(), cow_http2:error(), atom()}, State} + | {error, {connection_error, cow_http2:error(), atom()}, State} + when State::http2_machine(). 
+frame(Frame, State=#http2_machine{state=settings, preface_timer=TRef}) -> + ok = case TRef of + undefined -> ok; + _ -> erlang:cancel_timer(TRef, [{async, true}, {info, false}]) + end, + settings_frame(Frame, State#http2_machine{state=normal, preface_timer=undefined}); +frame(Frame, State=#http2_machine{state={continuation, _, _}}) -> + maybe_discard_result(continuation_frame(Frame, State)); +frame(settings_ack, State=#http2_machine{state=normal}) -> + settings_ack_frame(State); +frame(Frame, State=#http2_machine{state=normal}) -> + Result = case element(1, Frame) of + data -> data_frame(Frame, State); + headers -> headers_frame(Frame, State); + priority -> priority_frame(Frame, State); + rst_stream -> rst_stream_frame(Frame, State); + settings -> settings_frame(Frame, State); + push_promise -> push_promise_frame(Frame, State); + ping -> ping_frame(Frame, State); + ping_ack -> ping_ack_frame(Frame, State); + goaway -> goaway_frame(Frame, State); + window_update -> window_update_frame(Frame, State); + continuation -> unexpected_continuation_frame(Frame, State); + _ -> ignored_frame(State) + end, + maybe_discard_result(Result). + +%% RFC7540 6.9. After sending a GOAWAY frame, the sender can discard frames for +%% streams initiated by the receiver with identifiers higher than the identified +%% last stream. However, any frames that alter connection state cannot be +%% completely ignored. For instance, HEADERS, PUSH_PROMISE, and CONTINUATION +%% frames MUST be minimally processed to ensure the state maintained for header +%% compression is consistent. +maybe_discard_result(FrameResult={ok, Result, State=#http2_machine{mode=Mode, + last_remote_streamid=MaxID}}) + when element(1, Result) =/= goaway -> + case element(2, Result) of + StreamID when StreamID > MaxID, not ?IS_LOCAL(Mode, StreamID) -> + {ok, State}; + _StreamID -> + FrameResult + end; +maybe_discard_result(FrameResult) -> + FrameResult. + +%% DATA frame. + +data_frame({data, StreamID, _, _}, State=#http2_machine{mode=Mode, + local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) + when (?IS_LOCAL(Mode, StreamID) andalso (StreamID >= LocalStreamID)) + orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID > RemoteStreamID)) -> + {error, {connection_error, protocol_error, + 'DATA frame received on a stream in idle state. (RFC7540 5.1)'}, + State}; +data_frame({data, _, _, Data}, State=#http2_machine{remote_window=ConnWindow}) + when byte_size(Data) > ConnWindow -> + {error, {connection_error, flow_control_error, + 'DATA frame overflowed the connection flow control window. (RFC7540 6.9, RFC7540 6.9.1)'}, + State}; +data_frame(Frame={data, StreamID, _, Data}, State0=#http2_machine{ + remote_window=ConnWindow, local_lingering_streams=Lingering}) -> + DataLen = byte_size(Data), + State = State0#http2_machine{remote_window=ConnWindow - DataLen}, + case stream_get(StreamID, State) of + #stream{remote_window=StreamWindow} when StreamWindow < DataLen -> + stream_reset(StreamID, State, flow_control_error, + 'DATA frame overflowed the stream flow control window. (RFC7540 6.9, RFC7540 6.9.1)'); + Stream = #stream{remote=nofin} -> + data_frame(Frame, State, Stream, DataLen); + #stream{remote=idle} -> + stream_reset(StreamID, State, protocol_error, + 'DATA frame received before a HEADERS frame. (RFC7540 8.1, RFC7540 8.1.2.6)'); + #stream{remote=fin} -> + stream_reset(StreamID, State, stream_closed, + 'DATA frame received for a half-closed (remote) stream. 
(RFC7540 5.1)'); + undefined -> + %% After we send an RST_STREAM frame and terminate a stream, + %% the remote endpoint might still be sending us some more + %% frames until it can process this RST_STREAM. + case lists:member(StreamID, Lingering) of + true -> + {ok, State}; + false -> + {error, {connection_error, stream_closed, + 'DATA frame received for a closed stream. (RFC7540 5.1)'}, + State} + end + end. + +data_frame(Frame={data, _, IsFin, _}, State0, Stream0=#stream{id=StreamID, + remote_window=StreamWindow, remote_read_size=StreamRead}, DataLen) -> + Stream = Stream0#stream{remote=IsFin, + remote_window=StreamWindow - DataLen, + remote_read_size=StreamRead + DataLen}, + State = stream_store(Stream, State0), + case is_body_size_valid(Stream) of + true -> + {ok, Frame, State}; + false -> + stream_reset(StreamID, State, protocol_error, + 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)') + end. + +%% It's always valid when no content-length header was specified. +is_body_size_valid(#stream{remote_expected_size=undefined}) -> + true; +%% We didn't finish reading the body but the size is already larger than expected. +is_body_size_valid(#stream{remote=nofin, remote_expected_size=Expected, + remote_read_size=Read}) when Read > Expected -> + false; +is_body_size_valid(#stream{remote=nofin}) -> + true; +is_body_size_valid(#stream{remote=fin, remote_expected_size=Expected, + remote_read_size=Expected}) -> + true; +%% We finished reading the body and the size read is not the one expected. +is_body_size_valid(_) -> + false. + +%% HEADERS frame. +%% +%% We always close the connection when we detect errors before +%% decoding the headers to not waste resources on non-compliant +%% endpoints, making us stricter than the RFC requires. + +%% Convenience record to manipulate the tuple. +%% The order of the fields matter. +-record(headers, { + id :: cow_http2:streamid(), + fin :: cow_http2:fin(), + head :: cow_http2:head_fin(), + data :: binary() +}). + +headers_frame(Frame=#headers{}, State=#http2_machine{mode=Mode}) -> + case Mode of + server -> server_headers_frame(Frame, State); + client -> client_headers_frame(Frame, State) + end; +%% @todo Handle the PRIORITY data, but only if this returns an ok tuple. +%% @todo Do not lose the PRIORITY information if CONTINUATION frames follow. +headers_frame({headers, StreamID, IsFin, IsHeadFin, + _IsExclusive, _DepStreamID, _Weight, HeaderData}, + State=#http2_machine{mode=Mode}) -> + HeadersFrame = #headers{id=StreamID, fin=IsFin, head=IsHeadFin, data=HeaderData}, + case Mode of + server -> server_headers_frame(HeadersFrame, State); + client -> client_headers_frame(HeadersFrame, State) + end. + +%% Reject HEADERS frames with even-numbered streamid. +server_headers_frame(#headers{id=StreamID}, State) + when ?IS_SERVER_LOCAL(StreamID) -> + {error, {connection_error, protocol_error, + 'HEADERS frame received with even-numbered streamid. (RFC7540 5.1.1)'}, + State}; +%% HEADERS frame on an idle stream: new request. +server_headers_frame(Frame=#headers{id=StreamID, head=IsHeadFin}, + State=#http2_machine{mode=server, remote_streamid=RemoteStreamID}) + when StreamID > RemoteStreamID -> + case IsHeadFin of + head_fin -> + headers_decode(Frame, State, request, undefined); + head_nofin -> + {ok, State#http2_machine{state={continuation, request, Frame}}} + end; +%% Either a HEADERS frame received on (half-)closed stream, +%% or a HEADERS frame containing the trailers. 
+server_headers_frame(Frame=#headers{id=StreamID, fin=IsFin, head=IsHeadFin}, State) -> + case stream_get(StreamID, State) of + %% Trailers. + Stream = #stream{remote=nofin} when IsFin =:= fin -> + case IsHeadFin of + head_fin -> + headers_decode(Frame, State, trailers, Stream); + head_nofin -> + {ok, State#http2_machine{state={continuation, trailers, Frame}}} + end; + #stream{remote=nofin} -> + {error, {connection_error, protocol_error, + 'Trailing HEADERS frame received without the END_STREAM flag set. (RFC7540 8.1, RFC7540 8.1.2.6)'}, + State}; + _ -> + {error, {connection_error, stream_closed, + 'HEADERS frame received on a stream in closed or half-closed state. (RFC7540 5.1)'}, + State} + end. + +%% Either a HEADERS frame received on an (half-)closed stream, +%% or a HEADERS frame containing the response or the trailers. +client_headers_frame(Frame=#headers{id=StreamID, fin=IsFin, head=IsHeadFin}, + State=#http2_machine{local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) + when (?IS_CLIENT_LOCAL(StreamID) andalso (StreamID < LocalStreamID)) + orelse ((not ?IS_CLIENT_LOCAL(StreamID)) andalso (StreamID =< RemoteStreamID)) -> + case stream_get(StreamID, State) of + Stream = #stream{remote=idle} -> + case IsHeadFin of + head_fin -> + headers_decode(Frame, State, response, Stream); + head_nofin -> + {ok, State#http2_machine{state={continuation, response, Frame}}} + end; + Stream = #stream{remote=nofin} when IsFin =:= fin -> + case IsHeadFin of + head_fin -> + headers_decode(Frame, State, trailers, Stream); + head_nofin -> + {ok, State#http2_machine{state={continuation, trailers, Frame}}} + end; + #stream{remote=nofin} -> + {error, {connection_error, protocol_error, + 'Trailing HEADERS frame received without the END_STREAM flag set. (RFC7540 8.1, RFC7540 8.1.2.6)'}, + State}; + _ -> + {error, {connection_error, stream_closed, + 'HEADERS frame received on a stream in closed or half-closed state. (RFC7540 5.1)'}, + State} + end; +%% Reject HEADERS frames received on idle streams. +client_headers_frame(_, State) -> + {error, {connection_error, protocol_error, + 'HEADERS frame received on an idle stream. (RFC7540 5.1.1)'}, + State}. + +headers_decode(Frame=#headers{head=head_fin, data=HeaderData}, + State=#http2_machine{decode_state=DecodeState0}, Type, Stream) -> + try cow_hpack:decode(HeaderData, DecodeState0) of + {Headers, DecodeState} when Type =:= request -> + headers_enforce_concurrency_limit(Frame, + State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers); + {Headers, DecodeState} -> + headers_pseudo_headers(Frame, + State#http2_machine{decode_state=DecodeState}, Type, Stream, Headers) + catch _:_ -> + {error, {connection_error, compression_error, + 'Error while trying to decode HPACK-encoded header block. (RFC7540 4.3)'}, + State} + end. + +headers_enforce_concurrency_limit(Frame=#headers{id=StreamID}, + State=#http2_machine{local_settings=LocalSettings, streams=Streams}, + Type, Stream, Headers) -> + MaxConcurrentStreams = maps:get(max_concurrent_streams, LocalSettings, infinity), + %% Using < is correct because this new stream is not included + %% in the Streams variable yet and so we'll end up with +1 stream. + case map_size(Streams) < MaxConcurrentStreams of + true -> + headers_pseudo_headers(Frame, State, Type, Stream, Headers); + false -> + {error, {stream_error, StreamID, refused_stream, + 'Maximum number of concurrent streams has been reached. (RFC7540 5.1.2)'}, + State} + end. 
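+
+%% Worked example for the check above (illustrative numbers): with
+%% max_concurrent_streams set to 100 and 100 entries already in Streams,
+%% map_size(Streams) < 100 is false and the new request is refused with a
+%% refused_stream stream error; with 99 active streams the request is
+%% accepted and becomes the 100th concurrent stream.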
+ +headers_pseudo_headers(Frame, State=#http2_machine{local_settings=LocalSettings}, + Type, Stream, Headers0) when Type =:= request; Type =:= push_promise -> + IsExtendedConnectEnabled = maps:get(enable_connect_protocol, LocalSettings, false), + case request_pseudo_headers(Headers0, #{}) of + %% Extended CONNECT method (RFC8441). + {ok, PseudoHeaders=#{method := <<"CONNECT">>, scheme := _, + authority := _, path := _, protocol := _}, Headers} + when IsExtendedConnectEnabled -> + headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers); + {ok, #{method := <<"CONNECT">>, scheme := _, + authority := _, path := _}, _} + when IsExtendedConnectEnabled -> + headers_malformed(Frame, State, + 'The :protocol pseudo-header MUST be sent with an extended CONNECT. (RFC8441 4)'); + {ok, #{protocol := _}, _} -> + headers_malformed(Frame, State, + 'The :protocol pseudo-header is only defined for the extended CONNECT. (RFC8441 4)'); + %% Normal CONNECT (no scheme/path). + {ok, PseudoHeaders=#{method := <<"CONNECT">>, authority := _}, Headers} + when map_size(PseudoHeaders) =:= 2 -> + headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers); + {ok, #{method := <<"CONNECT">>}, _} -> + headers_malformed(Frame, State, + 'CONNECT requests only use the :method and :authority pseudo-headers. (RFC7540 8.3)'); + %% Other requests. + {ok, PseudoHeaders=#{method := _, scheme := _, path := _}, Headers} -> + headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers); + {ok, _, _} -> + headers_malformed(Frame, State, + 'A required pseudo-header was not found. (RFC7540 8.1.2.3)'); + {error, HumanReadable} -> + headers_malformed(Frame, State, HumanReadable) + end; +headers_pseudo_headers(Frame=#headers{id=StreamID}, + State, Type=response, Stream, Headers0) -> + case response_pseudo_headers(Headers0, #{}) of + {ok, PseudoHeaders=#{status := _}, Headers} -> + headers_regular_headers(Frame, State, Type, Stream, PseudoHeaders, Headers); + {ok, _, _} -> + stream_reset(StreamID, State, protocol_error, + 'A required pseudo-header was not found. (RFC7540 8.1.2.4)'); + {error, HumanReadable} -> + stream_reset(StreamID, State, protocol_error, HumanReadable) + end; +headers_pseudo_headers(Frame=#headers{id=StreamID}, + State, Type=trailers, Stream, Headers) -> + case trailers_contain_pseudo_headers(Headers) of + false -> + headers_regular_headers(Frame, State, Type, Stream, #{}, Headers); + true -> + stream_reset(StreamID, State, protocol_error, + 'Trailer header blocks must not contain pseudo-headers. (RFC7540 8.1.2.1)') + end. + +headers_malformed(#headers{id=StreamID}, State, HumanReadable) -> + {error, {stream_error, StreamID, protocol_error, HumanReadable}, State}. + +request_pseudo_headers([{<<":method">>, _}|_], #{method := _}) -> + {error, 'Multiple :method pseudo-headers were found. (RFC7540 8.1.2.3)'}; +request_pseudo_headers([{<<":method">>, Method}|Tail], PseudoHeaders) -> + request_pseudo_headers(Tail, PseudoHeaders#{method => Method}); +request_pseudo_headers([{<<":scheme">>, _}|_], #{scheme := _}) -> + {error, 'Multiple :scheme pseudo-headers were found. (RFC7540 8.1.2.3)'}; +request_pseudo_headers([{<<":scheme">>, Scheme}|Tail], PseudoHeaders) -> + request_pseudo_headers(Tail, PseudoHeaders#{scheme => Scheme}); +request_pseudo_headers([{<<":authority">>, _}|_], #{authority := _}) -> + {error, 'Multiple :authority pseudo-headers were found. 
(RFC7540 8.1.2.3)'}; +request_pseudo_headers([{<<":authority">>, Authority}|Tail], PseudoHeaders) -> + request_pseudo_headers(Tail, PseudoHeaders#{authority => Authority}); +request_pseudo_headers([{<<":path">>, _}|_], #{path := _}) -> + {error, 'Multiple :path pseudo-headers were found. (RFC7540 8.1.2.3)'}; +request_pseudo_headers([{<<":path">>, Path}|Tail], PseudoHeaders) -> + request_pseudo_headers(Tail, PseudoHeaders#{path => Path}); +request_pseudo_headers([{<<":protocol">>, _}|_], #{protocol := _}) -> + {error, 'Multiple :protocol pseudo-headers were found. (RFC7540 8.1.2.3)'}; +request_pseudo_headers([{<<":protocol">>, Protocol}|Tail], PseudoHeaders) -> + request_pseudo_headers(Tail, PseudoHeaders#{protocol => Protocol}); +request_pseudo_headers([{<<":", _/bits>>, _}|_], _) -> + {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'}; +request_pseudo_headers(Headers, PseudoHeaders) -> + {ok, PseudoHeaders, Headers}. + +response_pseudo_headers([{<<":status">>, _}|_], #{status := _}) -> + {error, 'Multiple :status pseudo-headers were found. (RFC7540 8.1.2.3)'}; +response_pseudo_headers([{<<":status">>, Status}|Tail], PseudoHeaders) -> + try cow_http:status_to_integer(Status) of + IntStatus -> + response_pseudo_headers(Tail, PseudoHeaders#{status => IntStatus}) + catch _:_ -> + {error, 'The :status pseudo-header value is invalid. (RFC7540 8.1.2.4)'} + end; +response_pseudo_headers([{<<":", _/bits>>, _}|_], _) -> + {error, 'An unknown or invalid pseudo-header was found. (RFC7540 8.1.2.1)'}; +response_pseudo_headers(Headers, PseudoHeaders) -> + {ok, PseudoHeaders, Headers}. + +trailers_contain_pseudo_headers([]) -> + false; +trailers_contain_pseudo_headers([{<<":", _/bits>>, _}|_]) -> + true; +trailers_contain_pseudo_headers([_|Tail]) -> + trailers_contain_pseudo_headers(Tail). + +%% Rejecting invalid regular headers might be a bit too strong for clients. +headers_regular_headers(Frame=#headers{id=StreamID}, + State, Type, Stream, PseudoHeaders, Headers) -> + case regular_headers(Headers, Type) of + ok when Type =:= request -> + request_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers); + ok when Type =:= push_promise -> + push_promise_frame(Frame, State, Stream, PseudoHeaders, Headers); + ok when Type =:= response -> + response_expected_size(Frame, State, Type, Stream, PseudoHeaders, Headers); + ok when Type =:= trailers -> + trailers_frame(Frame, State, Stream, Headers); + {error, HumanReadable} when Type =:= request -> + headers_malformed(Frame, State, HumanReadable); + {error, HumanReadable} -> + stream_reset(StreamID, State, protocol_error, HumanReadable) + end. + +regular_headers([{<<>>, _}|_], _) -> + {error, 'Empty header names are not valid regular headers. (CVE-2019-9516)'}; +regular_headers([{<<":", _/bits>>, _}|_], _) -> + {error, 'Pseudo-headers were found after regular headers. (RFC7540 8.1.2.1)'}; +regular_headers([{<<"connection">>, _}|_], _) -> + {error, 'The connection header is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"keep-alive">>, _}|_], _) -> + {error, 'The keep-alive header is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"proxy-authenticate">>, _}|_], _) -> + {error, 'The proxy-authenticate header is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"proxy-authorization">>, _}|_], _) -> + {error, 'The proxy-authorization header is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"transfer-encoding">>, _}|_], _) -> + {error, 'The transfer-encoding header is not allowed. 
(RFC7540 8.1.2.2)'}; +regular_headers([{<<"upgrade">>, _}|_], _) -> + {error, 'The upgrade header is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"te">>, Value}|_], request) when Value =/= <<"trailers">> -> + {error, 'The te header with a value other than "trailers" is not allowed. (RFC7540 8.1.2.2)'}; +regular_headers([{<<"te">>, _}|_], Type) when Type =/= request -> + {error, 'The te header is only allowed in request headers. (RFC7540 8.1.2.2)'}; +regular_headers([{Name, _}|Tail], Type) -> + Pattern = [ + <<$A>>, <<$B>>, <<$C>>, <<$D>>, <<$E>>, <<$F>>, <<$G>>, <<$H>>, <<$I>>, + <<$J>>, <<$K>>, <<$L>>, <<$M>>, <<$N>>, <<$O>>, <<$P>>, <<$Q>>, <<$R>>, + <<$S>>, <<$T>>, <<$U>>, <<$V>>, <<$W>>, <<$X>>, <<$Y>>, <<$Z>> + ], + case binary:match(Name, Pattern) of + nomatch -> regular_headers(Tail, Type); + _ -> {error, 'Header names must be lowercase. (RFC7540 8.1.2)'} + end; +regular_headers([], _) -> + ok. + +request_expected_size(Frame=#headers{fin=IsFin}, State, Type, Stream, PseudoHeaders, Headers) -> + case [CL || {<<"content-length">>, CL} <- Headers] of + [] when IsFin =:= fin -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [] -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined); + [<<"0">>] when IsFin =:= fin -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [_] when IsFin =:= fin -> + headers_malformed(Frame, State, + 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)'); + [BinLen] -> + headers_parse_expected_size(Frame, State, Type, Stream, + PseudoHeaders, Headers, BinLen); + _ -> + headers_malformed(Frame, State, + 'Multiple content-length headers were received. (RFC7230 3.3.2)') + end. + +response_expected_size(Frame=#headers{id=StreamID, fin=IsFin}, State, Type, + Stream=#stream{method=Method}, PseudoHeaders=#{status := Status}, Headers) -> + case [CL || {<<"content-length">>, CL} <- Headers] of + [] when IsFin =:= fin -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [] -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, undefined); + [_] when Status >= 100, Status =< 199 -> + stream_reset(StreamID, State, protocol_error, + 'Content-length header received in a 1xx response. (RFC7230 3.3.2)'); + [_] when Status =:= 204 -> + stream_reset(StreamID, State, protocol_error, + 'Content-length header received in a 204 response. (RFC7230 3.3.2)'); + [_] when Status >= 200, Status =< 299, Method =:= <<"CONNECT">> -> + stream_reset(StreamID, State, protocol_error, + 'Content-length header received in a 2xx response to a CONNECT request. (RFC7230 3.3.2).'); + %% Responses to HEAD requests, and 304 responses may contain + %% a content-length header that must be ignored. (RFC7230 3.3.2) + [_] when Method =:= <<"HEAD">> -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [_] when Status =:= 304 -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [<<"0">>] when IsFin =:= fin -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, 0); + [_] when IsFin =:= fin -> + stream_reset(StreamID, State, protocol_error, + 'HEADERS frame with the END_STREAM flag contains a non-zero content-length. (RFC7540 8.1.2.6)'); + [BinLen] -> + headers_parse_expected_size(Frame, State, Type, Stream, + PseudoHeaders, Headers, BinLen); + _ -> + stream_reset(StreamID, State, protocol_error, + 'Multiple content-length headers were received. (RFC7230 3.3.2)') + end. 
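+
+%% Illustrative cases for the clauses above: a 304 response carrying
+%% "content-length: 1234" is accepted with an expected body size of 0, as is
+%% any response to a HEAD request (the header is ignored, RFC7230 3.3.2);
+%% a 200 response to a GET request with the END_STREAM flag set and a
+%% non-zero "content-length: 5" is rejected with a protocol_error stream reset.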
+ +headers_parse_expected_size(Frame=#headers{id=StreamID}, + State, Type, Stream, PseudoHeaders, Headers, BinLen) -> + try cow_http_hd:parse_content_length(BinLen) of + Len -> + headers_frame(Frame, State, Type, Stream, PseudoHeaders, Headers, Len) + catch + _:_ -> + HumanReadable = 'The content-length header is invalid. (RFC7230 3.3.2)', + case Type of + request -> headers_malformed(Frame, State, HumanReadable); + response -> stream_reset(StreamID, State, protocol_error, HumanReadable) + end + end. + +headers_frame(#headers{id=StreamID, fin=IsFin}, State0=#http2_machine{ + local_settings=#{initial_window_size := RemoteWindow}, + remote_settings=#{initial_window_size := LocalWindow}}, + Type, Stream0, PseudoHeaders, Headers, Len) -> + {Stream, State1} = case Type of + request -> + TE = case lists:keyfind(<<"te">>, 1, Headers) of + {_, TE0} -> TE0; + false -> undefined + end, + {#stream{id=StreamID, method=maps:get(method, PseudoHeaders), + remote=IsFin, remote_expected_size=Len, + local_window=LocalWindow, remote_window=RemoteWindow, te=TE}, + State0#http2_machine{remote_streamid=StreamID}}; + response -> + Stream1 = case PseudoHeaders of + #{status := Status} when Status >= 100, Status =< 199 -> Stream0; + _ -> Stream0#stream{remote=IsFin, remote_expected_size=Len} + end, + {Stream1, State0} + end, + State = stream_store(Stream, State1), + {ok, {headers, StreamID, IsFin, Headers, PseudoHeaders, Len}, State}. + +trailers_frame(#headers{id=StreamID}, State0, Stream0, Headers) -> + Stream = Stream0#stream{remote=fin}, + State = stream_store(Stream, State0), + case is_body_size_valid(Stream) of + true -> + {ok, {trailers, StreamID, Headers}, State}; + false -> + stream_reset(StreamID, State, protocol_error, + 'The total size of DATA frames is different than the content-length. (RFC7540 8.1.2.6)') + end. + +%% PRIORITY frame. +%% +%% @todo Handle PRIORITY frames. + +priority_frame(_Frame, State) -> + {ok, State}. + +%% RST_STREAM frame. + +rst_stream_frame({rst_stream, StreamID, _}, State=#http2_machine{mode=Mode, + local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) + when (?IS_LOCAL(Mode, StreamID) andalso (StreamID >= LocalStreamID)) + orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID > RemoteStreamID)) -> + {error, {connection_error, protocol_error, + 'RST_STREAM frame received on a stream in idle state. (RFC7540 5.1)'}, + State}; +rst_stream_frame({rst_stream, StreamID, Reason}, State=#http2_machine{ + streams=Streams0, remote_lingering_streams=Lingering0}) -> + Streams = maps:remove(StreamID, Streams0), + %% We only keep up to 10 streams in this state. @todo Make it configurable? + Lingering = [StreamID|lists:sublist(Lingering0, 10 - 1)], + {ok, {rst_stream, StreamID, Reason}, + State#http2_machine{streams=Streams, remote_lingering_streams=Lingering}}. + +%% SETTINGS frame. 
+ +settings_frame({settings, Settings}, State0=#http2_machine{ + opts=Opts, remote_settings=Settings0}) -> + State1 = State0#http2_machine{remote_settings=maps:merge(Settings0, Settings)}, + State2 = maps:fold(fun + (header_table_size, NewSize, State=#http2_machine{encode_state=EncodeState0}) -> + MaxSize = maps:get(max_encode_table_size, Opts, 4096), + EncodeState = cow_hpack:set_max_size(min(NewSize, MaxSize), EncodeState0), + State#http2_machine{encode_state=EncodeState}; + (initial_window_size, NewWindowSize, State) -> + OldWindowSize = maps:get(initial_window_size, Settings0, 65535), + streams_update_local_window(State, NewWindowSize - OldWindowSize); + (_, _, State) -> + State + end, State1, Settings), + case Settings of + #{initial_window_size := _} -> send_data(State2); + _ -> {ok, State2} + end; +%% We expect to receive a SETTINGS frame as part of the preface. +settings_frame(_F, State=#http2_machine{mode=server}) -> + {error, {connection_error, protocol_error, + 'The preface sequence must be followed by a SETTINGS frame. (RFC7540 3.5)'}, + State}; +settings_frame(_F, State) -> + {error, {connection_error, protocol_error, + 'The preface must begin with a SETTINGS frame. (RFC7540 3.5)'}, + State}. + +%% When SETTINGS_INITIAL_WINDOW_SIZE changes we need to update +%% the local stream windows for all active streams and perhaps +%% resume sending data. +streams_update_local_window(State=#http2_machine{streams=Streams0}, Increment) -> + Streams = maps:map(fun(_, S=#stream{local_window=StreamWindow}) -> + S#stream{local_window=StreamWindow + Increment} + end, Streams0), + State#http2_machine{streams=Streams}. + +%% Ack for a previously sent SETTINGS frame. + +settings_ack_frame(State0=#http2_machine{settings_timer=TRef, + local_settings=Local0, next_settings=NextSettings}) -> + ok = case TRef of + undefined -> ok; + _ -> erlang:cancel_timer(TRef, [{async, true}, {info, false}]) + end, + Local = maps:merge(Local0, NextSettings), + State1 = State0#http2_machine{settings_timer=undefined, + local_settings=Local, next_settings=#{}}, + {ok, maps:fold(fun + (header_table_size, MaxSize, State=#http2_machine{decode_state=DecodeState0}) -> + DecodeState = cow_hpack:set_max_size(MaxSize, DecodeState0), + State#http2_machine{decode_state=DecodeState}; + (initial_window_size, NewWindowSize, State) -> + OldWindowSize = maps:get(initial_window_size, Local0, 65535), + streams_update_remote_window(State, NewWindowSize - OldWindowSize); + (_, _, State) -> + State + end, State1, NextSettings)}. + +%% When we receive an ack to a SETTINGS frame we sent we need to update +%% the remote stream windows for all active streams. +streams_update_remote_window(State=#http2_machine{streams=Streams0}, Increment) -> + Streams = maps:map(fun(_, S=#stream{remote_window=StreamWindow}) -> + S#stream{remote_window=StreamWindow + Increment} + end, Streams0), + State#http2_machine{streams=Streams}. + +%% PUSH_PROMISE frame. + +%% Convenience record to manipulate the tuple. +%% The order of the fields matter. +-record(push_promise, { + id :: cow_http2:streamid(), + head :: cow_http2:head_fin(), + promised_id :: cow_http2:streamid(), + data :: binary() +}). + +push_promise_frame(_, State=#http2_machine{mode=server}) -> + {error, {connection_error, protocol_error, + 'PUSH_PROMISE frames MUST NOT be sent by the client. 
(RFC7540 6.6)'}, + State}; +push_promise_frame(_, State=#http2_machine{local_settings=#{enable_push := false}}) -> + {error, {connection_error, protocol_error, + 'PUSH_PROMISE frame received despite SETTINGS_ENABLE_PUSH set to 0. (RFC7540 6.6)'}, + State}; +push_promise_frame(#push_promise{promised_id=PromisedStreamID}, + State=#http2_machine{remote_streamid=RemoteStreamID}) + when PromisedStreamID =< RemoteStreamID -> + {error, {connection_error, protocol_error, + 'PUSH_PROMISE frame received for a promised stream in closed or half-closed state. (RFC7540 5.1, RFC7540 6.6)'}, + State}; +push_promise_frame(#push_promise{id=StreamID}, State) + when not ?IS_CLIENT_LOCAL(StreamID) -> + {error, {connection_error, protocol_error, + 'PUSH_PROMISE frame received on a server-initiated stream. (RFC7540 6.6)'}, + State}; +push_promise_frame(Frame=#push_promise{id=StreamID, head=IsHeadFin, + promised_id=PromisedStreamID, data=HeaderData}, State) -> + case stream_get(StreamID, State) of + Stream=#stream{remote=idle} -> + case IsHeadFin of + head_fin -> + headers_decode(#headers{id=PromisedStreamID, + fin=fin, head=IsHeadFin, data=HeaderData}, + State, push_promise, Stream); + head_nofin -> + {ok, State#http2_machine{state={continuation, push_promise, Frame}}} + end; + _ -> +%% @todo Check if the stream is lingering. If it is, decode the frame +%% and do what? That's the big question and why it's not implemented yet. +% However, an endpoint that +% has sent RST_STREAM on the associated stream MUST handle PUSH_PROMISE +% frames that might have been created before the RST_STREAM frame is +% received and processed. (RFC7540 6.6) + {error, {connection_error, stream_closed, + 'PUSH_PROMISE frame received on a stream in closed or half-closed state. (RFC7540 5.1, RFC7540 6.6)'}, + State} + end. + +push_promise_frame(#headers{id=PromisedStreamID}, + State0=#http2_machine{ + local_settings=#{initial_window_size := RemoteWindow}, + remote_settings=#{initial_window_size := LocalWindow}}, + #stream{id=StreamID}, PseudoHeaders=#{method := Method}, Headers) -> + TE = case lists:keyfind(<<"te">>, 1, Headers) of + {_, TE0} -> TE0; + false -> undefined + end, + PromisedStream = #stream{id=PromisedStreamID, method=Method, + local=fin, local_window=LocalWindow, + remote_window=RemoteWindow, te=TE}, + State = stream_store(PromisedStream, + State0#http2_machine{remote_streamid=PromisedStreamID}), + {ok, {push_promise, StreamID, PromisedStreamID, Headers, PseudoHeaders}, State}. + +%% PING frame. + +ping_frame({ping, _}, State) -> + {ok, State}. + +%% Ack for a previously sent PING frame. +%% +%% @todo Might want to check contents but probably a waste of time. + +ping_ack_frame({ping_ack, _}, State) -> + {ok, State}. + +%% GOAWAY frame. + +goaway_frame(Frame={goaway, _, _, _}, State) -> + {ok, Frame, State}. + +%% WINDOW_UPDATE frame. + +%% Connection-wide WINDOW_UPDATE frame. +window_update_frame({window_update, Increment}, State=#http2_machine{local_window=ConnWindow}) + when ConnWindow + Increment > 16#7fffffff -> + {error, {connection_error, flow_control_error, + 'The flow control window must not be greater than 2^31-1. (RFC7540 6.9.1)'}, + State}; +window_update_frame({window_update, Increment}, State=#http2_machine{local_window=ConnWindow}) -> + send_data(State#http2_machine{local_window=ConnWindow + Increment}); +%% Stream-specific WINDOW_UPDATE frame. 
+window_update_frame({window_update, StreamID, _}, State=#http2_machine{mode=Mode,
+        local_streamid=LocalStreamID, remote_streamid=RemoteStreamID})
+        when (?IS_LOCAL(Mode, StreamID) andalso (StreamID >= LocalStreamID))
+        orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID > RemoteStreamID)) ->
+    {error, {connection_error, protocol_error,
+        'WINDOW_UPDATE frame received on a stream in idle state. (RFC7540 5.1)'},
+        State};
+window_update_frame({window_update, StreamID, Increment},
+        State0=#http2_machine{remote_lingering_streams=Lingering}) ->
+    case stream_get(StreamID, State0) of
+        #stream{local_window=StreamWindow} when StreamWindow + Increment > 16#7fffffff ->
+            stream_reset(StreamID, State0, flow_control_error,
+                'The flow control window must not be greater than 2^31-1. (RFC7540 6.9.1)');
+        Stream0 = #stream{local_window=StreamWindow} ->
+            send_data(Stream0#stream{local_window=StreamWindow + Increment}, State0);
+        undefined ->
+            %% WINDOW_UPDATE frames may be received for a short period of time
+            %% after a stream is closed. They must be ignored.
+            case lists:member(StreamID, Lingering) of
+                false -> {ok, State0};
+                true -> stream_reset(StreamID, State0, stream_closed,
+                    'WINDOW_UPDATE frame received after the stream was reset. (RFC7540 5.1)')
+            end
+    end.
+
+%% CONTINUATION frame.
+
+%% Convenience record to manipulate the tuple.
+%% The order of the fields matter.
+-record(continuation, {
+    id :: cow_http2:streamid(),
+    head :: cow_http2:head_fin(),
+    data :: binary()
+}).
+
+unexpected_continuation_frame(#continuation{}, State) ->
+    {error, {connection_error, protocol_error,
+        'CONTINUATION frames MUST be preceded by a HEADERS or PUSH_PROMISE frame. (RFC7540 6.10)'},
+        State}.
+
+continuation_frame(#continuation{id=StreamID, head=head_fin, data=HeaderFragment1},
+        State=#http2_machine{state={continuation, Type,
+            Frame=#headers{id=StreamID, data=HeaderFragment0}}}) ->
+    HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
+    headers_decode(Frame#headers{head=head_fin, data=HeaderData},
+        State#http2_machine{state=normal}, Type, stream_get(StreamID, State));
+continuation_frame(#continuation{id=StreamID, head=head_fin, data=HeaderFragment1},
+        State=#http2_machine{state={continuation, Type, #push_promise{
+            id=StreamID, promised_id=PromisedStreamID, data=HeaderFragment0}}}) ->
+    HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
+    headers_decode(#headers{id=PromisedStreamID, fin=fin, head=head_fin, data=HeaderData},
+        State#http2_machine{state=normal}, Type, undefined);
+continuation_frame(#continuation{id=StreamID, data=HeaderFragment1},
+        State=#http2_machine{state={continuation, Type, ContinuedFrame0}})
+        when element(2, ContinuedFrame0) =:= StreamID ->
+    ContinuedFrame = case ContinuedFrame0 of
+        #headers{data=HeaderFragment0} ->
+            HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
+            ContinuedFrame0#headers{data=HeaderData};
+        #push_promise{data=HeaderFragment0} ->
+            HeaderData = <<HeaderFragment0/binary, HeaderFragment1/binary>>,
+            ContinuedFrame0#push_promise{data=HeaderData}
+    end,
+    {ok, State#http2_machine{state={continuation, Type, ContinuedFrame}}};
+continuation_frame(_F, State) ->
+    {error, {connection_error, protocol_error,
+        'An invalid frame was received in the middle of a header block. (RFC7540 6.2)'},
+        State}.
+
+%% Ignored frames.
+
+-spec ignored_frame(State)
+    -> {ok, State}
+    | {error, {connection_error, protocol_error, atom()}, State}
+    when State::http2_machine().
+ignored_frame(State=#http2_machine{state={continuation, _, _}}) ->
+    {error, {connection_error, protocol_error,
+        'An invalid frame was received in the middle of a header block.
(RFC7540 6.2)'}, + State}; +%% @todo It might be useful to error out when we receive +%% too many unknown frames. (RFC7540 10.5) +ignored_frame(State) -> + {ok, State}. + +%% Timeouts. + +-spec timeout(preface_timeout | settings_timeout, reference(), State) + -> {ok, State} + | {error, {connection_error, cow_http2:error(), atom()}, State} + when State::http2_machine(). +timeout(preface_timeout, TRef, State=#http2_machine{preface_timer=TRef}) -> + {error, {connection_error, protocol_error, + 'The preface was not received in a reasonable amount of time.'}, + State}; +timeout(settings_timeout, TRef, State=#http2_machine{settings_timer=TRef}) -> + {error, {connection_error, settings_timeout, + 'The SETTINGS ack was not received within the configured time. (RFC7540 6.5.3)'}, + State}; +timeout(_, _, State) -> + {ok, State}. + +%% Functions for sending a message header or body. Note that +%% this module does not send data directly, instead it returns +%% a value that can then be used to send the frames. + +-spec prepare_headers(cow_http2:streamid(), State, idle | cow_http2:fin(), + pseudo_headers(), cow_http:headers()) + -> {ok, cow_http2:fin(), iodata(), State} when State::http2_machine(). +prepare_headers(StreamID, State=#http2_machine{encode_state=EncodeState0}, + IsFin0, PseudoHeaders, Headers0) -> + Stream = #stream{method=Method, local=idle} = stream_get(StreamID, State), + IsFin = case {IsFin0, Method} of + {idle, _} -> nofin; + {_, <<"HEAD">>} -> fin; + _ -> IsFin0 + end, + Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)), + {HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0), + {ok, IsFin, HeaderBlock, stream_store(Stream#stream{local=IsFin0}, + State#http2_machine{encode_state=EncodeState})}. + +-spec prepare_push_promise(cow_http2:streamid(), State, pseudo_headers(), cow_http:headers()) + -> {ok, cow_http2:streamid(), iodata(), State} + | {error, no_push} when State::http2_machine(). +prepare_push_promise(_, #http2_machine{remote_settings=#{enable_push := false}}, _, _) -> + {error, no_push}; +prepare_push_promise(StreamID, State=#http2_machine{encode_state=EncodeState0, + local_settings=#{initial_window_size := RemoteWindow}, + remote_settings=#{initial_window_size := LocalWindow}, + local_streamid=LocalStreamID}, PseudoHeaders, Headers0) -> + #stream{local=idle} = stream_get(StreamID, State), + TE = case lists:keyfind(<<"te">>, 1, Headers0) of + {_, TE0} -> TE0; + false -> undefined + end, + Headers = merge_pseudo_headers(PseudoHeaders, remove_http11_headers(Headers0)), + {HeaderBlock, EncodeState} = cow_hpack:encode(Headers, EncodeState0), + {ok, LocalStreamID, HeaderBlock, stream_store( + #stream{id=LocalStreamID, method=maps:get(method, PseudoHeaders), + remote=fin, remote_expected_size=0, + local_window=LocalWindow, remote_window=RemoteWindow, te=TE}, + State#http2_machine{encode_state=EncodeState, local_streamid=LocalStreamID + 2})}. + +remove_http11_headers(Headers) -> + RemoveHeaders0 = [ + <<"keep-alive">>, + <<"proxy-connection">>, + <<"transfer-encoding">>, + <<"upgrade">> + ], + RemoveHeaders = case lists:keyfind(<<"connection">>, 1, Headers) of + false -> + RemoveHeaders0; + {_, ConnHd} -> + %% We do not need to worry about any "close" header because + %% that header name is reserved. + Connection = cow_http_hd:parse_connection(ConnHd), + Connection ++ [<<"connection">>|RemoveHeaders0] + end, + lists:filter(fun({Name, _}) -> + not lists:member(Name, RemoveHeaders) + end, Headers). 
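+
+%% For example (illustrative input), given the headers
+%%   [{<<"connection">>, <<"upgrade">>}, {<<"upgrade">>, <<"websocket">>},
+%%    {<<"transfer-encoding">>, <<"chunked">>}, {<<"x-custom">>, <<"1">>}]
+%% remove_http11_headers/1 returns [{<<"x-custom">>, <<"1">>}]: the headers
+%% named in the connection header, the connection header itself and the
+%% HTTP/1.1-specific hop-by-hop headers are all dropped before HPACK encoding.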
+ +merge_pseudo_headers(PseudoHeaders, Headers0) -> + lists:foldl(fun + ({status, Status}, Acc) when is_integer(Status) -> + [{<<":status">>, integer_to_binary(Status)}|Acc]; + ({Name, Value}, Acc) -> + [{iolist_to_binary([$:, atom_to_binary(Name, latin1)]), Value}|Acc] + end, Headers0, maps:to_list(PseudoHeaders)). + +-spec prepare_trailers(cow_http2:streamid(), State, cow_http:headers()) + -> {ok, iodata(), State} when State::http2_machine(). +prepare_trailers(StreamID, State=#http2_machine{encode_state=EncodeState0}, Trailers) -> + Stream = #stream{local=nofin} = stream_get(StreamID, State), + {HeaderBlock, EncodeState} = cow_hpack:encode(Trailers, EncodeState0), + {ok, HeaderBlock, stream_store(Stream#stream{local=fin}, + State#http2_machine{encode_state=EncodeState})}. + +-spec send_or_queue_data(cow_http2:streamid(), State, cow_http2:fin(), DataOrFileOrTrailers) + -> {ok, State} + | {send, [{cow_http2:streamid(), cow_http2:fin(), [DataOrFileOrTrailers]}], State} + when State::http2_machine(), DataOrFileOrTrailers:: + {data, iodata()} | #sendfile{} | {trailers, cow_http:headers()}. +send_or_queue_data(StreamID, State0=#http2_machine{opts=Opts, local_window=ConnWindow}, + IsFin0, DataOrFileOrTrailers0) -> + %% @todo Probably just ignore if the method was HEAD. + Stream0 = #stream{ + local=nofin, + local_window=StreamWindow, + local_buffer_size=BufferSize, + te=TE0 + } = stream_get(StreamID, State0), + DataOrFileOrTrailers = case DataOrFileOrTrailers0 of + {trailers, _} -> + %% We only accept TE headers containing exactly "trailers" (RFC7540 8.1.2.1). + TE = try cow_http_hd:parse_te(TE0) of + {trailers, []} -> trailers; + _ -> no_trailers + catch _:_ -> + %% If we can't parse the TE header, assume we can't send trailers. + no_trailers + end, + case TE of + trailers -> + DataOrFileOrTrailers0; + no_trailers -> + {data, <<>>} + end; + _ -> + DataOrFileOrTrailers0 + end, + SendSize = case DataOrFileOrTrailers of + {data, D} -> BufferSize + iolist_size(D); + #sendfile{bytes=B} -> BufferSize + B; + {trailers, _} -> 0 + end, + MinSendSize = maps:get(stream_window_data_threshold, Opts, 16384), + if + %% If we cannot send the data all at once and the window + %% is smaller than we are willing to send at a minimum, + %% we queue the data directly. + (StreamWindow < MinSendSize) + andalso ((StreamWindow < SendSize) orelse (ConnWindow < SendSize)) -> + {ok, stream_store(queue_data(Stream0, IsFin0, DataOrFileOrTrailers, in), State0)}; + true -> + case send_or_queue_data(Stream0, State0, [], IsFin0, DataOrFileOrTrailers, in) of + {ok, Stream, State, []} -> + {ok, stream_store(Stream, State)}; + {ok, Stream=#stream{local=IsFin}, State, SendData} -> + {send, [{StreamID, IsFin, lists:reverse(SendData)}], stream_store(Stream, State)} + end + end. + +%% Internal data sending/queuing functions. + +%% @todo Should we ever want to implement the PRIORITY mechanism, +%% this would be the place to do it. Right now, we just go over +%% all streams and send what we can until either everything is +%% sent or we run out of space in the window. +send_data(State0=#http2_machine{streams=Streams0}) -> + Iterator = maps:iterator(Streams0), + case send_data_for_all_streams(maps:next(Iterator), Streams0, State0, []) of + {ok, Streams, State, []} -> + {ok, State#http2_machine{streams=Streams}}; + {ok, Streams, State, Send} -> + {send, Send, State#http2_machine{streams=Streams}} + end. 
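send_or_queue_data/4 likewise returns what should be written rather than writing it. A rough sketch of the caller side for the simplest case, assuming a single {data, iodata()} item and DATA frames built with cow_http2:data/3; sendfile records and trailers need extra handling (cowboy_http2 in this patch has the complete version), and all caller-side names are illustrative.

%% Hypothetical caller-side helper covering only the single {data, iodata()} case;
%% not part of this patch.
send_body(StreamID, HTTP2Machine0, Body, Socket, Transport) ->
	case cow_http2_machine:send_or_queue_data(StreamID, HTTP2Machine0, fin, {data, Body}) of
		{ok, HTTP2Machine} ->
			%% Flow control forced the data into the stream queue; it is flushed
			%% later when the peer sends WINDOW_UPDATE (window_update_frame/2 above).
			HTTP2Machine;
		{send, [{StreamID, IsFin, [{data, Data}]}], HTTP2Machine} ->
			ok = Transport:send(Socket, cow_http2:data(StreamID, IsFin, Data)),
			HTTP2Machine
	end.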
+ +send_data_for_all_streams(none, Streams, State, Send) -> + {ok, Streams, State, Send}; +%% While technically we should never get < 0 here, let's be on the safe side. +send_data_for_all_streams(_, Streams, State=#http2_machine{local_window=ConnWindow}, Send) + when ConnWindow =< 0 -> + {ok, Streams, State, Send}; +%% We rely on send_data_for_one_stream/3 to do all the necessary checks about the stream. +send_data_for_all_streams({StreamID, Stream0, Iterator}, Streams, State0, Send) -> + case send_data_for_one_stream(Stream0, State0, []) of + {ok, Stream, State, []} -> + send_data_for_all_streams(maps:next(Iterator), + Streams#{StreamID => Stream}, State, Send); + %% We need to remove the stream here because we do not use stream_store/2. + {ok, #stream{local=fin, remote=fin}, State, SendData} -> + send_data_for_all_streams(maps:next(Iterator), + maps:remove(StreamID, Streams), State, [{StreamID, fin, SendData}|Send]); + {ok, Stream=#stream{local=IsFin}, State, SendData} -> + send_data_for_all_streams(maps:next(Iterator), + Streams#{StreamID => Stream}, State, [{StreamID, IsFin, SendData}|Send]) + end. + +send_data(Stream0, State0) -> + case send_data_for_one_stream(Stream0, State0, []) of + {ok, Stream, State, []} -> + {ok, stream_store(Stream, State)}; + {ok, Stream=#stream{id=StreamID, local=IsFin}, State, SendData} -> + {send, [{StreamID, IsFin, SendData}], stream_store(Stream, State)} + end. + +send_data_for_one_stream(Stream=#stream{local=nofin, local_buffer_size=0, + local_trailers=Trailers}, State, SendAcc) when Trailers =/= undefined -> + {ok, Stream, State, lists:reverse([{trailers, Trailers}|SendAcc])}; +send_data_for_one_stream(Stream=#stream{local=nofin, local_buffer=Q0, local_buffer_size=0}, + State, SendAcc) -> + case queue:len(Q0) of + 0 -> + {ok, Stream, State, lists:reverse(SendAcc)}; + 1 -> + %% We know there is a final empty data frame in the queue. + %% We need to mark the stream as complete. + {{value, {fin, 0, _}}, Q} = queue:out(Q0), + {ok, Stream#stream{local=fin, local_buffer=Q}, State, lists:reverse(SendAcc)} + end; +send_data_for_one_stream(Stream=#stream{local=IsFin, local_window=StreamWindow, + local_buffer_size=BufferSize}, State=#http2_machine{local_window=ConnWindow}, SendAcc) + when ConnWindow =< 0; IsFin =:= fin; StreamWindow =< 0; BufferSize =:= 0 -> + {ok, Stream, State, lists:reverse(SendAcc)}; +send_data_for_one_stream(Stream0=#stream{local_window=StreamWindow, + local_buffer=Q0, local_buffer_size=BufferSize}, + State0=#http2_machine{opts=Opts, local_window=ConnWindow}, SendAcc0) -> + MinSendSize = maps:get(stream_window_data_threshold, Opts, 16384), + if + %% If we cannot send the entire buffer at once and the window + %% is smaller than we are willing to send at a minimum, do nothing. + %% + %% We only do this check the first time we go through this function; + %% we want to send as much data as possible IF we send some. + (SendAcc0 =:= []) andalso (StreamWindow < MinSendSize) + andalso ((StreamWindow < BufferSize) orelse (ConnWindow < BufferSize)) -> + {ok, Stream0, State0, []}; + true -> + %% We know there is an item in the queue. + {{value, {IsFin, DataSize, Data}}, Q} = queue:out(Q0), + Stream1 = Stream0#stream{local_buffer=Q, local_buffer_size=BufferSize - DataSize}, + {ok, Stream, State, SendAcc} + = send_or_queue_data(Stream1, State0, SendAcc0, IsFin, Data, in_r), + send_data_for_one_stream(Stream, State, SendAcc) + end. + +%% We can send trailers immediately if the queue is empty, otherwise we queue. 
+%% We always send trailer frames even if the window is empty. +send_or_queue_data(Stream=#stream{local_buffer_size=0}, + State, SendAcc, fin, {trailers, Trailers}, _) -> + {ok, Stream, State, [{trailers, Trailers}|SendAcc]}; +send_or_queue_data(Stream, State, SendAcc, fin, {trailers, Trailers}, _) -> + {ok, Stream#stream{local_trailers=Trailers}, State, SendAcc}; +%% Send data immediately if we can, buffer otherwise. +send_or_queue_data(Stream=#stream{local_window=StreamWindow}, + State=#http2_machine{local_window=ConnWindow}, + SendAcc, IsFin, Data, In) + when ConnWindow =< 0; StreamWindow =< 0 -> + {ok, queue_data(Stream, IsFin, Data, In), State, SendAcc}; +send_or_queue_data(Stream=#stream{local_window=StreamWindow}, + State=#http2_machine{opts=Opts, remote_settings=RemoteSettings, + local_window=ConnWindow}, SendAcc, IsFin, Data, In) -> + RemoteMaxFrameSize = maps:get(max_frame_size, RemoteSettings, 16384), + ConfiguredMaxFrameSize = maps:get(max_frame_size_sent, Opts, infinity), + MaxSendSize = min( + min(ConnWindow, StreamWindow), + min(RemoteMaxFrameSize, ConfiguredMaxFrameSize) + ), + case Data of + File = #sendfile{bytes=Bytes} when Bytes =< MaxSendSize -> + {ok, Stream#stream{local=IsFin, local_window=StreamWindow - Bytes}, + State#http2_machine{local_window=ConnWindow - Bytes}, + [File|SendAcc]}; + File = #sendfile{offset=Offset, bytes=Bytes} -> + send_or_queue_data(Stream#stream{local_window=StreamWindow - MaxSendSize}, + State#http2_machine{local_window=ConnWindow - MaxSendSize}, + [File#sendfile{bytes=MaxSendSize}|SendAcc], IsFin, + File#sendfile{offset=Offset + MaxSendSize, bytes=Bytes - MaxSendSize}, In); + {data, Iolist0} -> + IolistSize = iolist_size(Iolist0), + if + IolistSize =< MaxSendSize -> + {ok, Stream#stream{local=IsFin, local_window=StreamWindow - IolistSize}, + State#http2_machine{local_window=ConnWindow - IolistSize}, + [{data, Iolist0}|SendAcc]}; + true -> + {Iolist, More} = cow_iolists:split(MaxSendSize, Iolist0), + send_or_queue_data(Stream#stream{local_window=StreamWindow - MaxSendSize}, + State#http2_machine{local_window=ConnWindow - MaxSendSize}, + [{data, Iolist}|SendAcc], IsFin, {data, More}, In) + end + end. + +queue_data(Stream=#stream{local_buffer=Q0, local_buffer_size=Size0}, IsFin, Data, In) -> + DataSize = case Data of + {sendfile, _, Bytes, _} -> Bytes; + {data, Iolist} -> iolist_size(Iolist) + end, + %% Never queue non-final empty data frames. + case {DataSize, IsFin} of + {0, nofin} -> + Stream; + _ -> + Q = queue:In({IsFin, DataSize, Data}, Q0), + Stream#stream{local_buffer=Q, local_buffer_size=Size0 + DataSize} + end. + +%% Public interface to update the flow control window. +%% +%% The ensure_window function applies heuristics to avoid updating the +%% window when it is not necessary. The update_window function updates +%% the window unconditionally. +%% +%% The ensure_window function should be called when requesting more +%% data (for example when reading a request or response body) as well +%% as when receiving new data. Failure to do so may result in the +%% window being depleted. +%% +%% The heuristics dictating whether the window must be updated and +%% what the window size is depends on three options (margin, max +%% and threshold) along with the Size argument. The window increment +%% returned by this function may therefore be smaller than the Size +%% argument. On the other hand the total window allocated over many +%% calls may end up being larger than the initial Size argument. 
As +%% a result, it is the responsibility of the caller to ensure that +%% the Size argument is never lower than 0. + +-spec ensure_window(non_neg_integer(), State) + -> ok | {ok, pos_integer(), State} when State::http2_machine(). +ensure_window(Size, State=#http2_machine{opts=Opts, remote_window=RemoteWindow}) -> + case ensure_window(Size, RemoteWindow, connection, Opts) of + ok -> + ok; + {ok, Increment} -> + {ok, Increment, State#http2_machine{remote_window=RemoteWindow + Increment}} + end. + +-spec ensure_window(cow_http2:streamid(), non_neg_integer(), State) + -> ok | {ok, pos_integer(), State} when State::http2_machine(). +ensure_window(StreamID, Size, State=#http2_machine{opts=Opts}) -> + case stream_get(StreamID, State) of + %% For simplicity's sake, we do not consider attempts to ensure the window + %% of a terminated stream to be errors. We simply act as if the stream + %% window is large enough. + undefined -> + ok; + Stream = #stream{remote_window=RemoteWindow} -> + case ensure_window(Size, RemoteWindow, stream, Opts) of + ok -> + ok; + {ok, Increment} -> + {ok, Increment, stream_store(Stream#stream{remote_window=RemoteWindow + Increment}, State)} + end + end. + +%% No need to update the window when we are not expecting data. +ensure_window(0, _, _, _) -> + ok; +%% No need to update the window when it is already high enough. +ensure_window(Size, Window, _, _) when Size =< Window -> + ok; +ensure_window(Size0, Window, Type, Opts) -> + Threshold = ensure_window_threshold(Type, Opts), + if + %% We do not update the window when it is higher than the threshold. + Window > Threshold -> + ok; + true -> + Margin = ensure_window_margin(Type, Opts), + Size = Size0 + Margin, + MaxWindow = ensure_window_max(Type, Opts), + Increment = if + %% We cannot go above the maximum window size. + Size > MaxWindow -> MaxWindow - Window; + true -> Size - Window + end, + case Increment of + 0 -> ok; + _ -> {ok, Increment} + end + end. + +%% Margin defaults to the default initial window size. +ensure_window_margin(connection, Opts) -> + maps:get(connection_window_margin_size, Opts, 65535); +ensure_window_margin(stream, Opts) -> + maps:get(stream_window_margin_size, Opts, 65535). + +%% Max window defaults to the max value allowed by the protocol. +ensure_window_max(connection, Opts) -> + maps:get(max_connection_window_size, Opts, 16#7fffffff); +ensure_window_max(stream, Opts) -> + maps:get(max_stream_window_size, Opts, 16#7fffffff). + +%% Threshold defaults to 10 times the default frame size. +ensure_window_threshold(connection, Opts) -> + maps:get(connection_window_update_threshold, Opts, 163840); +ensure_window_threshold(stream, Opts) -> + maps:get(stream_window_update_threshold, Opts, 163840). + +-spec update_window(1..16#7fffffff, State) + -> State when State::http2_machine(). +update_window(Size, State=#http2_machine{remote_window=RemoteWindow}) + when Size > 0 -> + State#http2_machine{remote_window=RemoteWindow + Size}. + +-spec update_window(cow_http2:streamid(), 1..16#7fffffff, State) + -> State when State::http2_machine(). +update_window(StreamID, Size, State) + when Size > 0 -> + Stream = #stream{remote_window=RemoteWindow} = stream_get(StreamID, State), + stream_store(Stream#stream{remote_window=RemoteWindow + Size}, State). + +%% Public interface to reset streams. + +-spec reset_stream(cow_http2:streamid(), State) + -> {ok, State} | {error, not_found} when State::http2_machine(). 
+reset_stream(StreamID, State=#http2_machine{streams=Streams0}) -> + case maps:take(StreamID, Streams0) of + {_, Streams} -> + {ok, stream_linger(StreamID, State#http2_machine{streams=Streams})}; + error -> + {error, not_found} + end. + +%% Retrieve the buffer size for all streams. + +-spec get_connection_local_buffer_size(http2_machine()) -> non_neg_integer(). +get_connection_local_buffer_size(#http2_machine{streams=Streams}) -> + maps:fold(fun(_, #stream{local_buffer_size=Size}, Acc) -> + Acc + Size + end, 0, Streams). + +%% Retrieve a setting value, or its default value if not set. + +-spec get_local_setting(atom(), http2_machine()) -> atom() | integer(). +get_local_setting(Key, #http2_machine{local_settings=Settings}) -> + maps:get(Key, Settings, default_setting_value(Key)). + +-spec get_remote_settings(http2_machine()) -> map(). +get_remote_settings(#http2_machine{mode=Mode, remote_settings=Settings}) -> + Defaults0 = #{ + header_table_size => default_setting_value(header_table_size), + enable_push => default_setting_value(enable_push), + max_concurrent_streams => default_setting_value(max_concurrent_streams), + initial_window_size => default_setting_value(initial_window_size), + max_frame_size => default_setting_value(max_frame_size), + max_header_list_size => default_setting_value(max_header_list_size) + }, + Defaults = case Mode of + server -> + Defaults0#{enable_connect_protocol => default_setting_value(enable_connect_protocol)}; + client -> + Defaults0 + end, + maps:merge(Defaults, Settings). + +default_setting_value(header_table_size) -> 4096; +default_setting_value(enable_push) -> true; +default_setting_value(max_concurrent_streams) -> infinity; +default_setting_value(initial_window_size) -> 65535; +default_setting_value(max_frame_size) -> 16384; +default_setting_value(max_header_list_size) -> infinity; +default_setting_value(enable_connect_protocol) -> false. + +%% Function to obtain the last known streamid received +%% for the purposes of sending a GOAWAY frame and closing the connection. + +-spec get_last_streamid(http2_machine()) -> cow_http2:streamid(). +get_last_streamid(#http2_machine{remote_streamid=RemoteStreamID}) -> + RemoteStreamID. + +%% Set last accepted streamid to the last known streamid, for the purpose +%% ignoring frames for remote streams created after sending GOAWAY. + +-spec set_last_streamid(http2_machine()) -> {cow_http2:streamid(), http2_machine()}. +set_last_streamid(State=#http2_machine{remote_streamid=StreamID, + last_remote_streamid=LastStreamID}) when StreamID =< LastStreamID-> + {StreamID, State#http2_machine{last_remote_streamid = StreamID}}. + +%% Retrieve the local buffer size for a stream. + +-spec get_stream_local_buffer_size(cow_http2:streamid(), http2_machine()) + -> {ok, non_neg_integer()} | {error, not_found | closed}. +get_stream_local_buffer_size(StreamID, State=#http2_machine{mode=Mode, + local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) -> + case stream_get(StreamID, State) of + #stream{local_buffer_size=Size} -> + {ok, Size}; + undefined when (?IS_LOCAL(Mode, StreamID) andalso (StreamID < LocalStreamID)) + orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID =< RemoteStreamID)) -> + {error, closed}; + undefined -> + {error, not_found} + end. + +%% Retrieve the local state for a stream, including the state in the queue. + +-spec get_stream_local_state(cow_http2:streamid(), http2_machine()) + -> {ok, idle | cow_http2:fin(), empty | nofin | fin} | {error, not_found | closed}. 
+get_stream_local_state(StreamID, State=#http2_machine{mode=Mode, + local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) -> + case stream_get(StreamID, State) of + #stream{local=IsFin, local_buffer=Q, local_trailers=undefined} -> + IsQueueFin = case queue:peek_r(Q) of + empty -> empty; + {value, {IsQueueFin0, _, _}} -> IsQueueFin0 + end, + {ok, IsFin, IsQueueFin}; + %% Trailers are queued so the local state is fin after the queue is drained. + #stream{local=IsFin} -> + {ok, IsFin, fin}; + undefined when (?IS_LOCAL(Mode, StreamID) andalso (StreamID < LocalStreamID)) + orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID =< RemoteStreamID)) -> + {error, closed}; + undefined -> + {error, not_found} + end. + +%% Retrieve the remote state for a stream. + +-spec get_stream_remote_state(cow_http2:streamid(), http2_machine()) + -> {ok, idle | cow_http2:fin()} | {error, not_found | closed}. +get_stream_remote_state(StreamID, State=#http2_machine{mode=Mode, + local_streamid=LocalStreamID, remote_streamid=RemoteStreamID}) -> + case stream_get(StreamID, State) of + #stream{remote=IsFin} -> + {ok, IsFin}; + undefined when (?IS_LOCAL(Mode, StreamID) andalso (StreamID < LocalStreamID)) + orelse ((not ?IS_LOCAL(Mode, StreamID)) andalso (StreamID =< RemoteStreamID)) -> + {error, closed}; + undefined -> + {error, not_found} + end. + +%% Query whether the stream was reset recently by the remote endpoint. + +-spec is_lingering_stream(cow_http2:streamid(), http2_machine()) -> boolean(). +is_lingering_stream(StreamID, #http2_machine{ + local_lingering_streams=Local, remote_lingering_streams=Remote}) -> + case lists:member(StreamID, Local) of + true -> true; + false -> lists:member(StreamID, Remote) + end. + +%% Stream-related functions. + +stream_get(StreamID, #http2_machine{streams=Streams}) -> + maps:get(StreamID, Streams, undefined). + +stream_store(#stream{id=StreamID, local=fin, remote=fin}, + State=#http2_machine{streams=Streams0}) -> + Streams = maps:remove(StreamID, Streams0), + State#http2_machine{streams=Streams}; +stream_store(Stream=#stream{id=StreamID}, + State=#http2_machine{streams=Streams}) -> + State#http2_machine{streams=Streams#{StreamID => Stream}}. + +%% @todo Don't send an RST_STREAM if one was already sent. +stream_reset(StreamID, State, Reason, HumanReadable) -> + {error, {stream_error, StreamID, Reason, HumanReadable}, + stream_linger(StreamID, State)}. + +stream_linger(StreamID, State=#http2_machine{local_lingering_streams=Lingering0}) -> + %% We only keep up to 100 streams in this state. @todo Make it configurable? + Lingering = [StreamID|lists:sublist(Lingering0, 100 - 1)], + State#http2_machine{local_lingering_streams=Lingering}. diff --git a/src/wsLib/cow_http_hd.erl b/src/wsLib/cow_http_hd.erl new file mode 100644 index 0000000..e2a0a1d --- /dev/null +++ b/src/wsLib/cow_http_hd.erl @@ -0,0 +1,3622 @@ +%% Copyright (c) 2014-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_http_hd). + +%% Functions are ordered by header name, with the parse +%% function before the build function. + +-export([parse_accept/1]). +-export([parse_accept_charset/1]). +% @todo -export([parse_accept_datetime/1]). RFC7089 +-export([parse_accept_encoding/1]). +% @todo -export([parse_accept_features/1]). RFC2295 +-export([parse_accept_language/1]). +-export([parse_accept_ranges/1]). +% @todo -export([parse_access_control_allow_credentials/1]). CORS +-export([access_control_allow_credentials/0]). +% @todo -export([parse_access_control_allow_headers/1]). CORS +-export([access_control_allow_headers/1]). +% @todo -export([parse_access_control_allow_methods/1]). CORS +-export([access_control_allow_methods/1]). +% @todo -export([parse_access_control_allow_origin/1]). CORS +-export([access_control_allow_origin/1]). +% @todo -export([parse_access_control_expose_headers/1]). CORS +-export([access_control_expose_headers/1]). +% @todo -export([parse_access_control_max_age/1]). CORS +-export([access_control_max_age/1]). +-export([parse_access_control_request_headers/1]). +-export([parse_access_control_request_method/1]). +-export([parse_age/1]). +-export([parse_allow/1]). +% @todo -export([parse_alternates/1]). RFC2295 +% @todo -export([parse_authentication_info/1]). RFC2617 +-export([parse_authorization/1]). +-export([parse_cache_control/1]). +-export([parse_connection/1]). +% @todo -export([parse_content_disposition/1]). RFC6266 +-export([parse_content_encoding/1]). +-export([parse_content_language/1]). +-export([parse_content_length/1]). +% @todo -export([parse_content_location/1]). RFC7231 +% @todo -export([parse_content_md5/1]). RFC2616 (deprecated) +-export([parse_content_range/1]). +% @todo -export([parse_content_security_policy/1]). CSP +% @todo -export([parse_content_security_policy_report_only/1]). CSP +-export([parse_content_type/1]). +-export([parse_cookie/1]). +-export([parse_date/1]). +% @todo -export([parse_digest/1]). RFC3230 +% @todo -export([parse_dnt/1]). http://donottrack.us/ +-export([parse_etag/1]). +-export([parse_expect/1]). +-export([parse_expires/1]). +% @todo -export([parse_forwarded/1]). RFC7239 +% @todo -export([parse_from/1]). RFC7231 +-export([parse_host/1]). +-export([parse_http2_settings/1]). +-export([parse_if_match/1]). +-export([parse_if_modified_since/1]). +-export([parse_if_none_match/1]). +-export([parse_if_range/1]). +-export([parse_if_unmodified_since/1]). +% @todo -export([parse_last_event_id/1]). eventsource +-export([parse_last_modified/1]). +-export([parse_link/1]). +% @todo -export([parse_location/1]). RFC7231 +-export([parse_max_forwards/1]). +% @todo -export([parse_memento_datetime/1]). RFC7089 +% @todo -export([parse_negotiate/1]). RFC2295 +-export([parse_origin/1]). +-export([parse_pragma/1]). +% @todo -export([parse_prefer/1]). RFC7240 +-export([parse_proxy_authenticate/1]). +% @todo -export([parse_proxy_authentication_info/1]). RFC2617 +-export([parse_proxy_authorization/1]). +% @todo -export([parse_proxy_support/1]). RFC4559 +% @todo -export([parse_public_key_pins/1]). Key Pinning (upcoming) +% @todo -export([parse_public_key_pins_report_only/1]). Key Pinning (upcoming) +-export([parse_range/1]). 
+% @todo -export([parse_referer/1]). RFC7231 +% @todo -export([parse_refresh/1]). Non-standard (examples: "5", "5; url=http://example.com/") +-export([parse_retry_after/1]). +-export([parse_sec_websocket_accept/1]). +-export([parse_sec_websocket_extensions/1]). +-export([parse_sec_websocket_key/1]). +% @todo -export([parse_sec_websocket_origin/1]). Websocket drafts 7 and 8 +-export([parse_sec_websocket_protocol_req/1]). +-export([parse_sec_websocket_protocol_resp/1]). +-export([parse_sec_websocket_version_req/1]). +-export([parse_sec_websocket_version_resp/1]). +% @todo -export([parse_server/1]). RFC7231 +-export([parse_set_cookie/1]). +% @todo -export([parse_strict_transport_security/1]). RFC6797 +% @todo -export([parse_tcn/1]). RFC2295 +-export([parse_te/1]). +-export([parse_trailer/1]). +-export([parse_transfer_encoding/1]). +-export([parse_upgrade/1]). +% @todo -export([parse_user_agent/1]). RFC7231 +% @todo -export([parse_variant_vary/1]). RFC2295 +-export([parse_variant_key/2]). +-export([variant_key/1]). +-export([parse_variants/1]). +-export([variants/1]). +-export([parse_vary/1]). +% @todo -export([parse_via/1]). RFC7230 +% @todo -export([parse_want_digest/1]). RFC3230 +% @todo -export([parse_warning/1]). RFC7234 +-export([parse_www_authenticate/1]). +% @todo -export([parse_x_content_duration/1]). Gecko/MDN (value: float) +% @todo -export([parse_x_dns_prefetch_control/1]). Various (value: "on"|"off") +-export([parse_x_forwarded_for/1]). +% @todo -export([parse_x_frame_options/1]). RFC7034 + +-type etag() :: {weak | strong, binary()}. +-export_type([etag/0]). + +-type media_type() :: {binary(), binary(), [{binary(), binary()}]}. +-export_type([media_type/0]). + +-type qvalue() :: 0..1000. +-export_type([qvalue/0]). + +-type websocket_version() :: 0..255. +-export_type([websocket_version/0]). + +-include("cow_inline.hrl"). +-include("cow_parse.hrl"). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). + +vector(Min, Max, Dom) -> ?LET(N, choose(Min, Max), vector(N, Dom)). +small_list(Dom) -> vector(0, 10, Dom). +small_non_empty_list(Dom) -> vector(1, 10, Dom). + +alpha_chars() -> "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ". +alphanum_chars() -> "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ". +digit_chars() -> "0123456789". + +ows() -> list(elements([$\s, $\t])). +alpha() -> elements(alpha_chars()). +alphanum() -> elements(alphanum_chars()). +digit() -> elements(digit_chars()). + +tchar() -> + frequency([ + {1, elements([$!, $#, $$, $%, $&, $', $*, $+, $-, $., $^, $_, $`, $|, $~])}, + {99, elements(alphanum_chars())} + ]). + +token() -> + ?LET(T, + non_empty(list(tchar())), + list_to_binary(T)). + +abnf_char() -> + integer(1, 127). + +vchar() -> + integer(33, 126). + +obs_text() -> + integer(128, 255). + +qdtext() -> + frequency([ + {99, elements("\t\s!#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~")}, + {1, obs_text()} + ]). + +quoted_pair() -> + [$\\, frequency([ + {99, elements("\t\s!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~")}, + {1, obs_text()} + ])]. + +quoted_string() -> + [$", list(frequency([{100, qdtext()}, {1, quoted_pair()}])), $"]. + +%% Helper function for ( token / quoted-string ) values. +unquote([$", V, $"]) -> unquote(V, <<>>); +unquote(V) -> V. + +unquote([], Acc) -> Acc; +unquote([[$\\, C]|Tail], Acc) -> unquote(Tail, << Acc/binary, C >>); +unquote([C|Tail], Acc) -> unquote(Tail, << Acc/binary, C >>). 
+ +parameter() -> + ?SUCHTHAT({K, _, _, _}, + {token(), oneof([token(), quoted_string()]), ows(), ows()}, + K =/= <<"q">>). + +weight() -> + frequency([ + {90, integer(0, 1000)}, + {10, undefined} + ]). + +%% Helper function for weight's qvalue formatting. +qvalue_to_iodata(0) -> <<"0">>; +qvalue_to_iodata(Q) when Q < 10 -> [<<"0.00">>, integer_to_binary(Q)]; +qvalue_to_iodata(Q) when Q < 100 -> [<<"0.0">>, integer_to_binary(Q)]; +qvalue_to_iodata(Q) when Q < 1000 -> [<<"0.">>, integer_to_binary(Q)]; +qvalue_to_iodata(1000) -> <<"1">>. +-endif. + +%% Accept header. + +-spec parse_accept(binary()) -> [{media_type(), qvalue(), [binary() | {binary(), binary()}]}]. +parse_accept(<<"*/*">>) -> + [{{<<"*">>, <<"*">>, []}, 1000, []}]; +parse_accept(Accept) -> + media_range_list(Accept, []). + +media_range_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> ?LOWER(media_range_type, R, Acc, <<>>); +media_range_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> media_range_list(R, Acc); +media_range_list(<<>>, Acc) -> lists:reverse(Acc). + +media_range_type(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> ?LOWER(media_range_type, R, Acc, T); +media_range_type(<< $/, C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> ?LOWER(media_range_subtype, R, Acc, T, <<>>); +%% Special clause for badly behaving user agents that send * instead of */*. +media_range_type(<< $;, R/bits >>, Acc, <<"*">>) -> media_range_before_param(R, Acc, <<"*">>, <<"*">>, []). + +media_range_subtype(<< C, R/bits >>, Acc, T, S) when ?IS_TOKEN(C) -> ?LOWER(media_range_subtype, R, Acc, T, S); +media_range_subtype(R, Acc, T, S) -> media_range_param_sep(R, Acc, T, S, []). + +media_range_param_sep(<<>>, Acc, T, S, P) -> lists:reverse([{{T, S, lists:reverse(P)}, 1000, []}|Acc]); +media_range_param_sep(<< $,, R/bits >>, Acc, T, S, P) -> media_range_list(R, [{{T, S, lists:reverse(P)}, 1000, []}|Acc]); +media_range_param_sep(<< $;, R/bits >>, Acc, T, S, P) -> media_range_before_param(R, Acc, T, S, P); +media_range_param_sep(<< C, R/bits >>, Acc, T, S, P) when ?IS_WS(C) -> media_range_param_sep(R, Acc, T, S, P). + +media_range_before_param(<< C, R/bits >>, Acc, T, S, P) when ?IS_WS(C) -> media_range_before_param(R, Acc, T, S, P); +media_range_before_param(<< $q, $=, R/bits >>, Acc, T, S, P) -> media_range_weight(R, Acc, T, S, P); +media_range_before_param(<< "charset=", $", R/bits >>, Acc, T, S, P) -> media_range_charset_quoted(R, Acc, T, S, P, <<>>); +media_range_before_param(<< "charset=", R/bits >>, Acc, T, S, P) -> media_range_charset(R, Acc, T, S, P, <<>>); +media_range_before_param(<< C, R/bits >>, Acc, T, S, P) when ?IS_TOKEN(C) -> ?LOWER(media_range_param, R, Acc, T, S, P, <<>>). + +media_range_charset_quoted(<< $", R/bits >>, Acc, T, S, P, V) -> + media_range_param_sep(R, Acc, T, S, [{<<"charset">>, V}|P]); +media_range_charset_quoted(<< $\\, C, R/bits >>, Acc, T, S, P, V) when ?IS_VCHAR_OBS(C) -> + ?LOWER(media_range_charset_quoted, R, Acc, T, S, P, V); +media_range_charset_quoted(<< C, R/bits >>, Acc, T, S, P, V) when ?IS_VCHAR_OBS(C) -> + ?LOWER(media_range_charset_quoted, R, Acc, T, S, P, V). + +media_range_charset(<< C, R/bits >>, Acc, T, S, P, V) when ?IS_TOKEN(C) -> + ?LOWER(media_range_charset, R, Acc, T, S, P, V); +media_range_charset(R, Acc, T, S, P, V) -> + media_range_param_sep(R, Acc, T, S, [{<<"charset">>, V}|P]). 
+ +media_range_param(<< $=, $", R/bits >>, Acc, T, S, P, K) -> media_range_quoted(R, Acc, T, S, P, K, <<>>); +media_range_param(<< $=, C, R/bits >>, Acc, T, S, P, K) when ?IS_TOKEN(C) -> media_range_value(R, Acc, T, S, P, K, << C >>); +media_range_param(<< C, R/bits >>, Acc, T, S, P, K) when ?IS_TOKEN(C) -> ?LOWER(media_range_param, R, Acc, T, S, P, K). + +media_range_quoted(<< $", R/bits >>, Acc, T, S, P, K, V) -> media_range_param_sep(R, Acc, T, S, [{K, V}|P]); +media_range_quoted(<< $\\, C, R/bits >>, Acc, T, S, P, K, V) when ?IS_VCHAR_OBS(C) -> media_range_quoted(R, Acc, T, S, P, K, << V/binary, C >>); +media_range_quoted(<< C, R/bits >>, Acc, T, S, P, K, V) when ?IS_VCHAR_OBS(C) -> media_range_quoted(R, Acc, T, S, P, K, << V/binary, C >>). + +media_range_value(<< C, R/bits >>, Acc, T, S, P, K, V) when ?IS_TOKEN(C) -> media_range_value(R, Acc, T, S, P, K, << V/binary, C >>); +media_range_value(R, Acc, T, S, P, K, V) -> media_range_param_sep(R, Acc, T, S, [{K, V}|P]). + +media_range_weight(<< "1.000", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 1000, []); +media_range_weight(<< "1.00", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 1000, []); +media_range_weight(<< "1.0", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 1000, []); +media_range_weight(<< "1.", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 1000, []); +media_range_weight(<< "1", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 1000, []); +media_range_weight(<< "0.", A, B, C, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100 + (B - $0) * 10 + (C - $0), []); +media_range_weight(<< "0.", A, B, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A), ?IS_DIGIT(B) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100 + (B - $0) * 10, []); +media_range_weight(<< "0.", A, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100, []); +media_range_weight(<< "0.", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 0, []); +media_range_weight(<< "0", R/bits >>, Acc, T, S, P) -> accept_ext_sep(R, Acc, T, S, P, 0, []); +%% Special clauses for badly behaving user agents that send .123 instead of 0.123. +media_range_weight(<< ".", A, B, C, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100 + (B - $0) * 10 + (C - $0), []); +media_range_weight(<< ".", A, B, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A), ?IS_DIGIT(B) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100 + (B - $0) * 10, []); +media_range_weight(<< ".", A, R/bits >>, Acc, T, S, P) when ?IS_DIGIT(A) -> + accept_ext_sep(R, Acc, T, S, P, (A - $0) * 100, []). + +accept_ext_sep(<<>>, Acc, T, S, P, Q, E) -> lists:reverse([{{T, S, lists:reverse(P)}, Q, lists:reverse(E)}|Acc]); +accept_ext_sep(<< $,, R/bits >>, Acc, T, S, P, Q, E) -> media_range_list(R, [{{T, S, lists:reverse(P)}, Q, lists:reverse(E)}|Acc]); +accept_ext_sep(<< $;, R/bits >>, Acc, T, S, P, Q, E) -> accept_before_ext(R, Acc, T, S, P, Q, E); +accept_ext_sep(<< C, R/bits >>, Acc, T, S, P, Q, E) when ?IS_WS(C) -> accept_ext_sep(R, Acc, T, S, P, Q, E). + +accept_before_ext(<< C, R/bits >>, Acc, T, S, P, Q, E) when ?IS_WS(C) -> accept_before_ext(R, Acc, T, S, P, Q, E); +accept_before_ext(<< C, R/bits >>, Acc, T, S, P, Q, E) when ?IS_TOKEN(C) -> ?LOWER(accept_ext, R, Acc, T, S, P, Q, E, <<>>). 
+ +accept_ext(<< $=, $", R/bits >>, Acc, T, S, P, Q, E, K) -> accept_quoted(R, Acc, T, S, P, Q, E, K, <<>>); +accept_ext(<< $=, C, R/bits >>, Acc, T, S, P, Q, E, K) when ?IS_TOKEN(C) -> accept_value(R, Acc, T, S, P, Q, E, K, << C >>); +accept_ext(<< C, R/bits >>, Acc, T, S, P, Q, E, K) when ?IS_TOKEN(C) -> ?LOWER(accept_ext, R, Acc, T, S, P, Q, E, K); +accept_ext(R, Acc, T, S, P, Q, E, K) -> accept_ext_sep(R, Acc, T, S, P, Q, [K|E]). + +accept_quoted(<< $", R/bits >>, Acc, T, S, P, Q, E, K, V) -> accept_ext_sep(R, Acc, T, S, P, Q, [{K, V}|E]); +accept_quoted(<< $\\, C, R/bits >>, Acc, T, S, P, Q, E, K, V) when ?IS_VCHAR_OBS(C) -> accept_quoted(R, Acc, T, S, P, Q, E, K, << V/binary, C >>); +accept_quoted(<< C, R/bits >>, Acc, T, S, P, Q, E, K, V) when ?IS_VCHAR_OBS(C) -> accept_quoted(R, Acc, T, S, P, Q, E, K, << V/binary, C >>). + +accept_value(<< C, R/bits >>, Acc, T, S, P, Q, E, K, V) when ?IS_TOKEN(C) -> accept_value(R, Acc, T, S, P, Q, E, K, << V/binary, C >>); +accept_value(R, Acc, T, S, P, Q, E, K, V) -> accept_ext_sep(R, Acc, T, S, P, Q, [{K, V}|E]). + +-ifdef(TEST). +accept_ext() -> + oneof([token(), parameter()]). + +accept_exts() -> + frequency([ + {90, []}, + {10, small_list(accept_ext())} + ]). + +accept_param() -> + frequency([ + {90, parameter()}, + {10, {<<"charset">>, oneof([token(), quoted_string()]), <<>>, <<>>}} + ]). + +accept_params() -> + small_list(accept_param()). + +accept() -> + ?LET({T, S, P, W, E}, + {token(), token(), accept_params(), weight(), accept_exts()}, + {T, S, P, W, E, iolist_to_binary([T, $/, S, + [[OWS1, $;, OWS2, K, $=, V] || {K, V, OWS1, OWS2} <- P], + case W of + undefined -> []; + _ -> [ + [<<";q=">>, qvalue_to_iodata(W)], + [case Ext of + {K, V, OWS1, OWS2} -> [OWS1, $;, OWS2, K, $=, V]; + K -> [$;, K] + end || Ext <- E]] + end])} + ). + +prop_parse_accept() -> + ?FORALL(L, + vector(1, 50, accept()), + begin + << _, Accept/binary >> = iolist_to_binary([[$,, A] || {_, _, _, _, _, A} <- L]), + ResL = parse_accept(Accept), + CheckedL = [begin + ExpectedP = [case ?LOWER(K) of + <<"charset">> -> {<<"charset">>, ?LOWER(unquote(V))}; + LowK -> {LowK, unquote(V)} + end || {K, V, _, _} <- P], + ExpectedE = [case Ext of + {K, V, _, _} -> {?LOWER(K), unquote(V)}; + K -> ?LOWER(K) + end || Ext <- E], + ResT =:= ?LOWER(T) + andalso ResS =:= ?LOWER(S) + andalso ResP =:= ExpectedP + andalso (ResW =:= W orelse (W =:= undefined andalso ResW =:= 1000)) + andalso ((W =:= undefined andalso ResE =:= []) orelse (W =/= undefined andalso ResE =:= ExpectedE)) + end || {{T, S, P, W, E, _}, {{ResT, ResS, ResP}, ResW, ResE}} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end + ). 
+ +parse_accept_test_() -> + Tests = [ + {<<>>, []}, + {<<" ">>, []}, + {<<"audio/*; q=0.2, audio/basic">>, [ + {{<<"audio">>, <<"*">>, []}, 200, []}, + {{<<"audio">>, <<"basic">>, []}, 1000, []} + ]}, + {<<"text/plain; q=0.5, text/html, " + "text/x-dvi; q=0.8, text/x-c">>, [ + {{<<"text">>, <<"plain">>, []}, 500, []}, + {{<<"text">>, <<"html">>, []}, 1000, []}, + {{<<"text">>, <<"x-dvi">>, []}, 800, []}, + {{<<"text">>, <<"x-c">>, []}, 1000, []} + ]}, + {<<"text/*, text/html, text/html;level=1, */*">>, [ + {{<<"text">>, <<"*">>, []}, 1000, []}, + {{<<"text">>, <<"html">>, []}, 1000, []}, + {{<<"text">>, <<"html">>, [{<<"level">>, <<"1">>}]}, 1000, []}, + {{<<"*">>, <<"*">>, []}, 1000, []} + ]}, + {<<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5">>, [ + {{<<"text">>, <<"*">>, []}, 300, []}, + {{<<"text">>, <<"html">>, []}, 700, []}, + {{<<"text">>, <<"html">>, [{<<"level">>, <<"1">>}]}, 1000, []}, + {{<<"text">>, <<"html">>, [{<<"level">>, <<"2">>}]}, 400, []}, + {{<<"*">>, <<"*">>, []}, 500, []} + ]}, + {<<"text/html;level=1;quoted=\"hi hi hi\";" + "q=0.123;standalone;complex=gits, text/plain">>, [ + {{<<"text">>, <<"html">>, + [{<<"level">>, <<"1">>}, {<<"quoted">>, <<"hi hi hi">>}]}, 123, + [<<"standalone">>, {<<"complex">>, <<"gits">>}]}, + {{<<"text">>, <<"plain">>, []}, 1000, []} + ]}, + {<<"text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2">>, [ + {{<<"text">>, <<"html">>, []}, 1000, []}, + {{<<"image">>, <<"gif">>, []}, 1000, []}, + {{<<"image">>, <<"jpeg">>, []}, 1000, []}, + {{<<"*">>, <<"*">>, []}, 200, []}, + {{<<"*">>, <<"*">>, []}, 200, []} + ]}, + {<<"text/plain; charset=UTF-8">>, [ + {{<<"text">>, <<"plain">>, [{<<"charset">>, <<"utf-8">>}]}, 1000, []} + ]} + ], + [{V, fun() -> R = parse_accept(V) end} || {V, R} <- Tests]. + +parse_accept_error_test_() -> + Tests = [ + <<"audio/basic, */;q=0.5">>, + <<"audio/, audio/basic">>, + <<"aud\tio/basic">>, + <<"audio/basic;t=\"zero \\", 0, " woo\"">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_accept(V)) end} || V <- Tests]. + +horse_parse_accept() -> + horse:repeat(20000, + parse_accept(<<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5">>) + ). +-endif. + +%% Accept-Charset header. + +-spec parse_accept_charset(binary()) -> [{binary(), qvalue()}]. +parse_accept_charset(Charset) -> + nonempty(conneg_list(Charset, [])). + +conneg_list(<<>>, Acc) -> lists:reverse(Acc); +conneg_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> conneg_list(R, Acc); +conneg_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> ?LOWER(conneg, R, Acc, <<>>). + +conneg(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> ?LOWER(conneg, R, Acc, T); +conneg(R, Acc, T) -> conneg_param_sep(R, Acc, T). + +conneg_param_sep(<<>>, Acc, T) -> lists:reverse([{T, 1000}|Acc]); +conneg_param_sep(<< $,, R/bits >>, Acc, T) -> conneg_list(R, [{T, 1000}|Acc]); +conneg_param_sep(<< $;, R/bits >>, Acc, T) -> conneg_before_weight(R, Acc, T); +conneg_param_sep(<< C, R/bits >>, Acc, T) when ?IS_WS(C) -> conneg_param_sep(R, Acc, T). + +conneg_before_weight(<< C, R/bits >>, Acc, T) when ?IS_WS(C) -> conneg_before_weight(R, Acc, T); +conneg_before_weight(<< $q, $=, R/bits >>, Acc, T) -> conneg_weight(R, Acc, T); +%% Special clause for broken user agents that confuse ; and , separators. +conneg_before_weight(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> ?LOWER(conneg, R, [{T, 1000}|Acc], <<>>). 
+ +conneg_weight(<< "1.000", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 1000}|Acc]); +conneg_weight(<< "1.00", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 1000}|Acc]); +conneg_weight(<< "1.0", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 1000}|Acc]); +conneg_weight(<< "1.", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 1000}|Acc]); +conneg_weight(<< "1", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 1000}|Acc]); +conneg_weight(<< "0.", A, B, C, R/bits >>, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + conneg_list_sep(R, [{T, (A - $0) * 100 + (B - $0) * 10 + (C - $0)}|Acc]); +conneg_weight(<< "0.", A, B, R/bits >>, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B) -> + conneg_list_sep(R, [{T, (A - $0) * 100 + (B - $0) * 10}|Acc]); +conneg_weight(<< "0.", A, R/bits >>, Acc, T) when ?IS_DIGIT(A) -> + conneg_list_sep(R, [{T, (A - $0) * 100}|Acc]); +conneg_weight(<< "0.", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 0}|Acc]); +conneg_weight(<< "0", R/bits >>, Acc, T) -> conneg_list_sep(R, [{T, 0}|Acc]). + +conneg_list_sep(<<>>, Acc) -> lists:reverse(Acc); +conneg_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> conneg_list_sep(R, Acc); +conneg_list_sep(<< $,, R/bits >>, Acc) -> conneg_list(R, Acc). + +-ifdef(TEST). +accept_charset() -> + ?LET({C, W}, + {token(), weight()}, + {C, W, iolist_to_binary([C, case W of + undefined -> []; + _ -> [<<";q=">>, qvalue_to_iodata(W)] + end])} + ). + +prop_parse_accept_charset() -> + ?FORALL(L, + non_empty(list(accept_charset())), + begin + << _, AcceptCharset/binary >> = iolist_to_binary([[$,, A] || {_, _, A} <- L]), + ResL = parse_accept_charset(AcceptCharset), + CheckedL = [begin + ResC =:= ?LOWER(Ch) + andalso (ResW =:= W orelse (W =:= undefined andalso ResW =:= 1000)) + end || {{Ch, W, _}, {ResC, ResW}} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_accept_charset_test_() -> + Tests = [ + {<<"iso-8859-5, unicode-1-1;q=0.8">>, [ + {<<"iso-8859-5">>, 1000}, + {<<"unicode-1-1">>, 800} + ]}, + %% Some user agents send this invalid value for the Accept-Charset header + {<<"ISO-8859-1;utf-8;q=0.7,*;q=0.7">>, [ + {<<"iso-8859-1">>, 1000}, + {<<"utf-8">>, 700}, + {<<"*">>, 700} + ]} + ], + [{V, fun() -> R = parse_accept_charset(V) end} || {V, R} <- Tests]. + +parse_accept_charset_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_accept_charset(V)) end} || V <- Tests]. + +horse_parse_accept_charset() -> + horse:repeat(20000, + parse_accept_charset(<<"iso-8859-5, unicode-1-1;q=0.8">>) + ). +-endif. + +%% Accept-Encoding header. + +-spec parse_accept_encoding(binary()) -> [{binary(), qvalue()}]. +parse_accept_encoding(Encoding) -> + conneg_list(Encoding, []). + +-ifdef(TEST). +accept_encoding() -> + ?LET({E, W}, + {token(), weight()}, + {E, W, iolist_to_binary([E, case W of + undefined -> []; + _ -> [<<";q=">>, qvalue_to_iodata(W)] + end])} + ). + +%% @todo This property seems useless, see prop_accept_charset. +prop_parse_accept_encoding() -> + ?FORALL(L, + non_empty(list(accept_encoding())), + begin + << _, AcceptEncoding/binary >> = iolist_to_binary([[$,, A] || {_, _, A} <- L]), + ResL = parse_accept_encoding(AcceptEncoding), + CheckedL = [begin + ResE =:= ?LOWER(E) + andalso (ResW =:= W orelse (W =:= undefined andalso ResW =:= 1000)) + end || {{E, W, _}, {ResE, ResW}} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). 
+ +parse_accept_encoding_test_() -> + Tests = [ + {<<>>, []}, + {<<"*">>, [{<<"*">>, 1000}]}, + {<<"compress, gzip">>, [ + {<<"compress">>, 1000}, + {<<"gzip">>, 1000} + ]}, + {<<"compress;q=0.5, gzip;q=1.0">>, [ + {<<"compress">>, 500}, + {<<"gzip">>, 1000} + ]}, + {<<"gzip;q=1.0, identity; q=0.5, *;q=0">>, [ + {<<"gzip">>, 1000}, + {<<"identity">>, 500}, + {<<"*">>, 0} + ]} + ], + [{V, fun() -> R = parse_accept_encoding(V) end} || {V, R} <- Tests]. + +horse_parse_accept_encoding() -> + horse:repeat(20000, + parse_accept_encoding(<<"gzip;q=1.0, identity; q=0.5, *;q=0">>) + ). +-endif. + +%% Accept-Language header. + +-spec parse_accept_language(binary()) -> [{binary(), qvalue()}]. +parse_accept_language(LanguageRange) -> + nonempty(language_range_list(LanguageRange, [])). + +language_range_list(<<>>, Acc) -> lists:reverse(Acc); +language_range_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> language_range_list(R, Acc); +language_range_list(<< $*, R/bits >>, Acc) -> language_range_param_sep(R, Acc, <<"*">>); +language_range_list(<< C, R/bits >>, Acc) when ?IS_ALPHA(C) -> + ?LOWER(language_range, R, Acc, 1, <<>>). + +language_range(<< $-, C, R/bits >>, Acc, _, T) when ?IS_ALPHANUM(C) -> + ?LOWER(language_range_sub, R, Acc, 1, << T/binary, $- >>); +language_range(<< C, R/bits >>, Acc, N, T) when ?IS_ALPHA(C), N < 8 -> + ?LOWER(language_range, R, Acc, N + 1, T); +language_range(R, Acc, _, T) -> language_range_param_sep(R, Acc, T). + +language_range_sub(<< $-, R/bits >>, Acc, _, T) -> language_range_sub(R, Acc, 0, << T/binary, $- >>); +language_range_sub(<< C, R/bits >>, Acc, N, T) when ?IS_ALPHANUM(C), N < 8 -> + ?LOWER(language_range_sub, R, Acc, N + 1, T); +language_range_sub(R, Acc, _, T) -> language_range_param_sep(R, Acc, T). + +language_range_param_sep(<<>>, Acc, T) -> lists:reverse([{T, 1000}|Acc]); +language_range_param_sep(<< $,, R/bits >>, Acc, T) -> language_range_list(R, [{T, 1000}|Acc]); +language_range_param_sep(<< $;, R/bits >>, Acc, T) -> language_range_before_weight(R, Acc, T); +language_range_param_sep(<< C, R/bits >>, Acc, T) when ?IS_WS(C) -> language_range_param_sep(R, Acc, T). + +language_range_before_weight(<< C, R/bits >>, Acc, T) when ?IS_WS(C) -> language_range_before_weight(R, Acc, T); +language_range_before_weight(<< $q, $=, R/bits >>, Acc, T) -> language_range_weight(R, Acc, T); +%% Special clause for broken user agents that confuse ; and , separators. +language_range_before_weight(<< C, R/bits >>, Acc, T) when ?IS_ALPHA(C) -> + ?LOWER(language_range, R, [{T, 1000}|Acc], 1, <<>>). 
+ +language_range_weight(<< "1.000", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 1000}|Acc]); +language_range_weight(<< "1.00", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 1000}|Acc]); +language_range_weight(<< "1.0", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 1000}|Acc]); +language_range_weight(<< "1.", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 1000}|Acc]); +language_range_weight(<< "1", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 1000}|Acc]); +language_range_weight(<< "0.", A, B, C, R/bits >>, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + language_range_list_sep(R, [{T, (A - $0) * 100 + (B - $0) * 10 + (C - $0)}|Acc]); +language_range_weight(<< "0.", A, B, R/bits >>, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B) -> + language_range_list_sep(R, [{T, (A - $0) * 100 + (B - $0) * 10}|Acc]); +language_range_weight(<< "0.", A, R/bits >>, Acc, T) when ?IS_DIGIT(A) -> + language_range_list_sep(R, [{T, (A - $0) * 100}|Acc]); +language_range_weight(<< "0.", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 0}|Acc]); +language_range_weight(<< "0", R/bits >>, Acc, T) -> language_range_list_sep(R, [{T, 0}|Acc]). + +language_range_list_sep(<<>>, Acc) -> lists:reverse(Acc); +language_range_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> language_range_list_sep(R, Acc); +language_range_list_sep(<< $,, R/bits >>, Acc) -> language_range_list(R, Acc). + +-ifdef(TEST). +language_range_tag() -> + vector(1, 8, alpha()). + +language_range_subtag() -> + [$-, vector(1, 8, alphanum())]. + +language_range() -> + [language_range_tag(), small_list(language_range_subtag())]. + +accept_language() -> + ?LET({R, W}, + {language_range(), weight()}, + {iolist_to_binary(R), W, iolist_to_binary([R, case W of + undefined -> []; + _ -> [<<";q=">>, qvalue_to_iodata(W)] + end])} + ). + +prop_parse_accept_language() -> + ?FORALL(L, + non_empty(list(accept_language())), + begin + << _, AcceptLanguage/binary >> = iolist_to_binary([[$,, A] || {_, _, A} <- L]), + ResL = parse_accept_language(AcceptLanguage), + CheckedL = [begin + ResR =:= ?LOWER(R) + andalso (ResW =:= W orelse (W =:= undefined andalso ResW =:= 1000)) + end || {{R, W, _}, {ResR, ResW}} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_accept_language_test_() -> + Tests = [ + {<<"da, en-gb;q=0.8, en;q=0.7">>, [ + {<<"da">>, 1000}, + {<<"en-gb">>, 800}, + {<<"en">>, 700} + ]}, + {<<"en, en-US, en-cockney, i-cherokee, x-pig-latin, es-419">>, [ + {<<"en">>, 1000}, + {<<"en-us">>, 1000}, + {<<"en-cockney">>, 1000}, + {<<"i-cherokee">>, 1000}, + {<<"x-pig-latin">>, 1000}, + {<<"es-419">>, 1000} + ]} + ], + [{V, fun() -> R = parse_accept_language(V) end} || {V, R} <- Tests]. + +parse_accept_language_error_test_() -> + Tests = [ + <<>>, + <<"loooooong">>, + <<"en-us-loooooong">>, + <<"419-en-us">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_accept_language(V)) end} || V <- Tests]. + +horse_parse_accept_language() -> + horse:repeat(20000, + parse_accept_language(<<"da, en-gb;q=0.8, en;q=0.7">>) + ). +-endif. + +%% Accept-Ranges header. + +-spec parse_accept_ranges(binary()) -> [binary()]. +parse_accept_ranges(<<"none">>) -> []; +parse_accept_ranges(<<"bytes">>) -> [<<"bytes">>]; +parse_accept_ranges(AcceptRanges) -> + nonempty(token_ci_list(AcceptRanges, [])). + +-ifdef(TEST). 
+parse_accept_ranges_test_() -> + Tests = [ + {<<"bytes">>, [<<"bytes">>]}, + {<<"none">>, []}, + {<<"bytes, pages, kilos">>, [<<"bytes">>, <<"pages">>, <<"kilos">>]} + ], + [{V, fun() -> R = parse_accept_ranges(V) end} || {V, R} <- Tests]. + +parse_accept_ranges_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_accept_ranges(V)) end} || V <- Tests]. + +horse_parse_accept_ranges_none() -> + horse:repeat(200000, + parse_accept_ranges(<<"none">>) + ). + +horse_parse_accept_ranges_bytes() -> + horse:repeat(200000, + parse_accept_ranges(<<"bytes">>) + ). + +horse_parse_accept_ranges_other() -> + horse:repeat(200000, + parse_accept_ranges(<<"bytes, pages, kilos">>) + ). +-endif. + +%% Access-Control-Allow-Credentials header. + +-spec access_control_allow_credentials() -> iodata(). +access_control_allow_credentials() -> <<"true">>. + +%% Access-Control-Allow-Headers header. + +-spec access_control_allow_headers([binary()]) -> iodata(). +access_control_allow_headers(Headers) -> + join_token_list(nonempty(Headers)). + +-ifdef(TEST). +access_control_allow_headers_test_() -> + Tests = [ + {[<<"accept">>], <<"accept">>}, + {[<<"accept">>, <<"authorization">>, <<"content-type">>], <<"accept, authorization, content-type">>} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = iolist_to_binary(access_control_allow_headers(V)) end} || {V, R} <- Tests]. + +access_control_allow_headers_error_test_() -> + Tests = [ + [] + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> {'EXIT', _} = (catch access_control_allow_headers(V)) end} || V <- Tests]. + +horse_access_control_allow_headers() -> + horse:repeat(200000, + access_control_allow_headers([<<"accept">>, <<"authorization">>, <<"content-type">>]) + ). +-endif. + +%% Access-Control-Allow-Methods header. + +-spec access_control_allow_methods([binary()]) -> iodata(). +access_control_allow_methods(Methods) -> + join_token_list(nonempty(Methods)). + +-ifdef(TEST). +access_control_allow_methods_test_() -> + Tests = [ + {[<<"GET">>], <<"GET">>}, + {[<<"GET">>, <<"POST">>, <<"DELETE">>], <<"GET, POST, DELETE">>} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = iolist_to_binary(access_control_allow_methods(V)) end} || {V, R} <- Tests]. + +access_control_allow_methods_error_test_() -> + Tests = [ + [] + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> {'EXIT', _} = (catch access_control_allow_methods(V)) end} || V <- Tests]. + +horse_access_control_allow_methods() -> + horse:repeat(200000, + access_control_allow_methods([<<"GET">>, <<"POST">>, <<"DELETE">>]) + ). +-endif. + +%% Access-Control-Allow-Origin header. + +-spec access_control_allow_origin({binary(), binary(), 0..65535} | reference() | '*') -> iodata(). +access_control_allow_origin({Scheme, Host, Port}) -> + case default_port(Scheme) of + Port -> [Scheme, <<"://">>, Host]; + _ -> [Scheme, <<"://">>, Host, <<":">>, integer_to_binary(Port)] + end; +access_control_allow_origin('*') -> <<$*>>; +access_control_allow_origin(Ref) when is_reference(Ref) -> <<"null">>. + +-ifdef(TEST). 
+access_control_allow_origin_test_() -> + Tests = [ + {{<<"http">>, <<"www.example.org">>, 8080}, <<"http://www.example.org:8080">>}, + {{<<"http">>, <<"www.example.org">>, 80}, <<"http://www.example.org">>}, + {{<<"http">>, <<"192.0.2.1">>, 8080}, <<"http://192.0.2.1:8080">>}, + {{<<"http">>, <<"192.0.2.1">>, 80}, <<"http://192.0.2.1">>}, + {{<<"http">>, <<"[2001:db8::1]">>, 8080}, <<"http://[2001:db8::1]:8080">>}, + {{<<"http">>, <<"[2001:db8::1]">>, 80}, <<"http://[2001:db8::1]">>}, + {{<<"http">>, <<"[::ffff:192.0.2.1]">>, 8080}, <<"http://[::ffff:192.0.2.1]:8080">>}, + {{<<"http">>, <<"[::ffff:192.0.2.1]">>, 80}, <<"http://[::ffff:192.0.2.1]">>}, + {make_ref(), <<"null">>}, + {'*', <<$*>>} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = iolist_to_binary(access_control_allow_origin(V)) end} || {V, R} <- Tests]. + +horse_access_control_allow_origin() -> + horse:repeat(200000, + access_control_allow_origin({<<"http">>, <<"example.org">>, 8080}) + ). +-endif. + +%% Access-Control-Expose-Headers header. + +-spec access_control_expose_headers([binary()]) -> iodata(). +access_control_expose_headers(Headers) -> + join_token_list(nonempty(Headers)). + +-ifdef(TEST). +access_control_expose_headers_test_() -> + Tests = [ + {[<<"accept">>], <<"accept">>}, + {[<<"accept">>, <<"authorization">>, <<"content-type">>], <<"accept, authorization, content-type">>} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = iolist_to_binary(access_control_expose_headers(V)) end} || {V, R} <- Tests]. + +access_control_expose_headers_error_test_() -> + Tests = [ + [] + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> {'EXIT', _} = (catch access_control_expose_headers(V)) end} || V <- Tests]. + +horse_access_control_expose_headers() -> + horse:repeat(200000, + access_control_expose_headers([<<"accept">>, <<"authorization">>, <<"content-type">>]) + ). +-endif. + +%% Access-Control-Max-Age header. + +-spec access_control_max_age(non_neg_integer()) -> iodata(). +access_control_max_age(MaxAge) -> integer_to_binary(MaxAge). + +-ifdef(TEST). +access_control_max_age_test_() -> + Tests = [ + {0, <<"0">>}, + {42, <<"42">>}, + {69, <<"69">>}, + {1337, <<"1337">>}, + {3495, <<"3495">>}, + {1234567890, <<"1234567890">>} + ], + [{V, fun() -> R = access_control_max_age(V) end} || {V, R} <- Tests]. +-endif. + +%% Access-Control-Request-Headers header. + +-spec parse_access_control_request_headers(binary()) -> [binary()]. +parse_access_control_request_headers(Headers) -> + token_ci_list(Headers, []). + +-ifdef(TEST). +headers() -> + ?LET(L, + list({ows(), ows(), token()}), + case L of + [] -> {[], <<>>}; + _ -> + << _, Headers/binary >> = iolist_to_binary([[OWS1, $,, OWS2, M] || {OWS1, OWS2, M} <- L]), + {[?LOWER(M) || {_, _, M} <- L], Headers} + end). + +prop_parse_access_control_request_headers() -> + ?FORALL({L, Headers}, + headers(), + L =:= parse_access_control_request_headers(Headers)). + +parse_access_control_request_headers_test_() -> + Tests = [ + {<<>>, []}, + {<<"Content-Type">>, [<<"content-type">>]}, + {<<"accept, authorization, content-type">>, [<<"accept">>, <<"authorization">>, <<"content-type">>]}, + {<<"accept,, , authorization,content-type">>, [<<"accept">>, <<"authorization">>, <<"content-type">>]} + ], + [{V, fun() -> R = parse_access_control_request_headers(V) end} || {V, R} <- Tests]. + +horse_parse_access_control_request_headers() -> + horse:repeat(200000, + parse_access_control_request_headers(<<"accept, authorization, content-type">>) + ). +-endif. 
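The access_control_* build functions above only produce header values as iodata; pairing them with header names is left to the caller. A small sketch of a CORS preflight response header map built from them, in the style of the cowboy_req headers maps used elsewhere in this patch; the origin tuple and the concrete values are examples only.

%% Hypothetical preflight response headers assembled from the build helpers above;
%% not part of this patch.
cors_preflight_headers() ->
	#{
		<<"access-control-allow-origin">> =>
			cow_http_hd:access_control_allow_origin({<<"https">>, <<"example.org">>, 443}),
		<<"access-control-allow-methods">> =>
			cow_http_hd:access_control_allow_methods([<<"GET">>, <<"POST">>]),
		<<"access-control-allow-headers">> =>
			cow_http_hd:access_control_allow_headers([<<"content-type">>]),
		<<"access-control-max-age">> =>
			cow_http_hd:access_control_max_age(3600)
	}.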
+
+%% Access-Control-Request-Method header.
+
+-spec parse_access_control_request_method(binary()) -> binary().
+parse_access_control_request_method(Method) ->
+	true = <<>> =/= Method,
+	ok = validate_token(Method),
+	Method.
+
+validate_token(<< C, R/bits >>) when ?IS_TOKEN(C) -> validate_token(R);
+validate_token(<<>>) -> ok.
+
+-ifdef(TEST).
+parse_access_control_request_method_test_() ->
+	Tests = [
+		<<"GET">>,
+		<<"HEAD">>,
+		<<"POST">>,
+		<<"PUT">>,
+		<<"DELETE">>,
+		<<"TRACE">>,
+		<<"CONNECT">>,
+		<<"whatever">>
+	],
+	[{V, fun() -> V = parse_access_control_request_method(V) end} || V <- Tests].
+
+parse_access_control_request_method_error_test_() ->
+	Tests = [
+		<<>>
+	],
+	[{V, fun() -> {'EXIT', _} = (catch parse_access_control_request_method(V)) end} || V <- Tests].
+
+horse_parse_access_control_request_method() ->
+	horse:repeat(200000,
+		parse_access_control_request_method(<<"POST">>)
+	).
+-endif.
+
+%% Age header.
+
+-spec parse_age(binary()) -> non_neg_integer().
+parse_age(Age) ->
+	I = binary_to_integer(Age),
+	true = I >= 0,
+	I.
+
+-ifdef(TEST).
+parse_age_test_() ->
+	Tests = [
+		{<<"0">>, 0},
+		{<<"42">>, 42},
+		{<<"69">>, 69},
+		{<<"1337">>, 1337},
+		{<<"3495">>, 3495},
+		{<<"1234567890">>, 1234567890}
+	],
+	[{V, fun() -> R = parse_age(V) end} || {V, R} <- Tests].
+
+parse_age_error_test_() ->
+	Tests = [
+		<<>>,
+		<<"123, 123">>,
+		<<"4.17">>
+	],
+	[{V, fun() -> {'EXIT', _} = (catch parse_age(V)) end} || V <- Tests].
+-endif.
+
+%% Allow header.
+
+-spec parse_allow(binary()) -> [binary()].
+parse_allow(Allow) ->
+	token_list(Allow, []).
+
+-ifdef(TEST).
+allow() ->
+	?LET(L,
+		list({ows(), ows(), token()}),
+		case L of
+			[] -> {[], <<>>};
+			_ ->
+				<< _, Allow/binary >> = iolist_to_binary([[OWS1, $,, OWS2, M] || {OWS1, OWS2, M} <- L]),
+				{[M || {_, _, M} <- L], Allow}
+		end).
+
+prop_parse_allow() ->
+	?FORALL({L, Allow},
+		allow(),
+		L =:= parse_allow(Allow)).
+
+parse_allow_test_() ->
+	Tests = [
+		{<<>>, []},
+		{<<"GET, HEAD, PUT">>, [<<"GET">>, <<"HEAD">>, <<"PUT">>]}
+	],
+	[{V, fun() -> R = parse_allow(V) end} || {V, R} <- Tests].
+
+horse_parse_allow() ->
+	horse:repeat(200000,
+		parse_allow(<<"GET, HEAD, PUT">>)
+	).
+-endif.
+
+%% Authorization header.
+%%
+%% We support Basic, Digest and Bearer schemes only.
+%%
+%% In the Digest case we do not validate that the mandatory
+%% fields are present. When parsing auth-params, we do not
+%% accept BWS characters around the "=".
+
+-spec parse_authorization(binary())
+	-> {basic, binary(), binary()}
+	| {bearer, binary()}
+	| {digest, [{binary(), binary()}]}.
+parse_authorization(<<B, A, S, I, C, " ", R/bits>>)
+	when ((B =:= $B) or (B =:= $b)), ((A =:= $A) or (A =:= $a)),
+		((S =:= $S) or (S =:= $s)), ((I =:= $I) or (I =:= $i)),
+		((C =:= $C) or (C =:= $c)) ->
+	auth_basic(base64:decode(R), <<>>);
+parse_authorization(<<B, E1, A, R1, E2, R2, " ", R/bits>>)
+	when (R =/= <<>>), ((B =:= $B) or (B =:= $b)),
+		((E1 =:= $E) or (E1 =:= $e)), ((A =:= $A) or (A =:= $a)),
+		((R1 =:= $R) or (R1 =:= $r)), ((E2 =:= $E) or (E2 =:= $e)),
+		((R2 =:= $R) or (R2 =:= $r)) ->
+	validate_auth_bearer(R),
+	{bearer, R};
+parse_authorization(<<D, I, G, E, S, T, " ", R/bits>>)
+	when ((D =:= $D) or (D =:= $d)), ((I =:= $I) or (I =:= $i)),
+		((G =:= $G) or (G =:= $g)), ((E =:= $E) or (E =:= $e)),
+		((S =:= $S) or (S =:= $s)), ((T =:= $T) or (T =:= $t)) ->
+	{digest, nonempty(auth_digest_list(R, []))}.
+
+auth_basic(<< $:, Password/bits >>, UserID) -> {basic, UserID, Password};
+auth_basic(<< C, R/bits >>, UserID) -> auth_basic(R, << UserID/binary, C >>). 
+ +validate_auth_bearer(<< C, R/bits >>) when ?IS_TOKEN68(C) -> validate_auth_bearer(R); +validate_auth_bearer(<< $=, R/bits >>) -> validate_auth_bearer_eq(R); +validate_auth_bearer(<<>>) -> ok. + +validate_auth_bearer_eq(<< $=, R/bits >>) -> validate_auth_bearer_eq(R); +validate_auth_bearer_eq(<<>>) -> ok. + +auth_digest_list(<<>>, Acc) -> lists:reverse(Acc); +auth_digest_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> auth_digest_list(R, Acc); +auth_digest_list(<< "algorithm=", C, R/bits >>, Acc) when ?IS_TOKEN(C) -> auth_digest_token(R, Acc, <<"algorithm">>, << C >>); +auth_digest_list(<< "cnonce=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"cnonce">>, <<>>); +auth_digest_list(<< "nc=", A, B, C, D, E, F, G, H, R/bits >>, Acc) + when ?IS_LHEX(A), ?IS_LHEX(B), ?IS_LHEX(C), ?IS_LHEX(D), + ?IS_LHEX(E), ?IS_LHEX(F), ?IS_LHEX(G), ?IS_LHEX(H) -> + auth_digest_list_sep(R, [{<<"nc">>, << A, B, C, D, E, F, G, H >>}|Acc]); +auth_digest_list(<< "nonce=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"nonce">>, <<>>); +auth_digest_list(<< "opaque=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"opaque">>, <<>>); +auth_digest_list(<< "qop=", C, R/bits >>, Acc) when ?IS_TOKEN(C) -> auth_digest_token(R, Acc, <<"qop">>, << C >>); +auth_digest_list(<< "realm=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"realm">>, <<>>); +auth_digest_list(<< "response=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"response">>, <<>>); +auth_digest_list(<< "uri=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"uri">>, <<>>); +auth_digest_list(<< "username=\"", R/bits >>, Acc) -> auth_digest_quoted(R, Acc, <<"username">>, <<>>); +auth_digest_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> + ?LOWER(auth_digest_param, R, Acc, <<>>). + +auth_digest_param(<< $=, $", R/bits >>, Acc, K) -> auth_digest_quoted(R, Acc, K, <<>>); +auth_digest_param(<< $=, C, R/bits >>, Acc, K) when ?IS_TOKEN(C) -> auth_digest_token(R, Acc, K, << C >>); +auth_digest_param(<< C, R/bits >>, Acc, K) when ?IS_TOKEN(C) -> + ?LOWER(auth_digest_param, R, Acc, K). + +auth_digest_token(<< C, R/bits >>, Acc, K, V) when ?IS_TOKEN(C) -> auth_digest_token(R, Acc, K, << V/binary, C >>); +auth_digest_token(R, Acc, K, V) -> auth_digest_list_sep(R, [{K, V}|Acc]). + +auth_digest_quoted(<< $", R/bits >>, Acc, K, V) -> auth_digest_list_sep(R, [{K, V}|Acc]); +auth_digest_quoted(<< $\\, C, R/bits >>, Acc, K, V) when ?IS_VCHAR_OBS(C) -> auth_digest_quoted(R, Acc, K, << V/binary, C >>); +auth_digest_quoted(<< C, R/bits >>, Acc, K, V) when ?IS_VCHAR_OBS(C) -> auth_digest_quoted(R, Acc, K, << V/binary, C >>). + +auth_digest_list_sep(<<>>, Acc) -> lists:reverse(Acc); +auth_digest_list_sep(<< $,, R/bits >>, Acc) -> auth_digest_list(R, Acc); +auth_digest_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> auth_digest_list_sep(R, Acc). + +-ifdef(TEST). 
+parse_authorization_test_() -> + Tests = [ + {<<"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==">>, {basic, <<"Aladdin">>, <<"open sesame">>}}, + {<<"bAsIc QWxhZGRpbjpvcGVuIHNlc2FtZQ==">>, {basic, <<"Aladdin">>, <<"open sesame">>}}, + {<<"Bearer mF_9.B5f-4.1JqM">>, {bearer, <<"mF_9.B5f-4.1JqM">>}}, + {<<"bEaRer mF_9.B5f-4.1JqM">>, {bearer, <<"mF_9.B5f-4.1JqM">>}}, + {<<"Digest username=\"Mufasa\"," + "realm=\"testrealm@host.com\"," + "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"," + "uri=\"/dir/index.html\"," + "qop=auth," + "nc=00000001," + "cnonce=\"0a4f113b\"," + "response=\"6629fae49393a05397450978507c4ef1\"," + "opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"">>, + {digest, [ + {<<"username">>, <<"Mufasa">>}, + {<<"realm">>, <<"testrealm@host.com">>}, + {<<"nonce">>, <<"dcd98b7102dd2f0e8b11d0f600bfb0c093">>}, + {<<"uri">>, <<"/dir/index.html">>}, + {<<"qop">>, <<"auth">>}, + {<<"nc">>, <<"00000001">>}, + {<<"cnonce">>, <<"0a4f113b">>}, + {<<"response">>, <<"6629fae49393a05397450978507c4ef1">>}, + {<<"opaque">>, <<"5ccc069c403ebaf9f0171e9517f40e41">>}]}} + ], + [{V, fun() -> R = parse_authorization(V) end} || {V, R} <- Tests]. + +horse_parse_authorization_basic() -> + horse:repeat(20000, + parse_authorization(<<"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==">>) + ). + +horse_parse_authorization_bearer() -> + horse:repeat(20000, + parse_authorization(<<"Bearer mF_9.B5f-4.1JqM">>) + ). + +horse_parse_authorization_digest() -> + horse:repeat(20000, + parse_authorization( + <<"Digest username=\"Mufasa\"," + "realm=\"testrealm@host.com\"," + "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\"," + "uri=\"/dir/index.html\"," + "qop=auth," + "nc=00000001," + "cnonce=\"0a4f113b\"," + "response=\"6629fae49393a05397450978507c4ef1\"," + "opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"">>) + ). +-endif. + +%% Cache-Control header. +%% +%% In the fields list case, we do not support escaping, which shouldn't be needed anyway. + +-spec parse_cache_control(binary()) + -> [binary() | {binary(), binary()} | {binary(), non_neg_integer()} | {binary(), [binary()]}]. +parse_cache_control(<<"no-cache">>) -> + [<<"no-cache">>]; +parse_cache_control(<<"max-age=0">>) -> + [{<<"max-age">>, 0}]; +parse_cache_control(CacheControl) -> + nonempty(cache_directive_list(CacheControl, [])). + +cache_directive_list(<<>>, Acc) -> lists:reverse(Acc); +cache_directive_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C)-> cache_directive_list(R, Acc); +cache_directive_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> + ?LOWER(cache_directive, R, Acc, <<>>). + +cache_directive(<< $=, $", R/bits >>, Acc, T) + when (T =:= <<"no-cache">>) or (T =:= <<"private">>) -> + cache_directive_fields_list(R, Acc, T, []); +cache_directive(<< $=, C, R/bits >>, Acc, T) + when ?IS_DIGIT(C), (T =:= <<"max-age">>) or (T =:= <<"max-stale">>) + or (T =:= <<"min-fresh">>) or (T =:= <<"s-maxage">>) -> + cache_directive_delta(R, Acc, T, (C - $0)); +cache_directive(<< $=, $", R/bits >>, Acc, T) -> cache_directive_quoted_string(R, Acc, T, <<>>); +cache_directive(<< $=, C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> cache_directive_token(R, Acc, T, << C >>); +cache_directive(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> + ?LOWER(cache_directive, R, Acc, T); +cache_directive(R, Acc, T) -> cache_directive_list_sep(R, [T|Acc]). + +cache_directive_delta(<< C, R/bits >>, Acc, K, V) when ?IS_DIGIT(C) -> cache_directive_delta(R, Acc, K, V * 10 + (C - $0)); +cache_directive_delta(R, Acc, K, V) -> cache_directive_list_sep(R, [{K, V}|Acc]). 
+ +cache_directive_fields_list(<< C, R/bits >>, Acc, K, L) when ?IS_WS_COMMA(C) -> cache_directive_fields_list(R, Acc, K, L); +cache_directive_fields_list(<< $", R/bits >>, Acc, K, L) -> cache_directive_list_sep(R, [{K, lists:reverse(L)}|Acc]); +cache_directive_fields_list(<< C, R/bits >>, Acc, K, L) when ?IS_TOKEN(C) -> + ?LOWER(cache_directive_field, R, Acc, K, L, <<>>). + +cache_directive_field(<< C, R/bits >>, Acc, K, L, F) when ?IS_TOKEN(C) -> + ?LOWER(cache_directive_field, R, Acc, K, L, F); +cache_directive_field(R, Acc, K, L, F) -> cache_directive_fields_list_sep(R, Acc, K, [F|L]). + +cache_directive_fields_list_sep(<< C, R/bits >>, Acc, K, L) when ?IS_WS(C) -> cache_directive_fields_list_sep(R, Acc, K, L); +cache_directive_fields_list_sep(<< $,, R/bits >>, Acc, K, L) -> cache_directive_fields_list(R, Acc, K, L); +cache_directive_fields_list_sep(<< $", R/bits >>, Acc, K, L) -> cache_directive_list_sep(R, [{K, lists:reverse(L)}|Acc]). + +cache_directive_token(<< C, R/bits >>, Acc, K, V) when ?IS_TOKEN(C) -> cache_directive_token(R, Acc, K, << V/binary, C >>); +cache_directive_token(R, Acc, K, V) -> cache_directive_list_sep(R, [{K, V}|Acc]). + +cache_directive_quoted_string(<< $", R/bits >>, Acc, K, V) -> cache_directive_list_sep(R, [{K, V}|Acc]); +cache_directive_quoted_string(<< $\\, C, R/bits >>, Acc, K, V) when ?IS_VCHAR_OBS(C) -> + cache_directive_quoted_string(R, Acc, K, << V/binary, C >>); +cache_directive_quoted_string(<< C, R/bits >>, Acc, K, V) when ?IS_VCHAR_OBS(C) -> + cache_directive_quoted_string(R, Acc, K, << V/binary, C >>). + +cache_directive_list_sep(<<>>, Acc) -> lists:reverse(Acc); +cache_directive_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> cache_directive_list_sep(R, Acc); +cache_directive_list_sep(<< $,, R/bits >>, Acc) -> cache_directive_list(R, Acc). + +-ifdef(TEST). +cache_directive_unreserved_token() -> + ?SUCHTHAT(T, + token(), + T =/= <<"max-age">> andalso T =/= <<"max-stale">> andalso T =/= <<"min-fresh">> + andalso T =/= <<"s-maxage">> andalso T =/= <<"no-cache">> andalso T =/= <<"private">>). + +cache_directive() -> + oneof([ + token(), + {cache_directive_unreserved_token(), token()}, + {cache_directive_unreserved_token(), quoted_string()}, + {elements([<<"max-age">>, <<"max-stale">>, <<"min-fresh">>, <<"s-maxage">>]), non_neg_integer()}, + {fields, elements([<<"no-cache">>, <<"private">>]), small_list(token())} + ]). + +cache_control() -> + ?LET(L, + non_empty(list(cache_directive())), + begin + << _, CacheControl/binary >> = iolist_to_binary([[$,, + case C of + {fields, K, V} -> [K, $=, $", [[F, $,] || F <- V], $"]; + {K, V} when is_integer(V) -> [K, $=, integer_to_binary(V)]; + {K, V} -> [K, $=, V]; + K -> K + end] || C <- L]), + {L, CacheControl} + end). + +prop_parse_cache_control() -> + ?FORALL({L, CacheControl}, + cache_control(), + begin + ResL = parse_cache_control(CacheControl), + CheckedL = [begin + ExpectedCc = case Cc of + {fields, K, V} -> {?LOWER(K), [?LOWER(F) || F <- V]}; + {K, V} -> {?LOWER(K), unquote(V)}; + K -> ?LOWER(K) + end, + ExpectedCc =:= ResCc + end || {Cc, ResCc} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). 
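+
+%% A brief usage sketch, not part of the upstream tests (the usage_sketch_*
+%% name is ours): directive names are lowercased and delta-seconds come
+%% back as integers, so callers can pattern match directly.
+usage_sketch_cache_control_test() ->
+	[{<<"max-age">>, 300}, <<"private">>] =
+		parse_cache_control(<<"max-age=300, private">>).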
+ +parse_cache_control_test_() -> + Tests = [ + {<<"no-cache">>, [<<"no-cache">>]}, + {<<"no-store">>, [<<"no-store">>]}, + {<<"max-age=0">>, [{<<"max-age">>, 0}]}, + {<<"max-age=30">>, [{<<"max-age">>, 30}]}, + {<<"private, community=\"UCI\"">>, [<<"private">>, {<<"community">>, <<"UCI">>}]}, + {<<"private=\"Content-Type, Content-Encoding, Content-Language\"">>, + [{<<"private">>, [<<"content-type">>, <<"content-encoding">>, <<"content-language">>]}]} + ], + [{V, fun() -> R = parse_cache_control(V) end} || {V, R} <- Tests]. + +parse_cache_control_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_cache_control(V)) end} || V <- Tests]. + +horse_parse_cache_control_no_cache() -> + horse:repeat(200000, + parse_cache_control(<<"no-cache">>) + ). + +horse_parse_cache_control_max_age_0() -> + horse:repeat(200000, + parse_cache_control(<<"max-age=0">>) + ). + +horse_parse_cache_control_max_age_30() -> + horse:repeat(200000, + parse_cache_control(<<"max-age=30">>) + ). + +horse_parse_cache_control_custom() -> + horse:repeat(200000, + parse_cache_control(<<"private, community=\"UCI\"">>) + ). + +horse_parse_cache_control_fields() -> + horse:repeat(200000, + parse_cache_control(<<"private=\"Content-Type, Content-Encoding, Content-Language\"">>) + ). +-endif. + +%% Connection header. + +-spec parse_connection(binary()) -> [binary()]. +parse_connection(<<"close">>) -> + [<<"close">>]; +parse_connection(<<"keep-alive">>) -> + [<<"keep-alive">>]; +parse_connection(Connection) -> + nonempty(token_ci_list(Connection, [])). + +-ifdef(TEST). +prop_parse_connection() -> + ?FORALL(L, + non_empty(list(token())), + begin + << _, Connection/binary >> = iolist_to_binary([[$,, C] || C <- L]), + ResL = parse_connection(Connection), + CheckedL = [?LOWER(Co) =:= ResC || {Co, ResC} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_connection_test_() -> + Tests = [ + {<<"close">>, [<<"close">>]}, + {<<"ClOsE">>, [<<"close">>]}, + {<<"Keep-Alive">>, [<<"keep-alive">>]}, + {<<"keep-alive, Upgrade">>, [<<"keep-alive">>, <<"upgrade">>]} + ], + [{V, fun() -> R = parse_connection(V) end} || {V, R} <- Tests]. + +parse_connection_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_connection(V)) end} || V <- Tests]. + +horse_parse_connection_close() -> + horse:repeat(200000, + parse_connection(<<"close">>) + ). + +horse_parse_connection_keepalive() -> + horse:repeat(200000, + parse_connection(<<"keep-alive">>) + ). + +horse_parse_connection_keepalive_upgrade() -> + horse:repeat(200000, + parse_connection(<<"keep-alive, upgrade">>) + ). +-endif. + +%% Content-Encoding header. + +-spec parse_content_encoding(binary()) -> [binary()]. +parse_content_encoding(ContentEncoding) -> + nonempty(token_ci_list(ContentEncoding, [])). + +-ifdef(TEST). +parse_content_encoding_test_() -> + Tests = [ + {<<"gzip">>, [<<"gzip">>]} + ], + [{V, fun() -> R = parse_content_encoding(V) end} || {V, R} <- Tests]. + +parse_content_encoding_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_content_encoding(V)) end} || V <- Tests]. + +horse_parse_content_encoding() -> + horse:repeat(200000, + parse_content_encoding(<<"gzip">>) + ). +-endif. + +%% Content-Language header. +%% +%% We do not support irregular deprecated tags that do not match the ABNF. + +-spec parse_content_language(binary()) -> [binary()]. +parse_content_language(ContentLanguage) -> + nonempty(langtag_list(ContentLanguage, [])). 
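+
+%% A brief usage sketch, not part of the upstream cowlib tests (the
+%% usage_sketch_* name is ours): language tags are normalized to
+%% lowercase, which keeps later comparisons trivial.
+-ifdef(TEST).
+usage_sketch_content_language_test() ->
+	[<<"zh-hant">>, <<"en-us">>] =
+		parse_content_language(<<"zh-Hant, en-US">>).
+-endif.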
+ +langtag_list(<<>>, Acc) -> lists:reverse(Acc); +langtag_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> langtag_list(R, Acc); +langtag_list(<< A, B, C, R/bits >>, Acc) when ?IS_ALPHA(A), ?IS_ALPHA(B), ?IS_ALPHA(C) -> + langtag_extlang(R, Acc, << ?LC(A), ?LC(B), ?LC(C) >>, 0); +langtag_list(<< A, B, R/bits >>, Acc) when ?IS_ALPHA(A), ?IS_ALPHA(B) -> + langtag_extlang(R, Acc, << ?LC(A), ?LC(B) >>, 0); +langtag_list(<< X, R/bits >>, Acc) when X =:= $x; X =:= $X -> langtag_privateuse_sub(R, Acc, << $x >>, 0). + +langtag_extlang(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T, _) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>); +langtag_extlang(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T, _) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>); +langtag_extlang(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T, _) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>); +langtag_extlang(<< $-, A, B, C, D, E, R/bits >>, Acc, T, _) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>); +langtag_extlang(<< $-, A, B, C, D, R/bits >>, Acc, T, _) + when ?IS_ALPHA(A), ?IS_ALPHA(B), ?IS_ALPHA(C), ?IS_ALPHA(D) -> + langtag_region(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D) >>); +langtag_extlang(<< $-, A, B, C, R/bits >>, Acc, T, N) + when ?IS_ALPHA(A), ?IS_ALPHA(B), ?IS_ALPHA(C) -> + case N of + 2 -> langtag_script(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C) >>); + _ -> langtag_extlang(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C) >>, N + 1) + end; +langtag_extlang(R, Acc, T, _) -> langtag_region(R, Acc, T). 
+ +langtag_script(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>); +langtag_script(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>); +langtag_script(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>); +langtag_script(<< $-, A, B, C, D, E, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>); +langtag_script(<< $-, A, B, C, D, R/bits >>, Acc, T) + when ?IS_ALPHA(A), ?IS_ALPHA(B), ?IS_ALPHA(C), ?IS_ALPHA(D) -> + langtag_region(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D) >>); +langtag_script(R, Acc, T) -> + langtag_region(R, Acc, T). + +langtag_region(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>); +langtag_region(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>); +langtag_region(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>); +langtag_region(<< $-, A, B, C, D, E, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>); +langtag_region(<< $-, A, B, C, D, R/bits >>, Acc, T) + when ?IS_DIGIT(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D) -> + langtag_variant(R, Acc, << T/binary, $-, A, ?LC(B), ?LC(C), ?LC(D) >>); +langtag_region(<< $-, A, B, R/bits >>, Acc, T) when ?IS_ALPHA(A), ?IS_ALPHA(B) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B) >>); +langtag_region(<< $-, A, B, C, R/bits >>, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + langtag_variant(R, Acc, << T/binary, $-, A, B, C >>); +langtag_region(R, Acc, T) -> + langtag_variant(R, Acc, T). 
+ +langtag_variant(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>); +langtag_variant(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>); +langtag_variant(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>); +langtag_variant(<< $-, A, B, C, D, E, R/bits >>, Acc, T) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_variant(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>); +langtag_variant(<< $-, A, B, C, D, R/bits >>, Acc, T) + when ?IS_DIGIT(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D) -> + langtag_variant(R, Acc, << T/binary, $-, A, ?LC(B), ?LC(C), ?LC(D) >>); +langtag_variant(R, Acc, T) -> + langtag_extension(R, Acc, T). + +langtag_extension(<< $-, X, R/bits >>, Acc, T) when X =:= $x; X =:= $X -> langtag_privateuse_sub(R, Acc, << T/binary, $-, $x >>, 0); +langtag_extension(<< $-, S, R/bits >>, Acc, T) when ?IS_ALPHANUM(S) -> langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(S) >>, 0); +langtag_extension(R, Acc, T) -> langtag_list_sep(R, [T|Acc]). + +langtag_extension_sub(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>, N + 1); +langtag_extension_sub(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>, N + 1); +langtag_extension_sub(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>, N + 1); +langtag_extension_sub(<< $-, A, B, C, D, E, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>, N + 1); +langtag_extension_sub(<< $-, A, B, C, D, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D) >>, N + 1); +langtag_extension_sub(<< $-, A, B, C, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C) >>, N + 1); +langtag_extension_sub(<< $-, A, B, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B) -> + langtag_extension_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B) 
>>, N + 1); +langtag_extension_sub(R, Acc, T, N) when N > 0 -> + langtag_extension(R, Acc, T). + +langtag_privateuse_sub(<< $-, A, B, C, D, E, F, G, H, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G), ?IS_ALPHANUM(H) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G), ?LC(H) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, C, D, E, F, G, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F), ?IS_ALPHANUM(G) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F), ?LC(G) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, C, D, E, F, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), + ?IS_ALPHANUM(E), ?IS_ALPHANUM(F) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E), ?LC(F) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, C, D, E, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D), ?IS_ALPHANUM(E) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D), ?LC(E) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, C, D, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C), ?IS_ALPHANUM(D) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C), ?LC(D) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, C, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B), ?IS_ALPHANUM(C) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B), ?LC(C) >>, N + 1); +langtag_privateuse_sub(<< $-, A, B, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A), ?IS_ALPHANUM(B) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A), ?LC(B) >>, N + 1); +langtag_privateuse_sub(<< $-, A, R/bits >>, Acc, T, N) + when ?IS_ALPHANUM(A) -> + langtag_privateuse_sub(R, Acc, << T/binary, $-, ?LC(A) >>, N + 1); +langtag_privateuse_sub(R, Acc, T, N) when N > 0 -> langtag_list_sep(R, [T|Acc]). + +langtag_list_sep(<<>>, Acc) -> lists:reverse(Acc); +langtag_list_sep(<< $,, R/bits >>, Acc) -> langtag_list(R, Acc); +langtag_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> langtag_list_sep(R, Acc). + +-ifdef(TEST). +langtag_language() -> vector(2, 3, alpha()). +langtag_extlang() -> vector(0, 3, [$-, alpha(), alpha(), alpha()]). +langtag_script() -> oneof([[], [$-, alpha(), alpha(), alpha(), alpha()]]). +langtag_region() -> oneof([[], [$-, alpha(), alpha()], [$-, digit(), digit(), digit()]]). + +langtag_variant() -> + small_list(frequency([ + {4, [$-, vector(5, 8, alphanum())]}, + {1, [$-, digit(), alphanum(), alphanum(), alphanum()]} + ])). + +langtag_extension() -> + small_list([$-, ?SUCHTHAT(S, alphanum(), S =/= $x andalso S =/= $X), + small_non_empty_list([$-, vector(2, 8, alphanum())]) + ]). + +langtag_privateuse() -> oneof([[], [$-, langtag_privateuse_nodash()]]). +langtag_privateuse_nodash() -> [elements([$x, $X]), small_non_empty_list([$-, vector(1, 8, alphanum())])]. +private_language_tag() -> ?LET(T, langtag_privateuse_nodash(), iolist_to_binary(T)). + +language_tag() -> + ?LET(IoList, + [langtag_language(), langtag_extlang(), langtag_script(), langtag_region(), + langtag_variant(), langtag_extension(), langtag_privateuse()], + iolist_to_binary(IoList)). 
+ +content_language() -> + ?LET(L, + non_empty(list(frequency([ + {90, language_tag()}, + {10, private_language_tag()} + ]))), + begin + << _, ContentLanguage/binary >> = iolist_to_binary([[$,, T] || T <- L]), + {L, ContentLanguage} + end). + +prop_parse_content_language() -> + ?FORALL({L, ContentLanguage}, + content_language(), + begin + ResL = parse_content_language(ContentLanguage), + CheckedL = [?LOWER(T) =:= ResT || {T, ResT} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_content_language_test_() -> + Tests = [ + {<<"de">>, [<<"de">>]}, + {<<"fr">>, [<<"fr">>]}, + {<<"ja">>, [<<"ja">>]}, + {<<"zh-Hant">>, [<<"zh-hant">>]}, + {<<"zh-Hans">>, [<<"zh-hans">>]}, + {<<"sr-Cyrl">>, [<<"sr-cyrl">>]}, + {<<"sr-Latn">>, [<<"sr-latn">>]}, + {<<"zh-cmn-Hans-CN">>, [<<"zh-cmn-hans-cn">>]}, + {<<"cmn-Hans-CN">>, [<<"cmn-hans-cn">>]}, + {<<"zh-yue-HK">>, [<<"zh-yue-hk">>]}, + {<<"yue-HK">>, [<<"yue-hk">>]}, + {<<"zh-Hans-CN">>, [<<"zh-hans-cn">>]}, + {<<"sr-Latn-RS">>, [<<"sr-latn-rs">>]}, + {<<"sl-rozaj">>, [<<"sl-rozaj">>]}, + {<<"sl-rozaj-biske">>, [<<"sl-rozaj-biske">>]}, + {<<"sl-nedis">>, [<<"sl-nedis">>]}, + {<<"de-CH-1901">>, [<<"de-ch-1901">>]}, + {<<"sl-IT-nedis">>, [<<"sl-it-nedis">>]}, + {<<"hy-Latn-IT-arevela">>, [<<"hy-latn-it-arevela">>]}, + {<<"de-DE">>, [<<"de-de">>]}, + {<<"en-US">>, [<<"en-us">>]}, + {<<"es-419">>, [<<"es-419">>]}, + {<<"de-CH-x-phonebk">>, [<<"de-ch-x-phonebk">>]}, + {<<"az-Arab-x-AZE-derbend">>, [<<"az-arab-x-aze-derbend">>]}, + {<<"x-whatever">>, [<<"x-whatever">>]}, + {<<"qaa-Qaaa-QM-x-southern">>, [<<"qaa-qaaa-qm-x-southern">>]}, + {<<"de-Qaaa">>, [<<"de-qaaa">>]}, + {<<"sr-Latn-QM">>, [<<"sr-latn-qm">>]}, + {<<"sr-Qaaa-RS">>, [<<"sr-qaaa-rs">>]}, + {<<"en-US-u-islamcal">>, [<<"en-us-u-islamcal">>]}, + {<<"zh-CN-a-myext-x-private">>, [<<"zh-cn-a-myext-x-private">>]}, + {<<"en-a-myext-b-another">>, [<<"en-a-myext-b-another">>]}, + {<<"mn-Cyrl-MN">>, [<<"mn-cyrl-mn">>]}, + {<<"MN-cYRL-mn">>, [<<"mn-cyrl-mn">>]}, + {<<"mN-cYrL-Mn">>, [<<"mn-cyrl-mn">>]}, + {<<"az-Arab-IR">>, [<<"az-arab-ir">>]}, + {<<"zh-gan">>, [<<"zh-gan">>]}, + {<<"zh-yue">>, [<<"zh-yue">>]}, + {<<"zh-cmn">>, [<<"zh-cmn">>]}, + {<<"de-AT">>, [<<"de-at">>]}, + {<<"de-CH-1996">>, [<<"de-ch-1996">>]}, + {<<"en-Latn-GB-boont-r-extended-sequence-x-private">>, + [<<"en-latn-gb-boont-r-extended-sequence-x-private">>]}, + {<<"el-x-koine">>, [<<"el-x-koine">>]}, + {<<"el-x-attic">>, [<<"el-x-attic">>]}, + {<<"fr, en-US, es-419, az-Arab, x-pig-latin, man-Nkoo-GN">>, + [<<"fr">>, <<"en-us">>, <<"es-419">>, <<"az-arab">>, <<"x-pig-latin">>, <<"man-nkoo-gn">>]}, + {<<"da">>, [<<"da">>]}, + {<<"mi, en">>, [<<"mi">>, <<"en">>]} + ], + [{V, fun() -> R = parse_content_language(V) end} || {V, R} <- Tests]. + +parse_content_language_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_content_language(V)) end} || V <- Tests]. + +horse_parse_content_language() -> + horse:repeat(100000, + parse_content_language(<<"fr, en-US, es-419, az-Arab, x-pig-latin, man-Nkoo-GN">>) + ). +-endif. + +%% Content-Length header. + +-spec parse_content_length(binary()) -> non_neg_integer(). +parse_content_length(ContentLength) -> + I = binary_to_integer(ContentLength), + true = I >= 0, + I. + +-ifdef(TEST). +prop_parse_content_length() -> + ?FORALL( + X, + non_neg_integer(), + X =:= parse_content_length(integer_to_binary(X)) + ). 
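+
+%% A short sketch, not an upstream test (the usage_sketch_* name is ours):
+%% invalid values crash rather than being silently accepted, so callers
+%% can rely on getting a plain non_neg_integer().
+usage_sketch_content_length_test() ->
+	1234567890 = parse_content_length(<<"1234567890">>),
+	{'EXIT', _} = (catch parse_content_length(<<"-1">>)).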
+ +parse_content_length_test_() -> + Tests = [ + {<<"0">>, 0}, + {<<"42">>, 42}, + {<<"69">>, 69}, + {<<"1337">>, 1337}, + {<<"3495">>, 3495}, + {<<"1234567890">>, 1234567890} + ], + [{V, fun() -> R = parse_content_length(V) end} || {V, R} <- Tests]. + +parse_content_length_error_test_() -> + Tests = [ + <<>>, + <<"-1">>, + <<"123, 123">>, + <<"4.17">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_content_length(V)) end} || V <- Tests]. + +horse_parse_content_length_zero() -> + horse:repeat(100000, + parse_content_length(<<"0">>) + ). + +horse_parse_content_length_giga() -> + horse:repeat(100000, + parse_content_length(<<"1234567890">>) + ). +-endif. + +%% Content-Range header. + +-spec parse_content_range(binary()) + -> {bytes, non_neg_integer(), non_neg_integer(), non_neg_integer() | '*'} + | {bytes, '*', non_neg_integer()} | {binary(), binary()}. +parse_content_range(<<"bytes */", C, R/bits >>) when ?IS_DIGIT(C) -> unsatisfied_range(R, C - $0); +parse_content_range(<<"bytes ", C, R/bits >>) when ?IS_DIGIT(C) -> byte_range_first(R, C - $0); +parse_content_range(<< C, R/bits >>) when ?IS_TOKEN(C) -> + ?LOWER(other_content_range_unit, R, <<>>). + +byte_range_first(<< $-, C, R/bits >>, First) when ?IS_DIGIT(C) -> byte_range_last(R, First, C - $0); +byte_range_first(<< C, R/bits >>, First) when ?IS_DIGIT(C) -> byte_range_first(R, First * 10 + C - $0). + +byte_range_last(<<"/*">>, First, Last) -> {bytes, First, Last, '*'}; +byte_range_last(<< $/, C, R/bits >>, First, Last) when ?IS_DIGIT(C) -> byte_range_complete(R, First, Last, C - $0); +byte_range_last(<< C, R/bits >>, First, Last) when ?IS_DIGIT(C) -> byte_range_last(R, First, Last * 10 + C - $0). + +byte_range_complete(<<>>, First, Last, Complete) -> {bytes, First, Last, Complete}; +byte_range_complete(<< C, R/bits >>, First, Last, Complete) when ?IS_DIGIT(C) -> + byte_range_complete(R, First, Last, Complete * 10 + C - $0). + +unsatisfied_range(<<>>, Complete) -> {bytes, '*', Complete}; +unsatisfied_range(<< C, R/bits >>, Complete) when ?IS_DIGIT(C) -> unsatisfied_range(R, Complete * 10 + C - $0). + +other_content_range_unit(<< $\s, R/bits >>, Unit) -> other_content_range_resp(R, Unit, <<>>); +other_content_range_unit(<< C, R/bits >>, Unit) when ?IS_TOKEN(C) -> + ?LOWER(other_content_range_unit, R, Unit). + +other_content_range_resp(<<>>, Unit, Resp) -> {Unit, Resp}; +other_content_range_resp(<< C, R/bits >>, Unit, Resp) when ?IS_CHAR(C) -> other_content_range_resp(R, Unit, << Resp/binary, C >>). + +-ifdef(TEST). +content_range() -> + ?LET(ContentRange, + oneof([ + ?SUCHTHAT({bytes, First, Last, Complete}, + {bytes, non_neg_integer(), non_neg_integer(), non_neg_integer()}, + First =< Last andalso Last < Complete), + ?SUCHTHAT({bytes, First, Last, '*'}, + {bytes, non_neg_integer(), non_neg_integer(), '*'}, + First =< Last), + {bytes, '*', non_neg_integer()}, + {token(), ?LET(L, list(abnf_char()), list_to_binary(L))} + ]), + {case ContentRange of + {Unit, Resp} when is_binary(Unit) -> {?LOWER(Unit), Resp}; + _ -> ContentRange + end, case ContentRange of + {bytes, First, Last, '*'} -> + << "bytes ", (integer_to_binary(First))/binary, "-", + (integer_to_binary(Last))/binary, "/*">>; + {bytes, First, Last, Complete} -> + << "bytes ", (integer_to_binary(First))/binary, "-", + (integer_to_binary(Last))/binary, "/", (integer_to_binary(Complete))/binary >>; + {bytes, '*', Complete} -> + << "bytes */", (integer_to_binary(Complete))/binary >>; + {Unit, Resp} -> + << Unit/binary, $\s, Resp/binary >> + end}). 
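+
+%% A short sketch, not an upstream test (the usage_sketch_* name is ours):
+%% both a satisfied byte range and an unsatisfied-range response are shown.
+usage_sketch_content_range_test() ->
+	{bytes, 0, 499, 1234} = parse_content_range(<<"bytes 0-499/1234">>),
+	{bytes, '*', 1234} = parse_content_range(<<"bytes */1234">>).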
+ +prop_parse_content_range() -> + ?FORALL({Res, ContentRange}, + content_range(), + Res =:= parse_content_range(ContentRange)). + +parse_content_range_test_() -> + Tests = [ + {<<"bytes 21010-47021/47022">>, {bytes, 21010, 47021, 47022}}, + {<<"bytes 500-999/8000">>, {bytes, 500, 999, 8000}}, + {<<"bytes 7000-7999/8000">>, {bytes, 7000, 7999, 8000}}, + {<<"bytes 42-1233/1234">>, {bytes, 42, 1233, 1234}}, + {<<"bytes 42-1233/*">>, {bytes, 42, 1233, '*'}}, + {<<"bytes */1234">>, {bytes, '*', 1234}}, + {<<"bytes 0-499/1234">>, {bytes, 0, 499, 1234}}, + {<<"bytes 500-999/1234">>, {bytes, 500, 999, 1234}}, + {<<"bytes 500-1233/1234">>, {bytes, 500, 1233, 1234}}, + {<<"bytes 734-1233/1234">>, {bytes, 734, 1233, 1234}}, + {<<"bytes */47022">>, {bytes, '*', 47022}}, + {<<"exampleunit 1.2-4.3/25">>, {<<"exampleunit">>, <<"1.2-4.3/25">>}}, + {<<"exampleunit 11.2-14.3/25">>, {<<"exampleunit">>, <<"11.2-14.3/25">>}} + ], + [{V, fun() -> R = parse_content_range(V) end} || {V, R} <- Tests]. + +parse_content_range_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_content_range(V)) end} || V <- Tests]. + +horse_parse_content_range_bytes() -> + horse:repeat(200000, + parse_content_range(<<"bytes 21010-47021/47022">>) + ). + +horse_parse_content_range_other() -> + horse:repeat(200000, + parse_content_range(<<"exampleunit 11.2-14.3/25">>) + ). +-endif. + +%% Content-Type header. + +-spec parse_content_type(binary()) -> media_type(). +parse_content_type(<< C, R/bits >>) when ?IS_TOKEN(C) -> + ?LOWER(media_type, R, <<>>). + +media_type(<< $/, C, R/bits >>, T) when ?IS_TOKEN(C) -> + ?LOWER(media_subtype, R, T, <<>>); +media_type(<< C, R/bits >>, T) when ?IS_TOKEN(C) -> + ?LOWER(media_type, R, T). + +media_subtype(<< C, R/bits >>, T, S) when ?IS_TOKEN(C) -> + ?LOWER(media_subtype, R, T, S); +media_subtype(R, T, S) -> media_param_sep(R, T, S, []). + +media_param_sep(<<>>, T, S, P) -> {T, S, lists:reverse(P)}; +media_param_sep(<< $;, R/bits >>, T, S, P) -> media_before_param(R, T, S, P); +media_param_sep(<< C, R/bits >>, T, S, P) when ?IS_WS(C) -> media_param_sep(R, T, S, P). + +media_before_param(<< C, R/bits >>, T, S, P) when ?IS_WS(C)-> media_before_param(R, T, S, P); +media_before_param(<< "charset=", $", R/bits >>, T, S, P) -> media_charset_quoted(R, T, S, P, <<>>); +media_before_param(<< "charset=", R/bits >>, T, S, P) -> media_charset(R, T, S, P, <<>>); +media_before_param(<< C, R/bits >>, T, S, P) when ?IS_TOKEN(C) -> + ?LOWER(media_param, R, T, S, P, <<>>). + +media_charset_quoted(<< $", R/bits >>, T, S, P, V) -> + media_param_sep(R, T, S, [{<<"charset">>, V}|P]); +media_charset_quoted(<< $\\, C, R/bits >>, T, S, P, V) when ?IS_VCHAR_OBS(C) -> + ?LOWER(media_charset_quoted, R, T, S, P, V); +media_charset_quoted(<< C, R/bits >>, T, S, P, V) when ?IS_VCHAR_OBS(C) -> + ?LOWER(media_charset_quoted, R, T, S, P, V). + +media_charset(<< C, R/bits >>, T, S, P, V) when ?IS_TOKEN(C) -> + ?LOWER(media_charset, R, T, S, P, V); +media_charset(R, T, S, P, V) -> media_param_sep(R, T, S, [{<<"charset">>, V}|P]). + +media_param(<< $=, $", R/bits >>, T, S, P, K) -> media_quoted(R, T, S, P, K, <<>>); +media_param(<< $=, C, R/bits >>, T, S, P, K) when ?IS_TOKEN(C) -> media_value(R, T, S, P, K, << C >>); +media_param(<< C, R/bits >>, T, S, P, K) when ?IS_TOKEN(C) -> + ?LOWER(media_param, R, T, S, P, K). 
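+
+%% A short usage sketch, not part of the upstream tests (the usage_sketch_*
+%% name is ours): type, subtype and charset are lowercased, while other
+%% parameter values are kept as-is.
+-ifdef(TEST).
+usage_sketch_content_type_test() ->
+	{<<"text">>, <<"html">>, [{<<"charset">>, <<"utf-8">>}]} =
+		parse_content_type(<<"text/html; charset=UTF-8">>).
+-endif.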
+ +media_quoted(<< $", R/bits >>, T, S, P, K, V) -> media_param_sep(R, T, S, [{K, V}|P]); +media_quoted(<< $\\, C, R/bits >>, T, S, P, K, V) when ?IS_VCHAR_OBS(C) -> media_quoted(R, T, S, P, K, << V/binary, C >>); +media_quoted(<< C, R/bits >>, T, S, P, K, V) when ?IS_VCHAR_OBS(C) -> media_quoted(R, T, S, P, K, << V/binary, C >>). + +media_value(<< C, R/bits >>, T, S, P, K, V) when ?IS_TOKEN(C) -> media_value(R, T, S, P, K, << V/binary, C >>); +media_value(R, T, S, P, K, V) -> media_param_sep(R, T, S, [{K, V}|P]). + +-ifdef(TEST). +media_type_parameter() -> + frequency([ + {90, parameter()}, + {10, {<<"charset">>, oneof([token(), quoted_string()]), <<>>, <<>>}} + ]). + +media_type() -> + ?LET({T, S, P}, + {token(), token(), small_list(media_type_parameter())}, + {T, S, P, iolist_to_binary([T, $/, S, [[OWS1, $;, OWS2, K, $=, V] || {K, V, OWS1, OWS2} <- P]])} + ). + +prop_parse_content_type() -> + ?FORALL({T, S, P, MediaType}, + media_type(), + begin + {ResT, ResS, ResP} = parse_content_type(MediaType), + ExpectedP = [case ?LOWER(K) of + <<"charset">> -> {<<"charset">>, ?LOWER(unquote(V))}; + LowK -> {LowK, unquote(V)} + end || {K, V, _, _} <- P], + ResT =:= ?LOWER(T) + andalso ResS =:= ?LOWER(S) + andalso ResP =:= ExpectedP + end + ). + +parse_content_type_test_() -> + Tests = [ + {<<"text/html;charset=utf-8">>, + {<<"text">>, <<"html">>, [{<<"charset">>, <<"utf-8">>}]}}, + {<<"text/html;charset=UTF-8">>, + {<<"text">>, <<"html">>, [{<<"charset">>, <<"utf-8">>}]}}, + {<<"Text/HTML;Charset=\"utf-8\"">>, + {<<"text">>, <<"html">>, [{<<"charset">>, <<"utf-8">>}]}}, + {<<"text/html; charset=\"utf-8\"">>, + {<<"text">>, <<"html">>, [{<<"charset">>, <<"utf-8">>}]}}, + {<<"text/html; charset=ISO-8859-4">>, + {<<"text">>, <<"html">>, [{<<"charset">>, <<"iso-8859-4">>}]}}, + {<<"text/plain; charset=iso-8859-4">>, + {<<"text">>, <<"plain">>, [{<<"charset">>, <<"iso-8859-4">>}]}}, + {<<"multipart/form-data \t;Boundary=\"MultipartIsUgly\"">>, + {<<"multipart">>, <<"form-data">>, [ + {<<"boundary">>, <<"MultipartIsUgly">>} + ]}}, + {<<"foo/bar; one=FirstParam; two=SecondParam">>, + {<<"foo">>, <<"bar">>, [ + {<<"one">>, <<"FirstParam">>}, + {<<"two">>, <<"SecondParam">>} + ]}} + ], + [{V, fun() -> R = parse_content_type(V) end} || {V, R} <- Tests]. + +horse_parse_content_type() -> + horse:repeat(200000, + parse_content_type(<<"text/html;charset=utf-8">>) + ). +-endif. + +%% Cookie header. + +-spec parse_cookie(binary()) -> [{binary(), binary()}]. +parse_cookie(Cookie) -> + cow_cookie:parse_cookie(Cookie). + +%% Date header. + +-spec parse_date(binary()) -> calendar:datetime(). +parse_date(Date) -> + cow_date:parse_date(Date). + +-ifdef(TEST). +parse_date_test_() -> + Tests = [ + {<<"Tue, 15 Nov 1994 08:12:31 GMT">>, {{1994, 11, 15}, {8, 12, 31}}} + ], + [{V, fun() -> R = parse_date(V) end} || {V, R} <- Tests]. +-endif. + +%% ETag header. + +-spec parse_etag(binary()) -> etag(). +parse_etag(<< $W, $/, $", R/bits >>) -> + etag(R, weak, <<>>); +parse_etag(<< $", R/bits >>) -> + etag(R, strong, <<>>). + +etag(<< $" >>, Strength, Tag) -> + {Strength, Tag}; +etag(<< C, R/bits >>, Strength, Tag) when ?IS_ETAGC(C) -> + etag(R, Strength, << Tag/binary, C >>). + +-ifdef(TEST). +etagc() -> + ?SUCHTHAT(C, integer(16#21, 16#ff), C =/= 16#22 andalso C =/= 16#7f). 
+ +etag() -> + ?LET({Strength, Tag}, + {elements([weak, strong]), list(etagc())}, + begin + TagBin = list_to_binary(Tag), + {{Strength, TagBin}, + case Strength of + weak -> << $W, $/, $", TagBin/binary, $" >>; + strong -> << $", TagBin/binary, $" >> + end} + end). + +prop_parse_etag() -> + ?FORALL({Tag, TagBin}, + etag(), + Tag =:= parse_etag(TagBin)). + +parse_etag_test_() -> + Tests = [ + {<<"\"xyzzy\"">>, {strong, <<"xyzzy">>}}, + {<<"W/\"xyzzy\"">>, {weak, <<"xyzzy">>}}, + {<<"\"\"">>, {strong, <<>>}} + ], + [{V, fun() -> R = parse_etag(V) end} || {V, R} <- Tests]. + +parse_etag_error_test_() -> + Tests = [ + <<>>, + <<"\"">>, + <<"W">>, + <<"W/">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_etag(V)) end} || V <- Tests]. + +horse_parse_etag() -> + horse:repeat(200000, + parse_etag(<<"W/\"xyzzy\"">>) + ). +-endif. + +%% Expect header. + +-spec parse_expect(binary()) -> continue. +parse_expect(<<"100-continue">>) -> + continue; +parse_expect(<<"100-", C, O, N, T, I, M, U, E >>) + when (C =:= $C) or (C =:= $c), (O =:= $O) or (O =:= $o), + (N =:= $N) or (N =:= $n), (T =:= $T) or (T =:= $t), + (I =:= $I) or (I =:= $i), (M =:= $N) or (M =:= $n), + (U =:= $U) or (U =:= $u), (E =:= $E) or (E =:= $e) -> + continue. + +-ifdef(TEST). +expect() -> + ?LET(E, + [$1, $0, $0, $-, + elements([$c, $C]), elements([$o, $O]), elements([$n, $N]), + elements([$t, $T]), elements([$i, $I]), elements([$n, $N]), + elements([$u, $U]), elements([$e, $E])], + list_to_binary(E)). + +prop_parse_expect() -> + ?FORALL(E, expect(), continue =:= parse_expect(E)). + +parse_expect_test_() -> + Tests = [ + <<"100-continue">>, + <<"100-CONTINUE">>, + <<"100-Continue">>, + <<"100-CoNtInUe">> + ], + [{V, fun() -> continue = parse_expect(V) end} || V <- Tests]. + +parse_expect_error_test_() -> + Tests = [ + <<>>, + <<" ">>, + <<"200-OK">>, + <<"Cookies">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_expect(V)) end} || V <- Tests]. + +horse_parse_expect() -> + horse:repeat(200000, + parse_expect(<<"100-continue">>) + ). +-endif. + +%% Expires header. +%% +%% Recipients must interpret invalid date formats as a date +%% in the past. The value "0" is commonly used. + +-spec parse_expires(binary()) -> calendar:datetime(). +parse_expires(<<"0">>) -> + {{1, 1, 1}, {0, 0, 0}}; +parse_expires(Expires) -> + try + cow_date:parse_date(Expires) + catch _:_ -> + {{1, 1, 1}, {0, 0, 0}} + end. + +-ifdef(TEST). +parse_expires_test_() -> + Tests = [ + {<<"0">>, {{1, 1, 1}, {0, 0, 0}}}, + {<<"Thu, 01 Dec 1994 nope invalid">>, {{1, 1, 1}, {0, 0, 0}}}, + {<<"Thu, 01 Dec 1994 16:00:00 GMT">>, {{1994, 12, 1}, {16, 0, 0}}} + ], + [{V, fun() -> R = parse_expires(V) end} || {V, R} <- Tests]. + +horse_parse_expires_0() -> + horse:repeat(200000, + parse_expires(<<"0">>) + ). + +horse_parse_expires_invalid() -> + horse:repeat(200000, + parse_expires(<<"Thu, 01 Dec 1994 nope invalid">>) + ). +-endif. + +%% Host header. +%% +%% We only seek to have legal characters and separate the +%% host and port values. The number of segments in the host +%% or the size of each segment is not checked. +%% +%% There is no way to distinguish IPv4 addresses from regular +%% names until the last segment is reached therefore we do not +%% differentiate them. +%% +%% The following valid hosts are currently rejected: IPv6 +%% addresses with a zone identifier; IPvFuture addresses; +%% and percent-encoded addresses. + +-spec parse_host(binary()) -> {binary(), 0..65535 | undefined}. 
+parse_host(<< $[, R/bits >>) -> + ipv6_address(R, << $[ >>); +parse_host(Host) -> + reg_name(Host, <<>>). + +ipv6_address(<< $] >>, IP) -> {<< IP/binary, $] >>, undefined}; +ipv6_address(<< $], $:, Port/bits >>, IP) -> {<< IP/binary, $] >>, binary_to_integer(Port)}; +ipv6_address(<< C, R/bits >>, IP) when ?IS_HEX(C) or (C =:= $:) or (C =:= $.) -> + ?LOWER(ipv6_address, R, IP). + +reg_name(<<>>, Name) -> {Name, undefined}; +reg_name(<< $:, Port/bits >>, Name) -> {Name, binary_to_integer(Port)}; +reg_name(<< C, R/bits >>, Name) when ?IS_URI_UNRESERVED(C) or ?IS_URI_SUB_DELIMS(C) -> + ?LOWER(reg_name, R, Name). + +-ifdef(TEST). +host_chars() -> "!$&'()*+,-.0123456789;=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz~". +host() -> vector(1, 255, elements(host_chars())). + +host_port() -> + ?LET({Host, Port}, + {host(), oneof([undefined, integer(1, 65535)])}, + begin + HostBin = list_to_binary(Host), + {{?LOWER(HostBin), Port}, + case Port of + undefined -> HostBin; + _ -> << HostBin/binary, $:, (integer_to_binary(Port))/binary >> + end} + end). + +prop_parse_host() -> + ?FORALL({Res, Host}, host_port(), Res =:= parse_host(Host)). + +parse_host_test_() -> + Tests = [ + {<<>>, {<<>>, undefined}}, + {<<"www.example.org:8080">>, {<<"www.example.org">>, 8080}}, + {<<"www.example.org">>, {<<"www.example.org">>, undefined}}, + {<<"192.0.2.1:8080">>, {<<"192.0.2.1">>, 8080}}, + {<<"192.0.2.1">>, {<<"192.0.2.1">>, undefined}}, + {<<"[2001:db8::1]:8080">>, {<<"[2001:db8::1]">>, 8080}}, + {<<"[2001:db8::1]">>, {<<"[2001:db8::1]">>, undefined}}, + {<<"[::ffff:192.0.2.1]:8080">>, {<<"[::ffff:192.0.2.1]">>, 8080}}, + {<<"[::ffff:192.0.2.1]">>, {<<"[::ffff:192.0.2.1]">>, undefined}} + ], + [{V, fun() -> R = parse_host(V) end} || {V, R} <- Tests]. + +horse_parse_host_blue_example_org() -> + horse:repeat(200000, + parse_host(<<"blue.example.org:8080">>) + ). + +horse_parse_host_ipv4() -> + horse:repeat(200000, + parse_host(<<"192.0.2.1:8080">>) + ). + +horse_parse_host_ipv6() -> + horse:repeat(200000, + parse_host(<<"[2001:db8::1]:8080">>) + ). + +horse_parse_host_ipv6_v4() -> + horse:repeat(200000, + parse_host(<<"[::ffff:192.0.2.1]:8080">>) + ). +-endif. + +%% HTTP2-Settings header. + +-spec parse_http2_settings(binary()) -> map(). +parse_http2_settings(HTTP2Settings) -> + cow_http2:parse_settings_payload(base64:decode(HTTP2Settings)). + +%% If-Match header. + +-spec parse_if_match(binary()) -> '*' | [etag()]. +parse_if_match(<<"*">>) -> + '*'; +parse_if_match(IfMatch) -> + nonempty(etag_list(IfMatch, [])). + +etag_list(<<>>, Acc) -> lists:reverse(Acc); +etag_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> etag_list(R, Acc); +etag_list(<< $W, $/, $", R/bits >>, Acc) -> etag(R, Acc, weak, <<>>); +etag_list(<< $", R/bits >>, Acc) -> etag(R, Acc, strong, <<>>). + +etag(<< $", R/bits >>, Acc, Strength, Tag) -> etag_list_sep(R, [{Strength, Tag}|Acc]); +etag(<< C, R/bits >>, Acc, Strength, Tag) when ?IS_ETAGC(C) -> etag(R, Acc, Strength, << Tag/binary, C >>). + +etag_list_sep(<<>>, Acc) -> lists:reverse(Acc); +etag_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> etag_list_sep(R, Acc); +etag_list_sep(<< $,, R/bits >>, Acc) -> etag_list(R, Acc). + +-ifdef(TEST). +prop_parse_if_match() -> + ?FORALL(L, + non_empty(list(etag())), + begin + << _, IfMatch/binary >> = iolist_to_binary([[$,, T] || {_, T} <- L]), + ResL = parse_if_match(IfMatch), + CheckedL = [T =:= ResT || {{T, _}, ResT} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). 
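+
+%% A short sketch, not an upstream test (the usage_sketch_* name is ours):
+%% a typical conditional-request check compares the resource's current
+%% entity-tag against the parsed list, with '*' accepting any resource.
+usage_sketch_if_match_test() ->
+	Current = {strong, <<"xyzzy">>},
+	true = lists:member(Current, parse_if_match(<<"\"xyzzy\", \"r2d2xxxx\"">>)),
+	'*' = parse_if_match(<<"*">>).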
+ +parse_if_match_test_() -> + Tests = [ + {<<"\"xyzzy\"">>, [{strong, <<"xyzzy">>}]}, + {<<"\"xyzzy\", \"r2d2xxxx\", \"c3piozzzz\"">>, + [{strong, <<"xyzzy">>}, {strong, <<"r2d2xxxx">>}, {strong, <<"c3piozzzz">>}]}, + {<<"*">>, '*'} + ], + [{V, fun() -> R = parse_if_match(V) end} || {V, R} <- Tests]. + +parse_if_match_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_if_match(V)) end} || V <- Tests]. + +horse_parse_if_match() -> + horse:repeat(200000, + parse_if_match(<<"\"xyzzy\", \"r2d2xxxx\", \"c3piozzzz\"">>) + ). +-endif. + +%% If-Modified-Since header. + +-spec parse_if_modified_since(binary()) -> calendar:datetime(). +parse_if_modified_since(IfModifiedSince) -> + cow_date:parse_date(IfModifiedSince). + +-ifdef(TEST). +parse_if_modified_since_test_() -> + Tests = [ + {<<"Sat, 29 Oct 1994 19:43:31 GMT">>, {{1994, 10, 29}, {19, 43, 31}}} + ], + [{V, fun() -> R = parse_if_modified_since(V) end} || {V, R} <- Tests]. +-endif. + +%% If-None-Match header. + +-spec parse_if_none_match(binary()) -> '*' | [etag()]. +parse_if_none_match(<<"*">>) -> + '*'; +parse_if_none_match(IfNoneMatch) -> + nonempty(etag_list(IfNoneMatch, [])). + +-ifdef(TEST). +parse_if_none_match_test_() -> + Tests = [ + {<<"\"xyzzy\"">>, [{strong, <<"xyzzy">>}]}, + {<<"W/\"xyzzy\"">>, [{weak, <<"xyzzy">>}]}, + {<<"\"xyzzy\", \"r2d2xxxx\", \"c3piozzzz\"">>, + [{strong, <<"xyzzy">>}, {strong, <<"r2d2xxxx">>}, {strong, <<"c3piozzzz">>}]}, + {<<"W/\"xyzzy\", W/\"r2d2xxxx\", W/\"c3piozzzz\"">>, + [{weak, <<"xyzzy">>}, {weak, <<"r2d2xxxx">>}, {weak, <<"c3piozzzz">>}]}, + {<<"*">>, '*'} + ], + [{V, fun() -> R = parse_if_none_match(V) end} || {V, R} <- Tests]. + +parse_if_none_match_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_if_none_match(V)) end} || V <- Tests]. + +horse_parse_if_none_match() -> + horse:repeat(200000, + parse_if_none_match(<<"W/\"xyzzy\", W/\"r2d2xxxx\", W/\"c3piozzzz\"">>) + ). +-endif. + +%% If-Range header. + +-spec parse_if_range(binary()) -> etag() | calendar:datetime(). +parse_if_range(<< $W, $/, $", R/bits >>) -> + etag(R, weak, <<>>); +parse_if_range(<< $", R/bits >>) -> + etag(R, strong, <<>>); +parse_if_range(IfRange) -> + cow_date:parse_date(IfRange). + +-ifdef(TEST). +parse_if_range_test_() -> + Tests = [ + {<<"W/\"xyzzy\"">>, {weak, <<"xyzzy">>}}, + {<<"\"xyzzy\"">>, {strong, <<"xyzzy">>}}, + {<<"Sat, 29 Oct 1994 19:43:31 GMT">>, {{1994, 10, 29}, {19, 43, 31}}} + ], + [{V, fun() -> R = parse_if_range(V) end} || {V, R} <- Tests]. + +parse_if_range_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_if_range(V)) end} || V <- Tests]. + +horse_parse_if_range_etag() -> + horse:repeat(200000, + parse_if_range(<<"\"xyzzy\"">>) + ). + +horse_parse_if_range_date() -> + horse:repeat(200000, + parse_if_range(<<"Sat, 29 Oct 1994 19:43:31 GMT">>) + ). +-endif. + +%% If-Unmodified-Since header. + +-spec parse_if_unmodified_since(binary()) -> calendar:datetime(). +parse_if_unmodified_since(IfModifiedSince) -> + cow_date:parse_date(IfModifiedSince). + +-ifdef(TEST). +parse_if_unmodified_since_test_() -> + Tests = [ + {<<"Sat, 29 Oct 1994 19:43:31 GMT">>, {{1994, 10, 29}, {19, 43, 31}}} + ], + [{V, fun() -> R = parse_if_unmodified_since(V) end} || {V, R} <- Tests]. +-endif. + +%% Last-Modified header. + +-spec parse_last_modified(binary()) -> calendar:datetime(). +parse_last_modified(LastModified) -> + cow_date:parse_date(LastModified). + +-ifdef(TEST). 
+parse_last_modified_test_() -> + Tests = [ + {<<"Tue, 15 Nov 1994 12:45:26 GMT">>, {{1994, 11, 15}, {12, 45, 26}}} + ], + [{V, fun() -> R = parse_last_modified(V) end} || {V, R} <- Tests]. +-endif. + +%% Link header. + +-spec parse_link(binary()) -> [cow_link:link()]. +parse_link(Link) -> + cow_link:parse_link(Link). + +%% Max-Forwards header. + +-spec parse_max_forwards(binary()) -> non_neg_integer(). +parse_max_forwards(MaxForwards) -> + I = binary_to_integer(MaxForwards), + true = I >= 0, + I. + +-ifdef(TEST). +prop_parse_max_forwards() -> + ?FORALL( + X, + non_neg_integer(), + X =:= parse_max_forwards(integer_to_binary(X)) + ). + +parse_max_forwards_test_() -> + Tests = [ + {<<"0">>, 0}, + {<<"42">>, 42}, + {<<"69">>, 69}, + {<<"1337">>, 1337}, + {<<"1234567890">>, 1234567890} + ], + [{V, fun() -> R = parse_max_forwards(V) end} || {V, R} <- Tests]. + +parse_max_forwards_error_test_() -> + Tests = [ + <<>>, + <<"123, 123">>, + <<"4.17">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_max_forwards(V)) end} || V <- Tests]. +-endif. + +%% Origin header. + +%% According to the RFC6454 we should generate +%% a fresh globally unique identifier and return that value if: +%% - URI does not use a hierarchical element as a naming authority +%% or the URI is not an absolute URI +%% - the implementation doesn't support the protocol given by uri-scheme +%% Thus, erlang reference represents a GUID here. +%% +%% We only seek to have legal characters and separate the +%% host and port values. The number of segments in the host +%% or the size of each segment is not checked. +%% +%% There is no way to distinguish IPv4 addresses from regular +%% names until the last segment is reached therefore we do not +%% differentiate them. +%% +%% @todo The following valid hosts are currently rejected: IPv6 +%% addresses with a zone identifier; IPvFuture addresses; +%% and percent-encoded addresses. + +-spec parse_origin(binary()) -> [{binary(), binary(), 0..65535} | reference()]. +parse_origin(Origins) -> + nonempty(origin_scheme(Origins, [])). + +origin_scheme(<<>>, Acc) -> Acc; +origin_scheme(<< "http://", R/bits >>, Acc) -> origin_host(R, Acc, <<"http">>); +origin_scheme(<< "https://", R/bits >>, Acc) -> origin_host(R, Acc, <<"https">>); +origin_scheme(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> origin_scheme(next_origin(R), [make_ref()|Acc]). + +origin_host(<< $[, R/bits >>, Acc, Scheme) -> origin_ipv6_address(R, Acc, Scheme, << $[ >>); +origin_host(Host, Acc, Scheme) -> origin_reg_name(Host, Acc, Scheme, <<>>). + +origin_ipv6_address(<< $] >>, Acc, Scheme, IP) -> + lists:reverse([{Scheme, << IP/binary, $] >>, default_port(Scheme)}|Acc]); +origin_ipv6_address(<< $], $\s, R/bits >>, Acc, Scheme, IP) -> + origin_scheme(R, [{Scheme, << IP/binary, $] >>, default_port(Scheme)}|Acc]); +origin_ipv6_address(<< $], $:, Port/bits >>, Acc, Scheme, IP) -> + origin_port(Port, Acc, Scheme, << IP/binary, $] >>, <<>>); +origin_ipv6_address(<< C, R/bits >>, Acc, Scheme, IP) when ?IS_HEX(C) or (C =:= $:) or (C =:= $.) -> + ?LOWER(origin_ipv6_address, R, Acc, Scheme, IP). 
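+
+%% A small usage sketch, not part of the upstream tests (the usage_sketch_*
+%% name is ours): known schemes yield {Scheme, Host, Port} with default
+%% ports filled in, while opaque origins such as "null" come back as
+%% fresh references.
+-ifdef(TEST).
+usage_sketch_origin_test() ->
+	[{<<"http">>, <<"example.org">>, 80},
+		{<<"https">>, <<"blue.example.com">>, 8080}] =
+		parse_origin(<<"http://example.org https://blue.example.com:8080">>),
+	[Ref] = parse_origin(<<"null">>),
+	true = is_reference(Ref).
+-endif.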
+ +origin_reg_name(<<>>, Acc, Scheme, Name) -> + lists:reverse([{Scheme, Name, default_port(Scheme)}|Acc]); +origin_reg_name(<< $\s, R/bits >>, Acc, Scheme, Name) -> + origin_scheme(R, [{Scheme, Name, default_port(Scheme)}|Acc]); +origin_reg_name(<< $:, Port/bits >>, Acc, Scheme, Name) -> + origin_port(Port, Acc, Scheme, Name, <<>>); +origin_reg_name(<< C, R/bits >>, Acc, Scheme, Name) when ?IS_URI_UNRESERVED(C) or ?IS_URI_SUB_DELIMS(C) -> + ?LOWER(origin_reg_name, R, Acc, Scheme, Name). + +origin_port(<<>>, Acc, Scheme, Host, Port) -> + lists:reverse([{Scheme, Host, binary_to_integer(Port)}|Acc]); +origin_port(<< $\s, R/bits >>, Acc, Scheme, Host, Port) -> + origin_scheme(R, [{Scheme, Host, binary_to_integer(Port)}|Acc]); +origin_port(<< C, R/bits >>, Acc, Scheme, Host, Port) when ?IS_DIGIT(C) -> + origin_port(R, Acc, Scheme, Host, << Port/binary, C >>). + +next_origin(<<>>) -> <<>>; +next_origin(<< $\s, C, R/bits >>) when ?IS_TOKEN(C) -> << C, R/bits >>; +next_origin(<< C, R/bits >>) when ?IS_TOKEN(C) or (C =:= $:) or (C =:= $/) -> next_origin(R). + +default_port(<< "http" >>) -> 80; +default_port(<< "https" >>) -> 443. + +-ifdef(TEST). +scheme() -> oneof([<<"http">>, <<"https">>]). + +scheme_host_port() -> + ?LET({Scheme, Host, Port}, + {scheme(), host(), integer(1, 65535)}, + begin + HostBin = list_to_binary(Host), + {[{Scheme, ?LOWER(HostBin), Port}], + case default_port(Scheme) of + Port -> << Scheme/binary, "://", HostBin/binary>>; + _ -> << Scheme/binary, "://", HostBin/binary, $:, (integer_to_binary(Port))/binary >> + end} + end). + +prop_parse_origin() -> + ?FORALL({Res, Origin}, scheme_host_port(), Res =:= parse_origin(Origin)). + +parse_origin_test_() -> + Tests = [ + {<<"http://www.example.org:8080">>, [{<<"http">>, <<"www.example.org">>, 8080}]}, + {<<"http://www.example.org">>, [{<<"http">>, <<"www.example.org">>, 80}]}, + {<<"http://192.0.2.1:8080">>, [{<<"http">>, <<"192.0.2.1">>, 8080}]}, + {<<"http://192.0.2.1">>, [{<<"http">>, <<"192.0.2.1">>, 80}]}, + {<<"http://[2001:db8::1]:8080">>, [{<<"http">>, <<"[2001:db8::1]">>, 8080}]}, + {<<"http://[2001:db8::1]">>, [{<<"http">>, <<"[2001:db8::1]">>, 80}]}, + {<<"http://[::ffff:192.0.2.1]:8080">>, [{<<"http">>, <<"[::ffff:192.0.2.1]">>, 8080}]}, + {<<"http://[::ffff:192.0.2.1]">>, [{<<"http">>, <<"[::ffff:192.0.2.1]">>, 80}]}, + {<<"http://example.org https://blue.example.com:8080">>, + [{<<"http">>, <<"example.org">>, 80}, + {<<"https">>, <<"blue.example.com">>, 8080}]} + ], + [{V, fun() -> R = parse_origin(V) end} || {V, R} <- Tests]. + +parse_origin_reference_test_() -> + Tests = [ + <<"null">>, + <<"httpx://example.org:80">>, + <<"httpx://example.org:80 null">>, + <<"null null">> + ], + [{V, fun() -> [true = is_reference(Ref) || Ref <- parse_origin(V)] end} || V <- Tests]. + +parse_origin_error_test_() -> + Tests = [ + <<>>, + <<"null", $\t, "null">>, + <<"null", $\s, $\s, "null">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_origin(V)) end} || V <- Tests]. + +horse_parse_origin_blue_example_org() -> + horse:repeat(200000, + parse_origin(<<"http://blue.example.org:8080">>) + ). + +horse_parse_origin_ipv4() -> + horse:repeat(200000, + parse_origin(<<"http://192.0.2.1:8080">>) + ). + +horse_parse_origin_ipv6() -> + horse:repeat(200000, + parse_origin(<<"http://[2001:db8::1]:8080">>) + ). + +horse_parse_origin_ipv6_v4() -> + horse:repeat(200000, + parse_origin(<<"http://[::ffff:192.0.2.1]:8080">>) + ). + +horse_parse_origin_null() -> + horse:repeat(200000, + parse_origin(<<"null">>) + ). +-endif. + +%% Pragma header. 
+%% +%% Legacy header kept for backward compatibility with HTTP/1.0 caches. +%% Only the "no-cache" directive was ever specified, and only for +%% request messages. +%% +%% We take a large shortcut in the parsing of this header, expecting +%% an exact match of "no-cache". + +-spec parse_pragma(binary()) -> cache | no_cache. +parse_pragma(<<"no-cache">>) -> no_cache; +parse_pragma(_) -> cache. + +%% Proxy-Authenticate header. +%% +%% Alias of parse_www_authenticate/1 due to identical syntax. + +-spec parse_proxy_authenticate(binary()) -> [{basic, binary()} + | {bearer | digest | binary(), [{binary(), binary()}]}]. +parse_proxy_authenticate(ProxyAuthenticate) -> + parse_www_authenticate(ProxyAuthenticate). + +%% Proxy-Authorization header. +%% +%% Alias of parse_authorization/1 due to identical syntax. + +-spec parse_proxy_authorization(binary()) + -> {basic, binary(), binary()} + | {bearer, binary()} + | {digest, [{binary(), binary()}]}. +parse_proxy_authorization(ProxyAuthorization) -> + parse_authorization(ProxyAuthorization). + +%% Range header. + +-spec parse_range(binary()) + -> {bytes, [{non_neg_integer(), non_neg_integer() | infinity} | neg_integer()]} + | {binary(), binary()}. +parse_range(<<"bytes=", R/bits >>) -> + bytes_range_set(R, []); +parse_range(<< C, R/bits >>) when ?IS_TOKEN(C) -> + ?LOWER(other_range_unit, R, <<>>). + +bytes_range_set(<<>>, Acc) -> {bytes, lists:reverse(Acc)}; +bytes_range_set(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> bytes_range_set(R, Acc); +bytes_range_set(<< $-, C, R/bits >>, Acc) when ?IS_DIGIT(C) -> bytes_range_suffix_spec(R, Acc, C - $0); +bytes_range_set(<< C, R/bits >>, Acc) when ?IS_DIGIT(C) -> bytes_range_spec(R, Acc, C - $0). + +bytes_range_spec(<< $-, C, R/bits >>, Acc, First) when ?IS_DIGIT(C) -> bytes_range_spec_last(R, Acc, First, C - $0); +bytes_range_spec(<< $-, R/bits >>, Acc, First) -> bytes_range_set_sep(R, [{First, infinity}|Acc]); +bytes_range_spec(<< C, R/bits >>, Acc, First) when ?IS_DIGIT(C) -> bytes_range_spec(R, Acc, First * 10 + C - $0). + +bytes_range_spec_last(<< C, R/bits >>, Acc, First, Last) when ?IS_DIGIT(C) -> bytes_range_spec_last(R, Acc, First, Last * 10 + C - $0); +bytes_range_spec_last(R, Acc, First, Last) -> bytes_range_set_sep(R, [{First, Last}|Acc]). + +bytes_range_suffix_spec(<< C, R/bits >>, Acc, Suffix) when ?IS_DIGIT(C) -> bytes_range_suffix_spec(R, Acc, Suffix * 10 + C - $0); +bytes_range_suffix_spec(R, Acc, Suffix) -> bytes_range_set_sep(R, [-Suffix|Acc]). + +bytes_range_set_sep(<<>>, Acc) -> {bytes, lists:reverse(Acc)}; +bytes_range_set_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> bytes_range_set_sep(R, Acc); +bytes_range_set_sep(<< $,, R/bits >>, Acc) -> bytes_range_set(R, Acc). + +other_range_unit(<< $=, C, R/bits >>, U) when ?IS_VCHAR(C) -> + other_range_set(R, U, << C >>); +other_range_unit(<< C, R/bits >>, U) when ?IS_TOKEN(C) -> + ?LOWER(other_range_unit, R, U). + +other_range_set(<<>>, U, S) -> + {U, S}; +other_range_set(<< C, R/bits >>, U, S) when ?IS_VCHAR(C) -> + other_range_set(R, U, << S/binary, C >>). + +-ifdef(TEST). 
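+
+%% A short sketch, not an upstream test (the usage_sketch_* name is ours):
+%% byte ranges come back as {From, To} pairs, open-ended ranges use
+%% infinity, suffix ranges are negative integers, and unknown units are
+%% returned verbatim.
+usage_sketch_range_test() ->
+	{bytes, [{0, 499}, {9500, infinity}, -500]} =
+		parse_range(<<"bytes=0-499, 9500-, -500">>),
+	{<<"books">>, <<"I-III,V-IX">>} = parse_range(<<"books=I-III,V-IX">>).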
+bytes_range() -> + ?LET(BytesSet, + non_empty(list(oneof([ + ?SUCHTHAT({First, Last}, {pos_integer(), pos_integer()}, First =< Last), + {pos_integer(), infinity}, + ?LET(I, pos_integer(), -I) + ]))), + {{bytes, BytesSet}, begin + << _, Set/bits >> = iolist_to_binary([ + case Spec of + {First, infinity} -> [$,, integer_to_binary(First), $-]; + {First, Last} -> [$,, integer_to_binary(First), $-, integer_to_binary(Last)]; + Suffix -> [$,, integer_to_binary(Suffix)] + end || Spec <- BytesSet]), + <<"bytes=", Set/binary >> + end}). + +other_range() -> + ?LET(Range = {Unit, Set}, + {token(), ?LET(L, non_empty(list(vchar())), list_to_binary(L))}, + {Range, << Unit/binary, $=, Set/binary >>}). + +range() -> + oneof([ + bytes_range(), + other_range() + ]). + +prop_parse_range() -> + ?FORALL({Range, RangeBin}, + range(), + begin + Range2 = case Range of + {bytes, _} -> Range; + {Unit, Set} -> {?LOWER(Unit), Set} + end, + Range2 =:= parse_range(RangeBin) + end). + +parse_range_test_() -> + Tests = [ + {<<"bytes=0-499">>, {bytes, [{0, 499}]}}, + {<<"bytes=500-999">>, {bytes, [{500, 999}]}}, + {<<"bytes=-500">>, {bytes, [-500]}}, + {<<"bytes=9500-">>, {bytes, [{9500, infinity}]}}, + {<<"bytes=0-0,-1">>, {bytes, [{0, 0}, -1]}}, + {<<"bytes=500-600,601-999">>, {bytes, [{500, 600}, {601, 999}]}}, + {<<"bytes=500-700,601-999">>, {bytes, [{500, 700}, {601, 999}]}}, + {<<"books=I-III,V-IX">>, {<<"books">>, <<"I-III,V-IX">>}} + ], + [{V, fun() -> R = parse_range(V) end} || {V, R} <- Tests]. + +parse_range_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_range(V)) end} || V <- Tests]. + +horse_parse_range_first_last() -> + horse:repeat(200000, + parse_range(<<"bytes=500-999">>) + ). + +horse_parse_range_infinity() -> + horse:repeat(200000, + parse_range(<<"bytes=9500-">>) + ). + +horse_parse_range_suffix() -> + horse:repeat(200000, + parse_range(<<"bytes=-500">>) + ). + +horse_parse_range_two() -> + horse:repeat(200000, + parse_range(<<"bytes=500-700,601-999">>) + ). + +horse_parse_range_other() -> + horse:repeat(200000, + parse_range(<<"books=I-III,V-IX">>) + ). +-endif. + +%% Retry-After header. + +-spec parse_retry_after(binary()) -> non_neg_integer() | calendar:datetime(). +parse_retry_after(RetryAfter = << D, _/bits >>) when ?IS_DIGIT(D) -> + I = binary_to_integer(RetryAfter), + true = I >= 0, + I; +parse_retry_after(RetryAfter) -> + cow_date:parse_date(RetryAfter). + +-ifdef(TEST). +parse_retry_after_test_() -> + Tests = [ + {<<"Fri, 31 Dec 1999 23:59:59 GMT">>, {{1999, 12, 31}, {23, 59, 59}}}, + {<<"120">>, 120} + ], + [{V, fun() -> R = parse_retry_after(V) end} || {V, R} <- Tests]. + +parse_retry_after_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_retry_after(V)) end} || V <- Tests]. + +horse_parse_retry_after_date() -> + horse:repeat(200000, + parse_retry_after(<<"Fri, 31 Dec 1999 23:59:59 GMT">>) + ). + +horse_parse_retry_after_delay_seconds() -> + horse:repeat(200000, + parse_retry_after(<<"120">>) + ). +-endif. + +%% Sec-WebSocket-Accept header. +%% +%% The argument is returned without any processing. This value is +%% expected to be matched directly by the client so no parsing is +%% needed. + +-spec parse_sec_websocket_accept(binary()) -> binary(). +parse_sec_websocket_accept(SecWebSocketAccept) -> + SecWebSocketAccept. + +%% Sec-WebSocket-Extensions header. + +-spec parse_sec_websocket_extensions(binary()) -> [{binary(), [binary() | {binary(), binary()}]}]. 
+parse_sec_websocket_extensions(SecWebSocketExtensions) -> + nonempty(ws_extension_list(SecWebSocketExtensions, [])). + +ws_extension_list(<<>>, Acc) -> lists:reverse(Acc); +ws_extension_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> ws_extension_list(R, Acc); +ws_extension_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> ws_extension(R, Acc, << C >>). + +ws_extension(<< C, R/bits >>, Acc, E) when ?IS_TOKEN(C) -> ws_extension(R, Acc, << E/binary, C >>); +ws_extension(R, Acc, E) -> ws_extension_param_sep(R, Acc, E, []). + +ws_extension_param_sep(<<>>, Acc, E, P) -> lists:reverse([{E, lists:reverse(P)}|Acc]); +ws_extension_param_sep(<< $,, R/bits >>, Acc, E, P) -> ws_extension_list(R, [{E, lists:reverse(P)}|Acc]); +ws_extension_param_sep(<< $;, R/bits >>, Acc, E, P) -> ws_extension_before_param(R, Acc, E, P); +ws_extension_param_sep(<< C, R/bits >>, Acc, E, P) when ?IS_WS(C) -> ws_extension_param_sep(R, Acc, E, P). + +ws_extension_before_param(<< C, R/bits >>, Acc, E, P) when ?IS_WS(C) -> ws_extension_before_param(R, Acc, E, P); +ws_extension_before_param(<< C, R/bits >>, Acc, E, P) when ?IS_TOKEN(C) -> ws_extension_param(R, Acc, E, P, << C >>). + +ws_extension_param(<< $=, $", R/bits >>, Acc, E, P, K) -> ws_extension_quoted(R, Acc, E, P, K, <<>>); +ws_extension_param(<< $=, C, R/bits >>, Acc, E, P, K) when ?IS_TOKEN(C) -> ws_extension_value(R, Acc, E, P, K, << C >>); +ws_extension_param(<< C, R/bits >>, Acc, E, P, K) when ?IS_TOKEN(C) -> ws_extension_param(R, Acc, E, P, << K/binary, C >>); +ws_extension_param(R, Acc, E, P, K) -> ws_extension_param_sep(R, Acc, E, [K|P]). + +ws_extension_quoted(<< $", R/bits >>, Acc, E, P, K, V) -> ws_extension_param_sep(R, Acc, E, [{K, V}|P]); +ws_extension_quoted(<< $\\, C, R/bits >>, Acc, E, P, K, V) when ?IS_TOKEN(C) -> ws_extension_quoted(R, Acc, E, P, K, << V/binary, C >>); +ws_extension_quoted(<< C, R/bits >>, Acc, E, P, K, V) when ?IS_TOKEN(C) -> ws_extension_quoted(R, Acc, E, P, K, << V/binary, C >>). + +ws_extension_value(<< C, R/bits >>, Acc, E, P, K, V) when ?IS_TOKEN(C) -> ws_extension_value(R, Acc, E, P, K, << V/binary, C >>); +ws_extension_value(R, Acc, E, P, K, V) -> ws_extension_param_sep(R, Acc, E, [{K, V}|P]). + +-ifdef(TEST). +quoted_token() -> + ?LET(T, + non_empty(list(frequency([ + {99, tchar()}, + {1, [$\\, tchar()]} + ]))), + [$", T, $"]). + +ws_extension() -> + ?LET({E, PL}, + {token(), small_list({ows(), ows(), oneof([token(), {token(), oneof([token(), quoted_token()])}])})}, + {E, PL, iolist_to_binary([E, + [case P of + {OWS1, OWS2, {K, V}} -> [OWS1, $;, OWS2, K, $=, V]; + {OWS1, OWS2, K} -> [OWS1, $;, OWS2, K] + end || P <- PL] + ])}). + +prop_parse_sec_websocket_extensions() -> + ?FORALL(L, + vector(1, 50, ws_extension()), + begin + << _, SecWebsocketExtensions/binary >> = iolist_to_binary([[$,, E] || {_, _, E} <- L]), + ResL = parse_sec_websocket_extensions(SecWebsocketExtensions), + CheckedL = [begin + ExpectedPL = [case P of + {_, _, {K, V}} -> {K, unquote(V)}; + {_, _, K} -> K + end || P <- PL], + E =:= ResE andalso ExpectedPL =:= ResPL + end || {{E, PL, _}, {ResE, ResPL}} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). 
+ +parse_sec_websocket_extensions_test_() -> + Tests = [ + {<<"foo">>, [{<<"foo">>, []}]}, + {<<"bar; baz=2">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}]}]}, + {<<"foo, bar; baz=2">>, [{<<"foo">>, []}, {<<"bar">>, [{<<"baz">>, <<"2">>}]}]}, + {<<"deflate-stream">>, [{<<"deflate-stream">>, []}]}, + {<<"mux; max-channels=4; flow-control, deflate-stream">>, + [{<<"mux">>, [{<<"max-channels">>, <<"4">>}, <<"flow-control">>]}, {<<"deflate-stream">>, []}]}, + {<<"private-extension">>, [{<<"private-extension">>, []}]} + ], + [{V, fun() -> R = parse_sec_websocket_extensions(V) end} || {V, R} <- Tests]. + +parse_sec_websocket_extensions_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_sec_websocket_extensions(V)) end} + || V <- Tests]. + +horse_parse_sec_websocket_extensions() -> + horse:repeat(200000, + parse_sec_websocket_extensions(<<"mux; max-channels=4; flow-control, deflate-stream">>) + ). +-endif. + +%% Sec-WebSocket-Key header. +%% +%% The argument is returned without any processing. This value is +%% expected to be prepended to a static value, the result of which +%% hashed to form a new base64 value returned in Sec-WebSocket-Accept, +%% therefore no parsing is needed. + +-spec parse_sec_websocket_key(binary()) -> binary(). +parse_sec_websocket_key(SecWebSocketKey) -> + SecWebSocketKey. + +%% Sec-WebSocket-Protocol request header. + +-spec parse_sec_websocket_protocol_req(binary()) -> [binary()]. +parse_sec_websocket_protocol_req(SecWebSocketProtocol) -> + nonempty(token_list(SecWebSocketProtocol, [])). + +-ifdef(TEST). +parse_sec_websocket_protocol_req_test_() -> + Tests = [ + {<<"chat, superchat">>, [<<"chat">>, <<"superchat">>]}, + {<<"Chat, SuperChat">>, [<<"Chat">>, <<"SuperChat">>]} + ], + [{V, fun() -> R = parse_sec_websocket_protocol_req(V) end} || {V, R} <- Tests]. + +parse_sec_websocket_protocol_req_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_sec_websocket_protocol_req(V)) end} + || V <- Tests]. + +horse_parse_sec_websocket_protocol_req() -> + horse:repeat(200000, + parse_sec_websocket_protocol_req(<<"chat, superchat">>) + ). +-endif. + +%% Sec-Websocket-Protocol response header. + +-spec parse_sec_websocket_protocol_resp(binary()) -> binary(). +parse_sec_websocket_protocol_resp(Protocol) -> + true = <<>> =/= Protocol, + ok = validate_token(Protocol), + Protocol. + +-ifdef(TEST). +prop_parse_sec_websocket_protocol_resp() -> + ?FORALL(T, + token(), + T =:= parse_sec_websocket_protocol_resp(T)). + +parse_sec_websocket_protocol_resp_test_() -> + Tests = [ + {<<"chat">>, <<"chat">>}, + {<<"CHAT">>, <<"CHAT">>} + ], + [{V, fun() -> R = parse_sec_websocket_protocol_resp(V) end} || {V, R} <- Tests]. + +parse_sec_websocket_protocol_resp_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_sec_websocket_protocol_resp(V)) end} + || V <- Tests]. + +horse_parse_sec_websocket_protocol_resp() -> + horse:repeat(200000, + parse_sec_websocket_protocol_resp(<<"chat">>) + ). +-endif. + +%% Sec-WebSocket-Version request header. + +-spec parse_sec_websocket_version_req(binary()) -> websocket_version(). +parse_sec_websocket_version_req(SecWebSocketVersion) when byte_size(SecWebSocketVersion) < 4 -> + Version = binary_to_integer(SecWebSocketVersion), + true = Version >= 0 andalso Version =< 255, + Version. + +-ifdef(TEST). +prop_parse_sec_websocket_version_req() -> + ?FORALL(Version, + integer(0, 255), + Version =:= parse_sec_websocket_version_req(integer_to_binary(Version))). 
+ +parse_sec_websocket_version_req_test_() -> + Tests = [ + {<<"13">>, 13}, + {<<"25">>, 25} + ], + [{V, fun() -> R = parse_sec_websocket_version_req(V) end} || {V, R} <- Tests]. + +parse_sec_websocket_version_req_error_test_() -> + Tests = [ + <<>>, + <<" ">>, + <<"7, 8, 13">>, + <<"invalid">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_sec_websocket_version_req(V)) end} + || V <- Tests]. + +horse_parse_sec_websocket_version_req_13() -> + horse:repeat(200000, + parse_sec_websocket_version_req(<<"13">>) + ). + +horse_parse_sec_websocket_version_req_255() -> + horse:repeat(200000, + parse_sec_websocket_version_req(<<"255">>) + ). +-endif. + +%% Sec-WebSocket-Version response header. + +-spec parse_sec_websocket_version_resp(binary()) -> [websocket_version()]. +parse_sec_websocket_version_resp(SecWebSocketVersion) -> + nonempty(ws_version_list(SecWebSocketVersion, [])). + +ws_version_list(<<>>, Acc) -> lists:reverse(Acc); +ws_version_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> ws_version_list(R, Acc); +ws_version_list(<< C, R/bits >>, Acc) when ?IS_DIGIT(C) -> ws_version(R, Acc, C - $0). + +ws_version(<< C, R/bits >>, Acc, V) when ?IS_DIGIT(C) -> ws_version(R, Acc, V * 10 + C - $0); +ws_version(R, Acc, V) -> ws_version_list_sep(R, [V|Acc]). + +ws_version_list_sep(<<>>, Acc) -> lists:reverse(Acc); +ws_version_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> ws_version_list_sep(R, Acc); +ws_version_list_sep(<< $,, R/bits >>, Acc) -> ws_version_list(R, Acc). + +-ifdef(TEST). +sec_websocket_version_resp() -> + ?LET(L, + non_empty(list({ows(), ows(), integer(0, 255)})), + begin + << _, SecWebSocketVersion/binary >> = iolist_to_binary( + [[OWS1, $,, OWS2, integer_to_binary(V)] || {OWS1, OWS2, V} <- L]), + {[V || {_, _, V} <- L], SecWebSocketVersion} + end). + +prop_parse_sec_websocket_version_resp() -> + ?FORALL({L, SecWebSocketVersion}, + sec_websocket_version_resp(), + L =:= parse_sec_websocket_version_resp(SecWebSocketVersion)). + +parse_sec_websocket_version_resp_test_() -> + Tests = [ + {<<"13, 8, 7">>, [13, 8, 7]} + ], + [{V, fun() -> R = parse_sec_websocket_version_resp(V) end} || {V, R} <- Tests]. + +parse_sec_websocket_version_resp_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_sec_websocket_version_resp(V)) end} + || V <- Tests]. + +horse_parse_sec_websocket_version_resp() -> + horse:repeat(200000, + parse_sec_websocket_version_resp(<<"13, 8, 7">>) + ). +-endif. + +%% Set-Cookie header. + +-spec parse_set_cookie(binary()) + -> {ok, binary(), binary(), cow_cookie:cookie_attrs()} + | ignore. +parse_set_cookie(SetCookie) -> + cow_cookie:parse_set_cookie(SetCookie). + +%% TE header. +%% +%% This function does not support parsing of transfer-parameter. + +-spec parse_te(binary()) -> {trailers | no_trailers, [{binary(), qvalue()}]}. +parse_te(TE) -> + te_list(TE, no_trailers, []). + +te_list(<<>>, Trail, Acc) -> {Trail, lists:reverse(Acc)}; +te_list(<< C, R/bits >>, Trail, Acc) when ?IS_WS_COMMA(C) -> te_list(R, Trail, Acc); +te_list(<< "trailers", R/bits >>, Trail, Acc) -> te(R, Trail, Acc, <<"trailers">>); +te_list(<< "compress", R/bits >>, Trail, Acc) -> te(R, Trail, Acc, <<"compress">>); +te_list(<< "deflate", R/bits >>, Trail, Acc) -> te(R, Trail, Acc, <<"deflate">>); +te_list(<< "gzip", R/bits >>, Trail, Acc) -> te(R, Trail, Acc, <<"gzip">>); +te_list(<< C, R/bits >>, Trail, Acc) when ?IS_TOKEN(C) -> + ?LOWER(te, R, Trail, Acc, <<>>). 
+ +te(<<>>, _, Acc, <<"trailers">>) -> {trailers, lists:reverse(Acc)}; +te(<< $,, R/bits >>, _, Acc, <<"trailers">>) -> te_list(R, trailers, Acc); +te(<< $;, R/bits >>, Trail, Acc, T) when T =/= <<"trailers">> -> te_before_weight(R, Trail, Acc, T); +te(<< C, R/bits >>, _, Acc, <<"trailers">>) when ?IS_WS(C) -> te_list_sep(R, trailers, Acc); +te(<< C, R/bits >>, Trail, Acc, T) when ?IS_TOKEN(C) -> + ?LOWER(te, R, Trail, Acc, T); +te(R, Trail, Acc, T) -> te_param_sep(R, Trail, Acc, T). + +te_param_sep(<<>>, Trail, Acc, T) -> {Trail, lists:reverse([{T, 1000}|Acc])}; +te_param_sep(<< $,, R/bits >>, Trail, Acc, T) -> te_list(R, Trail, [{T, 1000}|Acc]); +te_param_sep(<< C, R/bits >>, Trail, Acc, T) when ?IS_WS(C) -> te_param_sep(R, Trail, Acc, T). + +te_before_weight(<< C, R/bits >>, Trail, Acc, T) when ?IS_WS(C) -> te_before_weight(R, Trail, Acc, T); +te_before_weight(<< $q, $=, R/bits >>, Trail, Acc, T) -> te_weight(R, Trail, Acc, T). + +te_weight(<< "1.000", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 1000}|Acc]); +te_weight(<< "1.00", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 1000}|Acc]); +te_weight(<< "1.0", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 1000}|Acc]); +te_weight(<< "1.", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 1000}|Acc]); +te_weight(<< "1", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 1000}|Acc]); +te_weight(<< "0.", A, B, C, R/bits >>, Trail, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B), ?IS_DIGIT(C) -> + te_list_sep(R, Trail, [{T, (A - $0) * 100 + (B - $0) * 10 + (C - $0)}|Acc]); +te_weight(<< "0.", A, B, R/bits >>, Trail, Acc, T) when ?IS_DIGIT(A), ?IS_DIGIT(B) -> + te_list_sep(R, Trail, [{T, (A - $0) * 100 + (B - $0) * 10}|Acc]); +te_weight(<< "0.", A, R/bits >>, Trail, Acc, T) when ?IS_DIGIT(A) -> + te_list_sep(R, Trail, [{T, (A - $0) * 100}|Acc]); +te_weight(<< "0.", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 0}|Acc]); +te_weight(<< "0", R/bits >>, Trail, Acc, T) -> te_list_sep(R, Trail, [{T, 0}|Acc]). + +te_list_sep(<<>>, Trail, Acc) -> {Trail, lists:reverse(Acc)}; +te_list_sep(<< C, R/bits >>, Trail, Acc) when ?IS_WS(C) -> te_list_sep(R, Trail, Acc); +te_list_sep(<< $,, R/bits >>, Trail, Acc) -> te_list(R, Trail, Acc). + +-ifdef(TEST). +te() -> + ?LET({Trail, L}, + {elements([trailers, no_trailers]), + small_non_empty_list({?SUCHTHAT(T, token(), T =/= <<"trailers">>), weight()})}, + {Trail, L, begin + L2 = case Trail of + no_trailers -> L; + trailers -> + Rand = rand:uniform(length(L) + 1) - 1, + {Before, After} = lists:split(Rand, L), + Before ++ [{<<"trailers">>, undefined}|After] + end, + << _, TE/binary >> = iolist_to_binary([case W of + undefined -> [$,, T]; + _ -> [$,, T, <<";q=">>, qvalue_to_iodata(W)] + end || {T, W} <- L2]), + TE + end} + ). + +prop_parse_te() -> + ?FORALL({Trail, L, TE}, + te(), + begin + {ResTrail, ResL} = parse_te(TE), + CheckedL = [begin + ResT =:= ?LOWER(T) + andalso (ResW =:= W orelse (W =:= undefined andalso ResW =:= 1000)) + end || {{T, W}, {ResT, ResW}} <- lists:zip(L, ResL)], + ResTrail =:= Trail andalso [true] =:= lists:usort(CheckedL) + end). + +parse_te_test_() -> + Tests = [ + {<<"deflate">>, {no_trailers, [{<<"deflate">>, 1000}]}}, + {<<>>, {no_trailers, []}}, + {<<"trailers, deflate;q=0.5">>, {trailers, [{<<"deflate">>, 500}]}} + ], + [{V, fun() -> R = parse_te(V) end} || {V, R} <- Tests]. + +horse_parse_te() -> + horse:repeat(200000, + parse_te(<<"trailers, deflate;q=0.5">>) + ). +-endif. + +%% Trailer header. 
+ +-spec parse_trailer(binary()) -> [binary()]. +parse_trailer(Trailer) -> + nonempty(token_ci_list(Trailer, [])). + +-ifdef(TEST). +parse_trailer_test_() -> + Tests = [ + {<<"Date, Content-MD5">>, [<<"date">>, <<"content-md5">>]} + ], + [{V, fun() -> R = parse_trailer(V) end} || {V, R} <- Tests]. + +parse_trailer_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_trailer(V)) end} || V <- Tests]. + +horse_parse_trailer() -> + horse:repeat(200000, + parse_trailer(<<"Date, Content-MD5">>) + ). +-endif. + +%% Transfer-Encoding header. +%% +%% This function does not support parsing of transfer-parameter. + +-spec parse_transfer_encoding(binary()) -> [binary()]. +parse_transfer_encoding(<<"chunked">>) -> + [<<"chunked">>]; +parse_transfer_encoding(TransferEncoding) -> + nonempty(token_ci_list(TransferEncoding, [])). + +-ifdef(TEST). +prop_parse_transfer_encoding() -> + ?FORALL(L, + non_empty(list(token())), + begin + << _, TransferEncoding/binary >> = iolist_to_binary([[$,, C] || C <- L]), + ResL = parse_transfer_encoding(TransferEncoding), + CheckedL = [?LOWER(Co) =:= ResC || {Co, ResC} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_transfer_encoding_test_() -> + Tests = [ + {<<"a , , , ">>, [<<"a">>]}, + {<<" , , , a">>, [<<"a">>]}, + {<<"a , , b">>, [<<"a">>, <<"b">>]}, + {<<"chunked">>, [<<"chunked">>]}, + {<<"chunked, something">>, [<<"chunked">>, <<"something">>]}, + {<<"gzip, chunked">>, [<<"gzip">>, <<"chunked">>]} + ], + [{V, fun() -> R = parse_transfer_encoding(V) end} || {V, R} <- Tests]. + +parse_transfer_encoding_error_test_() -> + Tests = [ + <<>>, + <<" ">>, + <<" , ">>, + <<",,,">>, + <<"a b">> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_transfer_encoding(V)) end} + || V <- Tests]. + +horse_parse_transfer_encoding_chunked() -> + horse:repeat(200000, + parse_transfer_encoding(<<"chunked">>) + ). + +horse_parse_transfer_encoding_custom() -> + horse:repeat(200000, + parse_transfer_encoding(<<"chunked, something">>) + ). +-endif. + +%% Upgrade header. +%% +%% It is unclear from the RFC whether the values here are +%% case sensitive. +%% +%% We handle them in a case insensitive manner because they +%% are described as case insensitive in the Websocket RFC. + +-spec parse_upgrade(binary()) -> [binary()]. +parse_upgrade(Upgrade) -> + nonempty(protocol_list(Upgrade, [])). + +protocol_list(<<>>, Acc) -> lists:reverse(Acc); +protocol_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> protocol_list(R, Acc); +protocol_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> + ?LOWER(protocol_name, R, Acc, <<>>). + +protocol_name(<< $/, C, R/bits >>, Acc, P) -> + ?LOWER(protocol_version, R, Acc, << P/binary, $/ >>); +protocol_name(<< C, R/bits >>, Acc, P) when ?IS_TOKEN(C) -> + ?LOWER(protocol_name, R, Acc, P); +protocol_name(R, Acc, P) -> protocol_list_sep(R, [P|Acc]). + +protocol_version(<< C, R/bits >>, Acc, P) when ?IS_TOKEN(C) -> + ?LOWER(protocol_version, R, Acc, P); +protocol_version(R, Acc, P) -> protocol_list_sep(R, [P|Acc]). + +protocol_list_sep(<<>>, Acc) -> lists:reverse(Acc); +protocol_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> protocol_list_sep(R, Acc); +protocol_list_sep(<< $,, R/bits >>, Acc) -> protocol_list(R, Acc). + +-ifdef(TEST). +protocols() -> + ?LET(P, + oneof([token(), [token(), $/, token()]]), + iolist_to_binary(P)). 
+ +prop_parse_upgrade() -> + ?FORALL(L, + non_empty(list(protocols())), + begin + << _, Upgrade/binary >> = iolist_to_binary([[$,, P] || P <- L]), + ResL = parse_upgrade(Upgrade), + CheckedL = [?LOWER(P) =:= ResP || {P, ResP} <- lists:zip(L, ResL)], + [true] =:= lists:usort(CheckedL) + end). + +parse_upgrade_test_() -> + Tests = [ + {<<"HTTP/2.0, SHTTP/1.3, IRC/6.9, RTA/x11">>, + [<<"http/2.0">>, <<"shttp/1.3">>, <<"irc/6.9">>, <<"rta/x11">>]}, + {<<"HTTP/2.0">>, [<<"http/2.0">>]} + ], + [{V, fun() -> R = parse_upgrade(V) end} || {V, R} <- Tests]. + +parse_upgrade_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_upgrade(V)) end} + || V <- Tests]. +-endif. + +%% Variant-Key-06 (draft) header. +%% +%% The Variants header must be parsed first in order to know +%% the NumMembers argument as it is the number of members in +%% the Variants dictionary. + +-spec parse_variant_key(binary(), pos_integer()) -> [[binary()]]. +parse_variant_key(VariantKey, NumMembers) -> + List = cow_http_struct_hd:parse_list(VariantKey), + [case Inner of + {with_params, InnerList, #{}} -> + NumMembers = length(InnerList), + [case Item of + {with_params, {token, Value}, #{}} -> Value; + {with_params, {string, Value}, #{}} -> Value + end || Item <- InnerList] + end || Inner <- List]. + +-ifdef(TEST). +parse_variant_key_test_() -> + Tests = [ + {<<"(en)">>, 1, [[<<"en">>]]}, + {<<"(gzip fr)">>, 2, [[<<"gzip">>, <<"fr">>]]}, + {<<"(gzip fr), (\"identity\" fr)">>, 2, [[<<"gzip">>, <<"fr">>], [<<"identity">>, <<"fr">>]]}, + {<<"(\"gzip \" fr)">>, 2, [[<<"gzip ">>, <<"fr">>]]}, + {<<"(en br)">>, 2, [[<<"en">>, <<"br">>]]}, + {<<"(\"0\")">>, 1, [[<<"0">>]]}, + {<<"(silver), (\"bronze\")">>, 1, [[<<"silver">>], [<<"bronze">>]]}, + {<<"(some_person)">>, 1, [[<<"some_person">>]]}, + {<<"(gold europe)">>, 2, [[<<"gold">>, <<"europe">>]]} + ], + [{V, fun() -> R = parse_variant_key(V, N) end} || {V, N, R} <- Tests]. + +parse_variant_key_error_test_() -> + Tests = [ + {<<"(gzip fr), (identity fr), (br fr oops)">>, 2} + ], + [{V, fun() -> {'EXIT', _} = (catch parse_variant_key(V, N)) end} || {V, N} <- Tests]. +-endif. + +-spec variant_key([[binary()]]) -> iolist(). +%% We assume that the lists are of correct length. +variant_key(VariantKeys) -> + cow_http_struct_hd:list([ + {with_params, [ + {with_params, {string, Value}, #{}} + || Value <- InnerList], #{}} + || InnerList <- VariantKeys]). + +-ifdef(TEST). +variant_key_identity_test_() -> + Tests = [ + {1, [[<<"en">>]]}, + {2, [[<<"gzip">>, <<"fr">>]]}, + {2, [[<<"gzip">>, <<"fr">>], [<<"identity">>, <<"fr">>]]}, + {2, [[<<"gzip ">>, <<"fr">>]]}, + {2, [[<<"en">>, <<"br">>]]}, + {1, [[<<"0">>]]}, + {1, [[<<"silver">>], [<<"bronze">>]]}, + {1, [[<<"some_person">>]]}, + {2, [[<<"gold">>, <<"europe">>]]} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> V = parse_variant_key(iolist_to_binary(variant_key(V)), N) end} || {N, V} <- Tests]. +-endif. + +%% Variants-06 (draft) header. + +-spec parse_variants(binary()) -> [{binary(), [binary()]}]. +parse_variants(Variants) -> + {Dict0, Order} = cow_http_struct_hd:parse_dictionary(Variants), + Dict = maps:map(fun(_, {with_params, List, #{}}) -> + [case Item of + {with_params, {token, Value}, #{}} -> Value; + {with_params, {string, Value}, #{}} -> Value + end || Item <- List] + end, Dict0), + [{Key, maps:get(Key, Dict)} || Key <- Order]. + +-ifdef(TEST). 
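+%% Illustrative sketch (not part of the upstream test suite): the two draft
+%% headers are meant to be used together. A cache first parses the Variants
+%% header, then passes the number of dictionary members as the NumMembers
+%% argument when parsing Variant-Key. The header values are made-up examples.
+%%
+%% variants_usage_example() ->
+%%     Variants = parse_variants(<<"accept-encoding=(gzip br), accept-language=(en fr)">>),
+%%     NumMembers = length(Variants),
+%%     [[<<"gzip">>, <<"en">>]] = parse_variant_key(<<"(gzip en)">>, NumMembers).
+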
+parse_variants_test_() -> + Tests = [ + {<<"accept-language=(de en jp)">>, [{<<"accept-language">>, [<<"de">>, <<"en">>, <<"jp">>]}]}, + {<<"accept-encoding=(gzip)">>, [{<<"accept-encoding">>, [<<"gzip">>]}]}, + {<<"accept-encoding=()">>, [{<<"accept-encoding">>, []}]}, + {<<"accept-encoding=(gzip br), accept-language=(en fr)">>, [ + {<<"accept-encoding">>, [<<"gzip">>, <<"br">>]}, + {<<"accept-language">>, [<<"en">>, <<"fr">>]} + ]}, + {<<"accept-language=(en fr de), accept-encoding=(gzip br)">>, [ + {<<"accept-language">>, [<<"en">>, <<"fr">>, <<"de">>]}, + {<<"accept-encoding">>, [<<"gzip">>, <<"br">>]} + ]} + ], + [{V, fun() -> R = parse_variants(V) end} || {V, R} <- Tests]. +-endif. + +-spec variants([{binary(), [binary()]}]) -> iolist(). +variants(Variants) -> + cow_http_struct_hd:dictionary([ + {Key, {with_params, [ + {with_params, {string, Value}, #{}} + || Value <- List], #{}}} + || {Key, List} <- Variants]). + +-ifdef(TEST). +variants_identity_test_() -> + Tests = [ + [{<<"accept-language">>, [<<"de">>, <<"en">>, <<"jp">>]}], + [{<<"accept-encoding">>, [<<"gzip">>]}], + [{<<"accept-encoding">>, []}], + [ + {<<"accept-encoding">>, [<<"gzip">>, <<"br">>]}, + {<<"accept-language">>, [<<"en">>, <<"fr">>]} + ], + [ + {<<"accept-language">>, [<<"en">>, <<"fr">>, <<"de">>]}, + {<<"accept-encoding">>, [<<"gzip">>, <<"br">>]} + ] + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> V = parse_variants(iolist_to_binary(variants(V))) end} || V <- Tests]. +-endif. + +%% Vary header. + +-spec parse_vary(binary()) -> '*' | [binary()]. +parse_vary(<<"*">>) -> + '*'; +parse_vary(Vary) -> + nonempty(token_ci_list(Vary, [])). + +-ifdef(TEST). +parse_vary_test_() -> + Tests = [ + {<<"*">>, '*'}, + {<<"Accept-Encoding">>, [<<"accept-encoding">>]}, + {<<"accept-encoding, accept-language">>, [<<"accept-encoding">>, <<"accept-language">>]} + ], + [{V, fun() -> R = parse_vary(V) end} || {V, R} <- Tests]. + +parse_vary_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_vary(V)) end} || V <- Tests]. +-endif. + +%% WWW-Authenticate header. +%% +%% Unknown schemes are represented as the lowercase binary +%% instead of an atom. Unlike with parse_authorization/1, +%% we do not crash on unknown schemes. +%% +%% When parsing auth-params, we do not accept BWS characters around the "=". + +-spec parse_www_authenticate(binary()) -> [{basic, binary()} + | {bearer | digest | binary(), [{binary(), binary()}]}]. +parse_www_authenticate(Authenticate) -> + nonempty(www_auth_list(Authenticate, [])). + +www_auth_list(<<>>, Acc) -> lists:reverse(Acc); +www_auth_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> www_auth_list(R, Acc); +www_auth_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> + ?LOWER(www_auth_scheme, R, Acc, <<>>). + +www_auth_basic_before_realm(<< C, R/bits >>, Acc) when ?IS_WS(C) -> www_auth_basic_before_realm(R, Acc); +www_auth_basic_before_realm(<< "realm=\"", R/bits >>, Acc) -> www_auth_basic(R, Acc, <<>>). + +www_auth_basic(<< $", R/bits >>, Acc, Realm) -> www_auth_list_sep(R, [{basic, Realm}|Acc]); +www_auth_basic(<< $\\, C, R/bits >>, Acc, Realm) when ?IS_VCHAR_OBS(C) -> www_auth_basic(R, Acc, << Realm/binary, C >>); +www_auth_basic(<< C, R/bits >>, Acc, Realm) when ?IS_VCHAR_OBS(C) -> www_auth_basic(R, Acc, << Realm/binary, C >>). 
+ +www_auth_scheme(<< C, R/bits >>, Acc, Scheme) when ?IS_WS(C) -> + case Scheme of + <<"basic">> -> www_auth_basic_before_realm(R, Acc); + <<"bearer">> -> www_auth_params_list(R, Acc, bearer, []); + <<"digest">> -> www_auth_params_list(R, Acc, digest, []); + _ -> www_auth_params_list(R, Acc, Scheme, []) + end; +www_auth_scheme(<< C, R/bits >>, Acc, Scheme) when ?IS_TOKEN(C) -> + ?LOWER(www_auth_scheme, R, Acc, Scheme). + +www_auth_list_sep(<<>>, Acc) -> lists:reverse(Acc); +www_auth_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> www_auth_list_sep(R, Acc); +www_auth_list_sep(<< $,, R/bits >>, Acc) -> www_auth_list(R, Acc). + +www_auth_params_list(<<>>, Acc, Scheme, Params) -> + lists:reverse([{Scheme, lists:reverse(nonempty(Params))}|Acc]); +www_auth_params_list(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS_COMMA(C) -> + www_auth_params_list(R, Acc, Scheme, Params); +www_auth_params_list(<< "algorithm=", C, R/bits >>, Acc, Scheme, Params) when ?IS_TOKEN(C) -> + www_auth_token(R, Acc, Scheme, Params, <<"algorithm">>, << C >>); +www_auth_params_list(<< "domain=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"domain">>, <<>>); +www_auth_params_list(<< "error=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"error">>, <<>>); +www_auth_params_list(<< "error_description=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"error_description">>, <<>>); +www_auth_params_list(<< "error_uri=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"error_uri">>, <<>>); +www_auth_params_list(<< "nonce=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"nonce">>, <<>>); +www_auth_params_list(<< "opaque=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"opaque">>, <<>>); +www_auth_params_list(<< "qop=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"qop">>, <<>>); +www_auth_params_list(<< "realm=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"realm">>, <<>>); +www_auth_params_list(<< "scope=\"", R/bits >>, Acc, Scheme, Params) -> + www_auth_quoted(R, Acc, Scheme, Params, <<"scope">>, <<>>); +www_auth_params_list(<< "stale=false", R/bits >>, Acc, Scheme, Params) -> + www_auth_params_list_sep(R, Acc, Scheme, [{<<"stale">>, <<"false">>}|Params]); +www_auth_params_list(<< "stale=true", R/bits >>, Acc, Scheme, Params) -> + www_auth_params_list_sep(R, Acc, Scheme, [{<<"stale">>, <<"true">>}|Params]); +www_auth_params_list(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_TOKEN(C) -> + ?LOWER(www_auth_param, R, Acc, Scheme, Params, <<>>). + +www_auth_param(<< $=, $", R/bits >>, Acc, Scheme, Params, K) -> + www_auth_quoted(R, Acc, Scheme, Params, K, <<>>); +www_auth_param(<< $=, C, R/bits >>, Acc, Scheme, Params, K) when ?IS_TOKEN(C) -> + www_auth_token(R, Acc, Scheme, Params, K, << C >>); +www_auth_param(<< C, R/bits >>, Acc, Scheme, Params, K) when ?IS_TOKEN(C) -> + ?LOWER(www_auth_param, R, Acc, Scheme, Params, K); +www_auth_param(R, Acc, Scheme, Params, NewScheme) -> + www_auth_scheme(R, [{Scheme, lists:reverse(Params)}|Acc], NewScheme). + +www_auth_token(<< C, R/bits >>, Acc, Scheme, Params, K, V) when ?IS_TOKEN(C) -> + www_auth_token(R, Acc, Scheme, Params, K, << V/binary, C >>); +www_auth_token(R, Acc, Scheme, Params, K, V) -> + www_auth_params_list_sep(R, Acc, Scheme, [{K, V}|Params]). 
+ +www_auth_quoted(<< $", R/bits >>, Acc, Scheme, Params, K, V) -> + www_auth_params_list_sep(R, Acc, Scheme, [{K, V}|Params]); +www_auth_quoted(<< $\\, C, R/bits >>, Acc, Scheme, Params, K, V) when ?IS_VCHAR_OBS(C) -> + www_auth_quoted(R, Acc, Scheme, Params, K, << V/binary, C >>); +www_auth_quoted(<< C, R/bits >>, Acc, Scheme, Params, K, V) when ?IS_VCHAR_OBS(C) -> + www_auth_quoted(R, Acc, Scheme, Params, K, << V/binary, C >>). + +www_auth_params_list_sep(<<>>, Acc, Scheme, Params) -> + lists:reverse([{Scheme, lists:reverse(Params)}|Acc]); +www_auth_params_list_sep(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS(C) -> + www_auth_params_list_sep(R, Acc, Scheme, Params); +www_auth_params_list_sep(<< $,, R/bits >>, Acc, Scheme, Params) -> + www_auth_params_list_after_sep(R, Acc, Scheme, Params). + +www_auth_params_list_after_sep(<<>>, Acc, Scheme, Params) -> + lists:reverse([{Scheme, lists:reverse(Params)}|Acc]); +www_auth_params_list_after_sep(<< C, R/bits >>, Acc, Scheme, Params) when ?IS_WS_COMMA(C) -> + www_auth_params_list_after_sep(R, Acc, Scheme, Params); +www_auth_params_list_after_sep(R, Acc, Scheme, Params) -> + www_auth_params_list(R, Acc, Scheme, Params). + +-ifdef(TEST). +parse_www_authenticate_test_() -> + Tests = [ + {<<"Newauth realm=\"apps\", type=1, title=\"Login to \\\"apps\\\"\", Basic realm=\"simple\"">>, + [{<<"newauth">>, [ + {<<"realm">>, <<"apps">>}, + {<<"type">>, <<"1">>}, + {<<"title">>, <<"Login to \"apps\"">>}]}, + {basic, <<"simple">>}]}, + %% Same test, different order. + {<<"Basic realm=\"simple\", Newauth realm=\"apps\", type=1, title=\"Login to \\\"apps\\\"\"">>, + [{basic, <<"simple">>}, + {<<"newauth">>, [ + {<<"realm">>, <<"apps">>}, + {<<"type">>, <<"1">>}, + {<<"title">>, <<"Login to \"apps\"">>}]}]}, + {<<"Bearer realm=\"example\"">>, + [{bearer, [{<<"realm">>, <<"example">>}]}]}, + {<<"Bearer realm=\"example\", error=\"invalid_token\", error_description=\"The access token expired\"">>, + [{bearer, [ + {<<"realm">>, <<"example">>}, + {<<"error">>, <<"invalid_token">>}, + {<<"error_description">>, <<"The access token expired">>} + ]}]}, + {<<"Basic realm=\"WallyWorld\"">>, + [{basic, <<"WallyWorld">>}]}, + {<<"Digest realm=\"testrealm@host.com\", qop=\"auth,auth-int\", " + "nonce=\"dcd98b7102dd2f0e8b11d0f600bfb0c093\", " + "opaque=\"5ccc069c403ebaf9f0171e9517f40e41\"">>, + [{digest, [ + {<<"realm">>, <<"testrealm@host.com">>}, + {<<"qop">>, <<"auth,auth-int">>}, + {<<"nonce">>, <<"dcd98b7102dd2f0e8b11d0f600bfb0c093">>}, + {<<"opaque">>, <<"5ccc069c403ebaf9f0171e9517f40e41">>} + ]}]} + ], + [{V, fun() -> R = parse_www_authenticate(V) end} || {V, R} <- Tests]. + +parse_www_authenticate_error_test_() -> + Tests = [ + <<>> + ], + [{V, fun() -> {'EXIT', _} = (catch parse_www_authenticate(V)) end} || V <- Tests]. + +horse_parse_www_authenticate() -> + horse:repeat(200000, + parse_www_authenticate(<<"Newauth realm=\"apps\", type=1, title=\"Login to \\\"apps\\\"\", Basic realm=\"simple\"">>) + ). +-endif. + +%% X-Forwarded-For header. +%% +%% This header has no specification but *looks like* it is +%% a list of tokens. +%% +%% This header is deprecated in favor of the Forwarded header. + +-spec parse_x_forwarded_for(binary()) -> [binary()]. +parse_x_forwarded_for(XForwardedFor) -> + nonempty(nodeid_list(XForwardedFor, [])). + +-define(IS_NODEID_TOKEN(C), + ?IS_ALPHA(C) or ?IS_DIGIT(C) + or (C =:= $:) or (C =:= $.) or (C =:= $_) + or (C =:= $-) or (C =:= $[) or (C =:= $])). 
+
+nodeid_list(<<>>, Acc) -> lists:reverse(Acc);
+nodeid_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> nodeid_list(R, Acc);
+nodeid_list(<< C, R/bits >>, Acc) when ?IS_NODEID_TOKEN(C) -> nodeid(R, Acc, << C >>).
+
+nodeid(<< C, R/bits >>, Acc, T) when ?IS_NODEID_TOKEN(C) -> nodeid(R, Acc, << T/binary, C >>);
+nodeid(R, Acc, T) -> nodeid_list_sep(R, [T|Acc]).
+
+nodeid_list_sep(<<>>, Acc) -> lists:reverse(Acc);
+nodeid_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> nodeid_list_sep(R, Acc);
+nodeid_list_sep(<< $,, R/bits >>, Acc) -> nodeid_list(R, Acc).
+
+-ifdef(TEST).
+parse_x_forwarded_for_test_() ->
+    Tests = [
+        {<<"client, proxy1, proxy2">>,
+            [<<"client">>, <<"proxy1">>, <<"proxy2">>]},
+        {<<"128.138.243.150, unknown, 192.52.106.30">>,
+            [<<"128.138.243.150">>, <<"unknown">>, <<"192.52.106.30">>]},
+        %% Examples from Mozilla DN.
+        {<<"2001:db8:85a3:8d3:1319:8a2e:370:7348">>,
+            [<<"2001:db8:85a3:8d3:1319:8a2e:370:7348">>]},
+        {<<"203.0.113.195">>,
+            [<<"203.0.113.195">>]},
+        {<<"203.0.113.195, 70.41.3.18, 150.172.238.178">>,
+            [<<"203.0.113.195">>, <<"70.41.3.18">>, <<"150.172.238.178">>]},
+        %% Examples from RFC7239 modified for x-forwarded-for.
+        {<<"[2001:db8:cafe::17]:4711">>,
+            [<<"[2001:db8:cafe::17]:4711">>]},
+        {<<"192.0.2.43, 198.51.100.17">>,
+            [<<"192.0.2.43">>, <<"198.51.100.17">>]},
+        {<<"_hidden">>,
+            [<<"_hidden">>]},
+        {<<"192.0.2.43,[2001:db8:cafe::17],unknown">>,
+            [<<"192.0.2.43">>, <<"[2001:db8:cafe::17]">>, <<"unknown">>]},
+        {<<"192.0.2.43, [2001:db8:cafe::17], unknown">>,
+            [<<"192.0.2.43">>, <<"[2001:db8:cafe::17]">>, <<"unknown">>]},
+        {<<"192.0.2.43, 2001:db8:cafe::17">>,
+            [<<"192.0.2.43">>, <<"2001:db8:cafe::17">>]},
+        {<<"192.0.2.43, [2001:db8:cafe::17]">>,
+            [<<"192.0.2.43">>, <<"[2001:db8:cafe::17]">>]}
+    ],
+    [{V, fun() -> R = parse_x_forwarded_for(V) end} || {V, R} <- Tests].
+
+parse_x_forwarded_for_error_test_() ->
+    Tests = [
+        <<>>
+    ],
+    [{V, fun() -> {'EXIT', _} = (catch parse_x_forwarded_for(V)) end} || V <- Tests].
+-endif.
+
+%% Internal.
+
+%% Only return if the list is not empty.
+nonempty(L) when L =/= [] -> L.
+
+%% Parse a list of case sensitive tokens.
+token_list(<<>>, Acc) -> lists:reverse(Acc);
+token_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> token_list(R, Acc);
+token_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> token(R, Acc, << C >>).
+
+token(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> token(R, Acc, << T/binary, C >>);
+token(R, Acc, T) -> token_list_sep(R, [T|Acc]).
+
+token_list_sep(<<>>, Acc) -> lists:reverse(Acc);
+token_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> token_list_sep(R, Acc);
+token_list_sep(<< $,, R/bits >>, Acc) -> token_list(R, Acc).
+
+%% Parse a list of case insensitive tokens.
+token_ci_list(<<>>, Acc) -> lists:reverse(Acc);
+token_ci_list(<< C, R/bits >>, Acc) when ?IS_WS_COMMA(C) -> token_ci_list(R, Acc);
+token_ci_list(<< C, R/bits >>, Acc) when ?IS_TOKEN(C) -> ?LOWER(token_ci, R, Acc, <<>>).
+
+token_ci(<< C, R/bits >>, Acc, T) when ?IS_TOKEN(C) -> ?LOWER(token_ci, R, Acc, T);
+token_ci(R, Acc, T) -> token_ci_list_sep(R, [T|Acc]).
+
+token_ci_list_sep(<<>>, Acc) -> lists:reverse(Acc);
+token_ci_list_sep(<< C, R/bits >>, Acc) when ?IS_WS(C) -> token_ci_list_sep(R, Acc);
+token_ci_list_sep(<< $,, R/bits >>, Acc) -> token_ci_list(R, Acc).
+
+join_token_list([]) -> [];
+join_token_list([H|T]) -> join_token_list(T, [H]).
+
+join_token_list([], Acc) -> lists:reverse(Acc);
+join_token_list([H|T], Acc) -> join_token_list(T, [H,<<", ">>|Acc]).
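+
+%% Illustrative usage sketch (not part of the upstream module): these parsers
+%% take the raw header value as a binary and crash on invalid input, so code
+%% handling headers from an untrusted peer typically wraps the call in a
+%% try/catch. The helper below is hypothetical, as is its proplist-shaped
+%% Headers argument.
+%%
+%% safe_parse_te(Headers) ->
+%%     case lists:keyfind(<<"te">>, 1, Headers) of
+%%         false -> {no_trailers, []};
+%%         {_, Value} ->
+%%             try parse_te(Value)
+%%             catch _:_ -> {no_trailers, []}
+%%             end
+%%     end.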
diff --git a/src/wsLib/cow_http_struct_hd.erl b/src/wsLib/cow_http_struct_hd.erl
new file mode 100644
index 0000000..373c8da
--- /dev/null
+++ b/src/wsLib/cow_http_struct_hd.erl
@@ -0,0 +1,420 @@
+%% Copyright (c) 2019, Loïc Hoguin <essen@ninenines.eu>
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% The mapping between Erlang and structured headers types is as follows:
+%%
+%% List: list()
+%% Dictionary: map()
+%% Bare item: one bare_item() that can be of type:
+%% Integer: integer()
+%% Float: float()
+%% String: {string, binary()}
+%% Token: {token, binary()}
+%% Byte sequence: {binary, binary()}
+%% Boolean: boolean()
+%% And finally:
+%% Type with Parameters: {with_params, Type, Parameters}
+%% Parameters: [{binary(), bare_item()}]
+
+-module(cow_http_struct_hd).
+
+-export([parse_dictionary/1]).
+-export([parse_item/1]).
+-export([parse_list/1]).
+-export([dictionary/1]).
+-export([item/1]).
+-export([list/1]).
+
+-include("cow_parse.hrl").
+
+-type sh_list() :: [sh_item() | sh_inner_list()].
+-type sh_inner_list() :: sh_with_params([sh_item()]).
+-type sh_params() :: #{binary() => sh_bare_item() | undefined}.
+-type sh_dictionary() :: {#{binary() => sh_item() | sh_inner_list()}, [binary()]}.
+-type sh_item() :: sh_with_params(sh_bare_item()).
+-type sh_bare_item() :: integer() | float() | boolean()
+    | {string | token | binary, binary()}.
+-type sh_with_params(Type) :: {with_params, Type, sh_params()}.
+
+-define(IS_LC_ALPHA(C),
+    (C =:= $a) or (C =:= $b) or (C =:= $c) or (C =:= $d) or (C =:= $e) or
+    (C =:= $f) or (C =:= $g) or (C =:= $h) or (C =:= $i) or (C =:= $j) or
+    (C =:= $k) or (C =:= $l) or (C =:= $m) or (C =:= $n) or (C =:= $o) or
+    (C =:= $p) or (C =:= $q) or (C =:= $r) or (C =:= $s) or (C =:= $t) or
+    (C =:= $u) or (C =:= $v) or (C =:= $w) or (C =:= $x) or (C =:= $y) or
+    (C =:= $z)
+).
+
+%% Parsing.
+
+-spec parse_dictionary(binary()) -> sh_dictionary().
+parse_dictionary(<<>>) ->
+    {#{}, []};
+parse_dictionary(<<C, R/bits>>) when ?IS_LC_ALPHA(C) ->
+    {Dict, Order, <<>>} = parse_dict_key(R, #{}, [], <<C>>),
+    {Dict, Order}.
+
+parse_dict_key(<<$=,$(,R0/bits>>, Acc, Order, K) ->
+    false = maps:is_key(K, Acc),
+    {Item, R} = parse_inner_list(R0, []),
+    parse_dict_before_sep(R, Acc#{K => Item}, [K|Order]);
+parse_dict_key(<<$=,R0/bits>>, Acc, Order, K) ->
+    false = maps:is_key(K, Acc),
+    {Item, R} = parse_item1(R0),
+    parse_dict_before_sep(R, Acc#{K => Item}, [K|Order]);
+parse_dict_key(<<C, R/bits>>, Acc, Order, K)
+    when ?IS_LC_ALPHA(C) or ?IS_DIGIT(C)
+        or (C =:= $_) or (C =:= $-) or (C =:= $*) ->
+    parse_dict_key(R, Acc, Order, <<K/binary, C>>).
+
+parse_dict_before_sep(<<C, R/bits>>, Acc, Order) when ?IS_WS(C) ->
+    parse_dict_before_sep(R, Acc, Order);
+parse_dict_before_sep(<<C, R/bits>>, Acc, Order) when C =:= $, ->
+    parse_dict_before_member(R, Acc, Order);
+parse_dict_before_sep(<<>>, Acc, Order) ->
+    {Acc, lists:reverse(Order), <<>>}.
+
+parse_dict_before_member(<<C, R/bits>>, Acc, Order) when ?IS_WS(C) ->
+    parse_dict_before_member(R, Acc, Order);
+parse_dict_before_member(<<C, R/bits>>, Acc, Order) when ?IS_LC_ALPHA(C) ->
+    parse_dict_key(R, Acc, Order, <<C>>).
+
+-spec parse_item(binary()) -> sh_item().
+parse_item(Bin) ->
+    {Item, <<>>} = parse_item1(Bin),
+    Item.
+
+parse_item1(Bin) ->
+    case parse_bare_item(Bin) of
+        {Item, <<$;,R/bits>>} ->
+            {Params, Rest} = parse_before_param(R, #{}),
+            {{with_params, Item, Params}, Rest};
+        {Item, Rest} ->
+            {{with_params, Item, #{}}, Rest}
+    end.
+
+-spec parse_list(binary()) -> sh_list().
+parse_list(<<>>) ->
+    [];
+parse_list(Bin) ->
+    parse_list_before_member(Bin, []).
+
+parse_list_member(<<$(,R0/bits>>, Acc) ->
+    {Item, R} = parse_inner_list(R0, []),
+    parse_list_before_sep(R, [Item|Acc]);
+parse_list_member(R0, Acc) ->
+    {Item, R} = parse_item1(R0),
+    parse_list_before_sep(R, [Item|Acc]).
+
+parse_list_before_sep(<<C, R/bits>>, Acc) when ?IS_WS(C) ->
+    parse_list_before_sep(R, Acc);
+parse_list_before_sep(<<$,,R/bits>>, Acc) ->
+    parse_list_before_member(R, Acc);
+parse_list_before_sep(<<>>, Acc) ->
+    lists:reverse(Acc).
+
+parse_list_before_member(<<C, R/bits>>, Acc) when ?IS_WS(C) ->
+    parse_list_before_member(R, Acc);
+parse_list_before_member(R, Acc) ->
+    parse_list_member(R, Acc).
+
+%% Internal.
+
+parse_inner_list(<<C, R/bits>>, Acc) when ?IS_WS(C) ->
+    parse_inner_list(R, Acc);
+parse_inner_list(<<$),$;,R0/bits>>, Acc) ->
+    {Params, R} = parse_before_param(R0, #{}),
+    {{with_params, lists:reverse(Acc), Params}, R};
+parse_inner_list(<<$),R/bits>>, Acc) ->
+    {{with_params, lists:reverse(Acc), #{}}, R};
+parse_inner_list(R0, Acc) ->
+    {Item, R = <<C, _/bits>>} = parse_item1(R0),
+    true = (C =:= $\s) orelse (C =:= $)),
+    parse_inner_list(R, [Item|Acc]).
+
+parse_before_param(<<C, R/bits>>, Acc) when ?IS_WS(C) ->
+    parse_before_param(R, Acc);
+parse_before_param(<<C, R/bits>>, Acc) when ?IS_LC_ALPHA(C) ->
+    parse_param(R, Acc, <<C>>).
+
+parse_param(<<$;,R/bits>>, Acc, K) ->
+    parse_before_param(R, Acc#{K => undefined});
+parse_param(<<$=,R0/bits>>, Acc, K) ->
+    case parse_bare_item(R0) of
+        {Item, <<$;,R/bits>>} ->
+            false = maps:is_key(K, Acc),
+            parse_before_param(R, Acc#{K => Item});
+        {Item, R} ->
+            false = maps:is_key(K, Acc),
+            {Acc#{K => Item}, R}
+    end;
+parse_param(<<C, R/bits>>, Acc, K)
+    when ?IS_LC_ALPHA(C) or ?IS_DIGIT(C)
+        or (C =:= $_) or (C =:= $-) or (C =:= $*) ->
+    parse_param(R, Acc, <<K/binary, C>>);
+parse_param(R, Acc, K) ->
+    false = maps:is_key(K, Acc),
+    {Acc#{K => undefined}, R}.
+
+%% Integer or float.
+parse_bare_item(<<$-,R/bits>>) -> parse_number(R, 0, <<$->>);
+parse_bare_item(<<C, R/bits>>) when ?IS_DIGIT(C) -> parse_number(R, 1, <<C>>);
+%% String.
+parse_bare_item(<<$",R/bits>>) -> parse_string(R, <<>>);
+%% Token.
+parse_bare_item(<<C, R/bits>>) when ?IS_ALPHA(C) -> parse_token(R, <<C>>);
+%% Byte sequence.
+parse_bare_item(<<$*,R/bits>>) -> parse_binary(R, <<>>);
+%% Boolean.
+parse_bare_item(<<"?0",R/bits>>) -> {false, R};
+parse_bare_item(<<"?1",R/bits>>) -> {true, R}.
+
+parse_number(<<C, R/bits>>, L, Acc) when ?IS_DIGIT(C) ->
+    parse_number(R, L+1, <<Acc/binary, C>>);
+parse_number(<<C, R/bits>>, L, Acc) when C =:= $. ->
+    parse_float(R, L, 0, <<Acc/binary, C>>);
+parse_number(R, L, Acc) when L =< 15 ->
+    {binary_to_integer(Acc), R}.
+
+parse_float(<<C, R/bits>>, L1, L2, Acc) when ?IS_DIGIT(C) ->
+    parse_float(R, L1, L2+1, <<Acc/binary, C>>);
+parse_float(R, L1, L2, Acc) when
+        L1 =< 9, L2 =< 6;
+        L1 =< 10, L2 =< 5;
+        L1 =< 11, L2 =< 4;
+        L1 =< 12, L2 =< 3;
+        L1 =< 13, L2 =< 2;
+        L1 =< 14, L2 =< 1 ->
+    {binary_to_float(Acc), R}.
+
+parse_string(<<$\\,$",R/bits>>, Acc) ->
+    parse_string(R, <<Acc/binary, $">>);
+parse_string(<<$\\,$\\,R/bits>>, Acc) ->
+    parse_string(R, <<Acc/binary, $\\>>);
+parse_string(<<$",R/bits>>, Acc) ->
+    {{string, Acc}, R};
+parse_string(<<C, R/bits>>, Acc) when
+        C >= 16#20, C =< 16#21;
+        C >= 16#23, C =< 16#5b;
+        C >= 16#5d, C =< 16#7e ->
+    parse_string(R, <<Acc/binary, C>>).
+
+parse_token(<<C, R/bits>>, Acc) when ?IS_TOKEN(C) or (C =:= $:) or (C =:= $/) ->
+    parse_token(R, <<Acc/binary, C>>);
+parse_token(R, Acc) ->
+    {{token, Acc}, R}.
+
+parse_binary(<<$*,R/bits>>, Acc) ->
+    {{binary, base64:decode(Acc)}, R};
+parse_binary(<<C, R/bits>>, Acc) when ?IS_ALPHANUM(C) or (C =:= $+) or (C =:= $/) or (C =:= $=) ->
+    parse_binary(R, <<Acc/binary, C>>).
+
+-ifdef(TEST).
+parse_struct_hd_test_() ->
+    Files = filelib:wildcard("deps/structured-header-tests/*.json"),
+    lists:flatten([begin
+        {ok, JSON} = file:read_file(File),
+        Tests = jsx:decode(JSON, [return_maps]),
+        [
+            {iolist_to_binary(io_lib:format("~s: ~s", [filename:basename(File), Name])), fun() ->
+                %% The implementation is strict. We fail whenever we can.
+                CanFail = maps:get(<<"can_fail">>, Test, false),
+                MustFail = maps:get(<<"must_fail">>, Test, false),
+                Expected = case MustFail of
+                    true -> undefined;
+                    false -> expected_to_term(maps:get(<<"expected">>, Test))
+                end,
+                Raw = raw_to_binary(Raw0),
+                case HeaderType of
+                    <<"dictionary">> when MustFail; CanFail ->
+                        {'EXIT', _} = (catch parse_dictionary(Raw));
+                    %% The test "binary.json: non-zero pad bits" does not fail
+                    %% due to our reliance on Erlang/OTP's base64 module.
+                    <<"item">> when CanFail ->
+                        case (catch parse_item(Raw)) of
+                            {'EXIT', _} -> ok;
+                            Expected -> ok
+                        end;
+                    <<"item">> when MustFail ->
+                        {'EXIT', _} = (catch parse_item(Raw));
+                    <<"list">> when MustFail; CanFail ->
+                        {'EXIT', _} = (catch parse_list(Raw));
+                    <<"dictionary">> ->
+                        {Expected, _Order} = (catch parse_dictionary(Raw));
+                    <<"item">> ->
+                        Expected = (catch parse_item(Raw));
+                    <<"list">> ->
+                        Expected = (catch parse_list(Raw))
+                end
+            end}
+        || Test=#{
+            <<"name">> := Name,
+            <<"header_type">> := HeaderType,
+            <<"raw">> := Raw0
+        } <- Tests]
+    end || File <- Files]).
+
+%% Item.
+expected_to_term(E=[_, Params]) when is_map(Params) ->
+    e2t(E);
+%% Outer list.
+expected_to_term(Expected) when is_list(Expected) ->
+    [e2t(E) || E <- Expected];
+expected_to_term(Expected) ->
+    e2t(Expected).
+
+%% Dictionary.
+e2t(Dict) when is_map(Dict) ->
+    maps:map(fun(_, V) -> e2t(V) end, Dict);
+%% Inner list.
+e2t([List, Params]) when is_list(List) ->
+    {with_params, [e2t(E) || E <- List],
+        maps:map(fun(_, P) -> e2tb(P) end, Params)};
+%% Item.
+e2t([Bare, Params]) ->
+    {with_params, e2tb(Bare),
+        maps:map(fun(_, P) -> e2tb(P) end, Params)}.
+
+%% Bare item.
+e2tb(#{<<"__type">> := <<"token">>, <<"value">> := V}) ->
+    {token, V};
+e2tb(#{<<"__type">> := <<"binary">>, <<"value">> := V}) ->
+    {binary, base32:decode(V)};
+e2tb(V) when is_binary(V) ->
+    {string, V};
+e2tb(null) ->
+    undefined;
+e2tb(V) ->
+    V.
+
+%% The Cowlib parsers currently do not support resuming parsing
+%% in the case of multiple headers. To make tests work we modify
+%% the raw value the same way Cowboy does when encountering
+%% multiple headers: by adding a comma and space in between.
+%%
+%% Similarly, the Cowlib parsers expect the leading and trailing
+%% whitespace to be removed before calling the parser.
+raw_to_binary(RawList) ->
+    trim_ws(iolist_to_binary(lists:join(<<", ">>, RawList))).
+
+trim_ws(<<C, R/bits>>) when ?IS_WS(C) -> trim_ws(R);
+trim_ws(R) -> trim_ws_end(R, byte_size(R) - 1).
+
+trim_ws_end(_, -1) ->
+    <<>>;
+trim_ws_end(Value, N) ->
+    case binary:at(Value, N) of
+        $\s -> trim_ws_end(Value, N - 1);
+        $\t -> trim_ws_end(Value, N - 1);
+        _ ->
+            S = N + 1,
+            << Value2:S/binary, _/bits >> = Value,
+            Value2
+    end.
+-endif.
+
+%% Building.
+
+-spec dictionary(#{binary() => sh_item() | sh_inner_list()}
+    | [{binary(), sh_item() | sh_inner_list()}])
+    -> iolist().
+%% @todo Also accept this? dictionary({Map, Order}) ->
+dictionary(Map) when is_map(Map) ->
+    dictionary(maps:to_list(Map));
+dictionary(KVList) when is_list(KVList) ->
+    lists:join(<<", ">>, [
+        [Key, $=, item_or_inner_list(Value)]
+    || {Key, Value} <- KVList]).
+
+-spec item(sh_item()) -> iolist().
+item({with_params, BareItem, Params}) ->
+    [bare_item(BareItem), params(Params)].
+
+-spec list(sh_list()) -> iolist().
+list(List) ->
+    lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]).
+
+item_or_inner_list(Value={with_params, List, _}) when is_list(List) ->
+    inner_list(Value);
+item_or_inner_list(Value) ->
+    item(Value).
+
+inner_list({with_params, List, Params}) ->
+    [$(, lists:join($\s, [item(Value) || Value <- List]), $), params(Params)].
+
+bare_item({string, String}) ->
+    [$", escape_string(String, <<>>), $"];
+bare_item({token, Token}) ->
+    Token;
+bare_item({binary, Binary}) ->
+    [$*, base64:encode(Binary), $*];
+bare_item(Integer) when is_integer(Integer) ->
+    integer_to_binary(Integer);
+%% In order to properly reproduce the float as a string we
+%% must first determine how many decimals we want in the
+%% fractional component, otherwise rounding errors may occur.
+bare_item(Float) when is_float(Float) ->
+    Decimals = case trunc(Float) of
+        I when I >= 10000000000000 -> 1;
+        I when I >= 1000000000000 -> 2;
+        I when I >= 100000000000 -> 3;
+        I when I >= 10000000000 -> 4;
+        I when I >= 1000000000 -> 5;
+        _ -> 6
+    end,
+    float_to_binary(Float, [{decimals, Decimals}, compact]);
+bare_item(true) ->
+    <<"?1">>;
+bare_item(false) ->
+    <<"?0">>.
+
+escape_string(<<>>, Acc) -> Acc;
+escape_string(<<$\\,R/bits>>, Acc) -> escape_string(R, <<Acc/binary, $\\, $\\>>);
+escape_string(<<$",R/bits>>, Acc) -> escape_string(R, <<Acc/binary, $\\, $">>);
+escape_string(<<C, R/bits>>, Acc) -> escape_string(R, <<Acc/binary, C>>).
+
+params(Params) ->
+    maps:fold(fun
+        (Key, undefined, Acc) ->
+            [[$;, Key]|Acc];
+        (Key, Value, Acc) ->
+            [[$;, Key, $=, bare_item(Value)]|Acc]
+    end, [], Params).
+
+-ifdef(TEST).
+struct_hd_identity_test_() ->
+    Files = filelib:wildcard("deps/structured-header-tests/*.json"),
+    lists:flatten([begin
+        {ok, JSON} = file:read_file(File),
+        Tests = jsx:decode(JSON, [return_maps]),
+        [
+            {iolist_to_binary(io_lib:format("~s: ~s", [filename:basename(File), Name])), fun() ->
+                Expected = expected_to_term(Expected0),
+                case HeaderType of
+                    <<"dictionary">> ->
+                        {Expected, _Order} = parse_dictionary(iolist_to_binary(dictionary(Expected)));
+                    <<"item">> ->
+                        Expected = parse_item(iolist_to_binary(item(Expected)));
+                    <<"list">> ->
+                        Expected = parse_list(iolist_to_binary(list(Expected)))
+                end
+            end}
+        || #{
+            <<"name">> := Name,
+            <<"header_type">> := HeaderType,
+            %% We only run tests that must not fail.
+            <<"expected">> := Expected0
+        } <- Tests]
+    end || File <- Files]).
+-endif.
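+
+%% Illustrative sketch (not part of the upstream module): a parse/build round
+%% trip using the {with_params, Value, Params} representation documented at
+%% the top of this file. The header value is a made-up example.
+%%
+%% struct_hd_round_trip_example() ->
+%%     Parsed = parse_list(<<"gzip;q=1.0, br">>),
+%%     [{with_params, {token, <<"gzip">>}, #{<<"q">> := 1.0}},
+%%      {with_params, {token, <<"br">>}, _NoParams}] = Parsed,
+%%     Parsed = parse_list(iolist_to_binary(list(Parsed))).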
diff --git a/src/wsLib/cow_http_te.erl b/src/wsLib/cow_http_te.erl new file mode 100644 index 0000000..57d5167 --- /dev/null +++ b/src/wsLib/cow_http_te.erl @@ -0,0 +1,373 @@ +%% Copyright (c) 2014-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_http_te). + +%% Identity. +-export([stream_identity/2]). +-export([identity/1]). + +%% Chunked. +-export([stream_chunked/2]). +-export([chunk/1]). +-export([last_chunk/0]). + +%% The state type is the same for both identity and chunked. +-type state() :: {non_neg_integer(), non_neg_integer()}. +-export_type([state/0]). + +-type decode_ret() :: more + | {more, Data::binary(), state()} + | {more, Data::binary(), RemLen::non_neg_integer(), state()} + | {more, Data::binary(), Rest::binary(), state()} + | {done, HasTrailers::trailers | no_trailers, Rest::binary()} + | {done, Data::binary(), HasTrailers::trailers | no_trailers, Rest::binary()}. +-export_type([decode_ret/0]). + +-include("cow_parse.hrl"). + +-ifdef(TEST). +dripfeed(<< C, Rest/bits >>, Acc, State, F) -> + case F(<< Acc/binary, C >>, State) of + more -> + dripfeed(Rest, << Acc/binary, C >>, State, F); + {more, _, State2} -> + dripfeed(Rest, <<>>, State2, F); + {more, _, Length, State2} when is_integer(Length) -> + dripfeed(Rest, <<>>, State2, F); + {more, _, Acc2, State2} -> + dripfeed(Rest, Acc2, State2, F); + {done, _, <<>>} -> + ok; + {done, _, _, <<>>} -> + ok + end. +-endif. + +%% Identity. + +%% @doc Decode an identity stream. + +-spec stream_identity(Data, State) + -> {more, Data, Len, State} | {done, Data, Len, Data} + when Data::binary(), State::state(), Len::non_neg_integer(). +stream_identity(Data, {Streamed, Total}) -> + Streamed2 = Streamed + byte_size(Data), + if + Streamed2 < Total -> + {more, Data, Total - Streamed2, {Streamed2, Total}}; + true -> + Size = Total - Streamed, + << Data2:Size/binary, Rest/bits >> = Data, + {done, Data2, Total, Rest} + end. + +-spec identity(Data) -> Data when Data::iodata(). +identity(Data) -> + Data. + +-ifdef(TEST). +stream_identity_test() -> + {done, <<>>, 0, <<>>} + = stream_identity(identity(<<>>), {0, 0}), + {done, <<"\r\n">>, 2, <<>>} + = stream_identity(identity(<<"\r\n">>), {0, 2}), + {done, << 0:80000 >>, 10000, <<>>} + = stream_identity(identity(<< 0:80000 >>), {0, 10000}), + ok. + +stream_identity_parts_test() -> + {more, << 0:8000 >>, 1999, S1} + = stream_identity(<< 0:8000 >>, {0, 2999}), + {more, << 0:8000 >>, 999, S2} + = stream_identity(<< 0:8000 >>, S1), + {done, << 0:7992 >>, 2999, <<>>} + = stream_identity(<< 0:7992 >>, S2), + ok. + +%% Using the same data as the chunked one for comparison. +horse_stream_identity() -> + horse:repeat(10000, + stream_identity(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, {0, 43}) + ). 
+ +horse_stream_identity_dripfeed() -> + horse:repeat(10000, + dripfeed(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, <<>>, {0, 43}, fun stream_identity/2) + ). +-endif. + +%% Chunked. + +%% @doc Decode a chunked stream. + +-spec stream_chunked(Data, State) + -> more | {more, Data, State} | {more, Data, non_neg_integer(), State} + | {more, Data, Data, State} + | {done, HasTrailers, Data} | {done, Data, HasTrailers, Data} + when Data::binary(), State::state(), HasTrailers::trailers | no_trailers. +stream_chunked(Data, State) -> + stream_chunked(Data, State, <<>>). + +%% New chunk. +stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r -> + case chunked_len(Data, Streamed, Acc, 0) of + {next, Rest, State, Acc2} -> + stream_chunked(Rest, State, Acc2); + {more, State, Acc2} -> + {more, Acc2, Data, State}; + Ret -> + Ret + end; +%% Trailing \r\n before next chunk. +stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) -> + stream_chunked(Rest, {0, Streamed}, Acc); +%% Trailing \r before next chunk. +stream_chunked(<< "\r" >>, {2, Streamed}, Acc) -> + {more, Acc, {1, Streamed}}; +%% Trailing \n before next chunk. +stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) -> + stream_chunked(Rest, {0, Streamed}, Acc); +%% More data needed. +stream_chunked(<<>>, State = {Rem, _}, Acc) -> + {more, Acc, Rem, State}; +%% Chunk data. +stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 -> + DataSize = byte_size(Data), + RemSize = Rem - 2, + case Data of + << Chunk:RemSize/binary, "\r\n", Rest/bits >> -> + stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>); + << Chunk:RemSize/binary, "\r" >> -> + {more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}}; + %% Everything in Data is part of the chunk. If we have more + %% data than the chunk accepts, then this is an error and we crash. + _ when DataSize =< RemSize -> + Rem2 = Rem - DataSize, + {more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}} + end. 
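+
+%% Illustrative sketch (not part of the upstream module): stream_chunked/2 is
+%% driven incrementally, threading its {RemainingChunkBytes, TotalStreamed}
+%% state between calls. An integer in a {more, Data, Len, State} return is the
+%% number of bytes still expected for the current chunk, including its
+%% trailing CRLF.
+%%
+%% chunked_two_calls_example() ->
+%%     {more, <<"Wik">>, 3, State} = stream_chunked(<<"4\r\nWik">>, {0, 0}),
+%%     {done, <<"i">>, no_trailers, <<>>} = stream_chunked(<<"i\r\n0\r\n\r\n">>, State).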
+
+chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
+chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
+chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
+chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
+chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
+chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
+chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
+chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
+chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
+chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
+chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
+chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
+chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
+chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
+chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
+chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
+chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
+chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
+chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
+chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
+chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
+chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
+%% Chunk extensions.
+%%
+%% Note that we currently skip the first character we encounter here,
+%% and not in the skip_chunk_ext function. If we later implement
+%% chunk extensions (unlikely) we will need to change this clause too.
+chunked_len(<< C, R/bits >>, S, A, Len) when ?IS_WS(C); C =:= $; -> skip_chunk_ext(R, S, A, Len, 0);
+%% Final chunk.
+%%
+%% When trailers follow, we simply return them as the Rest.
+%% Then the user code can decide to call the stream_trailers function
+%% to parse them. The user can therefore ignore trailers as necessary
+%% if they do not wish to handle them.
+chunked_len(<< "\r\n\r\n", R/bits >>, _, <<>>, 0) -> {done, no_trailers, R};
+chunked_len(<< "\r\n\r\n", R/bits >>, _, A, 0) -> {done, A, no_trailers, R};
+chunked_len(<< "\r\n", R/bits >>, _, <<>>, 0) when byte_size(R) > 2 -> {done, trailers, R};
+chunked_len(<< "\r\n", R/bits >>, _, A, 0) when byte_size(R) > 2 -> {done, A, trailers, R};
+chunked_len(_, _, _, 0) -> more;
+%% Normal chunk. Add 2 to Len for the trailing \r\n.
+chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
+chunked_len(<<"\r">>, _, <<>>, _) -> more;
+chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
+chunked_len(<<>>, _, <<>>, _) -> more;
+chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.
+
+skip_chunk_ext(R = << "\r", _/bits >>, S, A, Len, _) -> chunked_len(R, S, A, Len);
+skip_chunk_ext(R = <<>>, S, A, Len, _) -> chunked_len(R, S, A, Len);
+%% We skip up to 128 characters of chunk extensions. The value
+%% is hardcoded: chunk extensions are very rarely seen in the
+%% wild and Cowboy doesn't do anything with them anyway.
+%%
+%% Line breaks are not allowed in the middle of chunk extensions.
+skip_chunk_ext(<< C, R/bits >>, S, A, Len, Skipped) when C =/= $\n, Skipped < 128 -> + skip_chunk_ext(R, S, A, Len, Skipped + 1). + +%% @doc Encode a chunk. + +-spec chunk(D) -> D when D::iodata(). +chunk(Data) -> + [integer_to_list(iolist_size(Data), 16), <<"\r\n">>, + Data, <<"\r\n">>]. + +%% @doc Encode the last chunk of a chunked stream. + +-spec last_chunk() -> << _:40 >>. +last_chunk() -> + <<"0\r\n\r\n">>. + +-ifdef(TEST). +stream_chunked_identity_test() -> + {done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>} + = stream_chunked(iolist_to_binary([ + chunk("Wiki"), + chunk("pedia"), + chunk(" in\r\n\r\nchunks."), + last_chunk() + ]), {0, 0}), + ok. + +stream_chunked_one_pass_test() -> + {done, no_trailers, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}), + {done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>} + = stream_chunked(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, {0, 0}), + %% Same but with extra spaces or chunk extensions. + {done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>} + = stream_chunked(<< + "4 \r\n" + "Wiki\r\n" + "5 ; ext = abc\r\n" + "pedia\r\n" + "e;ext=abc\r\n" + " in\r\n\r\nchunks.\r\n" + "0;ext\r\n" + "\r\n">>, {0, 0}), + %% Same but with trailers. + {done, <<"Wikipedia in\r\n\r\nchunks.">>, trailers, Rest} + = stream_chunked(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "x-foo-bar: bar foo\r\n" + "\r\n">>, {0, 0}), + {[{<<"x-foo-bar">>, <<"bar foo">>}], <<>>} = cow_http:parse_headers(Rest), + ok. + +stream_chunked_n_passes_test() -> + S0 = {0, 0}, + more = stream_chunked(<<"4\r">>, S0), + {more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0), + {more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1), + {more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2), + {more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3), + {done, no_trailers, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4), + %% A few extra for coverage purposes. + more = stream_chunked(<<"\n3">>, {1, 0}), + {more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}), + {more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}), + {more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}), + ok. + +stream_chunked_dripfeed_test() -> + dripfeed(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, <<>>, {0, 0}, fun stream_chunked/2). + +do_body_to_chunks(_, <<>>, Acc) -> + lists:reverse([<<"0\r\n\r\n">>|Acc]); +do_body_to_chunks(ChunkSize, Body, Acc) -> + BodySize = byte_size(Body), + ChunkSize2 = case BodySize < ChunkSize of + true -> BodySize; + false -> ChunkSize + end, + << Chunk:ChunkSize2/binary, Rest/binary >> = Body, + ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)), + do_body_to_chunks(ChunkSize, Rest, + [<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]). + +stream_chunked_dripfeed2_test() -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])), + dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2). + +stream_chunked_error_test_() -> + Tests = [ + {<<>>, undefined}, + {<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}} + ], + [{lists:flatten(io_lib:format("value ~p state ~p", [V, S])), + fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end} + || {V, S} <- Tests]. 
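+
+%% Editor's note (illustrative example, not part of the upstream cowlib sources):
+%% chunk/1 and last_chunk/0 are the encoding counterparts of stream_chunked/2,
+%% as the identity test above exercises. For instance:
+%%
+%% 1> iolist_to_binary([chunk("Wiki"), last_chunk()]).
+%% <<"4\r\nWiki\r\n0\r\n\r\n">>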
+ +horse_stream_chunked() -> + horse:repeat(10000, + stream_chunked(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, {0, 0}) + ). + +horse_stream_chunked_dripfeed() -> + horse:repeat(10000, + dripfeed(<< + "4\r\n" + "Wiki\r\n" + "5\r\n" + "pedia\r\n" + "e\r\n" + " in\r\n\r\nchunks.\r\n" + "0\r\n" + "\r\n">>, <<>>, {0, 43}, fun stream_chunked/2) + ). +-endif. diff --git a/src/wsLib/cow_iolists.erl b/src/wsLib/cow_iolists.erl new file mode 100644 index 0000000..dcb48d7 --- /dev/null +++ b/src/wsLib/cow_iolists.erl @@ -0,0 +1,95 @@ +%% Copyright (c) 2017-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_iolists). + +-export([split/2]). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). +-endif. + +-spec split(non_neg_integer(), iodata()) -> {iodata(), iodata()}. +split(N, Iolist) -> + case split(N, Iolist, []) of + {ok, Before, After} -> + {Before, After}; + {more, _, Before} -> + {lists:reverse(Before), <<>>} + end. + +split(0, Rest, Acc) -> + {ok, lists:reverse(Acc), Rest}; +split(N, [], Acc) -> + {more, N, Acc}; +split(N, Binary, Acc) when byte_size(Binary) =< N -> + {more, N - byte_size(Binary), [Binary|Acc]}; +split(N, Binary, Acc) when is_binary(Binary) -> + << Before:N/binary, After/bits >> = Binary, + {ok, lists:reverse([Before|Acc]), After}; +split(N, [Binary|Tail], Acc) when byte_size(Binary) =< N -> + split(N - byte_size(Binary), Tail, [Binary|Acc]); +split(N, [Binary|Tail], Acc) when is_binary(Binary) -> + << Before:N/binary, After/bits >> = Binary, + {ok, lists:reverse([Before|Acc]), [After|Tail]}; +split(N, [Char|Tail], Acc) when is_integer(Char) -> + split(N - 1, Tail, [Char|Acc]); +split(N, [List|Tail], Acc0) -> + case split(N, List, Acc0) of + {ok, Before, After} -> + {ok, Before, [After|Tail]}; + {more, More, Acc} -> + split(More, Tail, Acc) + end. + +-ifdef(TEST). + +split_test_() -> + Tests = [ + {10, "Hello world!", "Hello worl", "d!"}, + {10, <<"Hello world!">>, "Hello worl", "d!"}, + {10, ["He", [<<"llo">>], $\s, [["world"], <<"!">>]], "Hello worl", "d!"}, + {10, ["Hello "|<<"world!">>], "Hello worl", "d!"}, + {10, "Hello!", "Hello!", ""}, + {10, <<"Hello!">>, "Hello!", ""}, + {10, ["He", [<<"ll">>], $o, [["!"]]], "Hello!", ""}, + {10, ["Hel"|<<"lo!">>], "Hello!", ""}, + {10, [[<<>>|<<>>], [], <<"Hello world!">>], "Hello worl", "d!"}, + {10, [[<<"He">>|<<"llo">>], [$\s], <<"world!">>], "Hello worl", "d!"}, + {10, [[[]|<<"He">>], [[]|<<"llo wor">>]|<<"ld!">>], "Hello worl", "d!"} + ], + [{iolist_to_binary(V), fun() -> + {B, A} = split(N, V), + true = iolist_to_binary(RB) =:= iolist_to_binary(B), + true = iolist_to_binary(RA) =:= iolist_to_binary(A) + end} || {N, V, RB, RA} <- Tests]. 
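+
+%% Editor's note (illustrative example, not part of the upstream cowlib sources):
+%% split/2 splits iodata() after N bytes without first converting the whole
+%% iolist to a binary:
+%%
+%% 1> {Before, After} = cow_iolists:split(5, ["Hel", <<"lo">>, [" world"]]),
+%%    {iolist_to_binary(Before), iolist_to_binary(After)}.
+%% {<<"Hello">>,<<" world">>}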
+
+prop_split_test() ->
+	?FORALL({N, Input},
+		{non_neg_integer(), iolist()},
+		begin
+			Size = iolist_size(Input),
+			{Before, After} = split(N, Input),
+			if
+				N >= Size ->
+					((iolist_size(After) =:= 0)
+						andalso iolist_to_binary(Before) =:= iolist_to_binary(Input));
+				true ->
+					<<ExpectBefore:N/binary, ExpectAfter/bits>> = iolist_to_binary(Input),
+					(ExpectBefore =:= iolist_to_binary(Before))
+						andalso (ExpectAfter =:= iolist_to_binary(After))
+			end
+		end).
+
+-endif.
diff --git a/src/wsLib/cow_link.erl b/src/wsLib/cow_link.erl
new file mode 100644
index 0000000..8320297
--- /dev/null
+++ b/src/wsLib/cow_link.erl
@@ -0,0 +1,445 @@
+%% Copyright (c) 2019, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_link).
+-compile({no_auto_import, [link/1]}).
+
+-export([parse_link/1]).
+-export([resolve_link/2]).
+-export([resolve_link/3]).
+-export([link/1]).
+
+-include("cow_inline.hrl").
+-include("cow_parse.hrl").
+
+-type link() :: #{
+	target := binary(),
+	rel := binary(),
+	attributes := [{binary(), binary()}]
+}.
+-export_type([link/0]).
+
+-type resolve_opts() :: #{
+	allow_anchor => boolean()
+}.
+
+-type uri() :: uri_string:uri_map() | uri_string:uri_string() | undefined.
+
+%% Parse a link header.
+
+%% This function returns the URI target from the header directly.
+%% Relative URIs must then be resolved as per RFC3986 5. In some
+%% cases it might not be possible to resolve URIs, for example when
+%% the link header is returned with a 404 status code.
+-spec parse_link(binary()) -> [link()].
+parse_link(Link) ->
+	before_target(Link, []).
+
+before_target(<<>>, Acc) -> lists:reverse(Acc);
+before_target(<<$<,R/bits>>, Acc) -> target(R, Acc, <<>>);
+before_target(<<C,R/bits>>, Acc) when ?IS_WS(C) -> before_target(R, Acc).
+
+target(<<$>,R/bits>>, Acc, T) -> param_sep(R, Acc, T, []);
+target(<<C,R/bits>>, Acc, T) -> target(R, Acc, <<T/binary, C>>).
+
+param_sep(<<>>, Acc, T, P) -> lists:reverse(acc_link(Acc, T, P));
+param_sep(<<$,,R/bits>>, Acc, T, P) -> before_target(R, acc_link(Acc, T, P));
+param_sep(<<$;,R/bits>>, Acc, T, P) -> before_param(R, Acc, T, P);
+param_sep(<<C,R/bits>>, Acc, T, P) when ?IS_WS(C) -> param_sep(R, Acc, T, P).
+
+before_param(<<C,R/bits>>, Acc, T, P) when ?IS_WS(C) -> before_param(R, Acc, T, P);
+before_param(<<C,R/bits>>, Acc, T, P) when ?IS_TOKEN(C) -> ?LOWER(param, R, Acc, T, P, <<>>).
+
+param(<<$=,$",R/bits>>, Acc, T, P, K) -> quoted(R, Acc, T, P, K, <<>>);
+param(<<$=,C,R/bits>>, Acc, T, P, K) when ?IS_TOKEN(C) -> value(R, Acc, T, P, K, <<C>>);
+param(<<C,R/bits>>, Acc, T, P, K) when ?IS_TOKEN(C) -> ?LOWER(param, R, Acc, T, P, K).
+
+quoted(<<$",R/bits>>, Acc, T, P, K, V) -> param_sep(R, Acc, T, [{K, V}|P]);
+quoted(<<$\\,C,R/bits>>, Acc, T, P, K, V) when ?IS_VCHAR_OBS(C) -> quoted(R, Acc, T, P, K, <<V/binary, C>>);
+quoted(<<C,R/bits>>, Acc, T, P, K, V) when ?IS_VCHAR_OBS(C) -> quoted(R, Acc, T, P, K, <<V/binary, C>>).
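+
+%% Editor's note (illustrative example, not part of the upstream cowlib sources):
+%% parse_link/1 turns a Link header value into a list of link() maps, e.g.:
+%%
+%% [#{target := <<"/feed">>, rel := <<"alternate">>,
+%%		attributes := [{<<"type">>, <<"application/atom+xml">>}]}] =
+%%	parse_link(<<"</feed>; rel=\"alternate\"; type=\"application/atom+xml\"">>).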
+
+value(<<C,R/bits>>, Acc, T, P, K, V) when ?IS_TOKEN(C) -> value(R, Acc, T, P, K, <<V/binary, C>>);
+value(R, Acc, T, P, K, V) -> param_sep(R, Acc, T, [{K, V}|P]).
+
+acc_link(Acc, Target, Params0) ->
+	Params1 = lists:reverse(Params0),
+	%% The rel parameter MUST be present. (RFC8288 3.3)
+	{value, {_, Rel}, Params2} = lists:keytake(<<"rel">>, 1, Params1),
+	%% Occurrences after the first MUST be ignored by parsers.
+	Params = filter_out_duplicates(Params2, #{}),
+	[#{
+		target => Target,
+		rel => ?LOWER(Rel),
+		attributes => Params
+	}|Acc].
+
+%% This function removes duplicates for attributes that don't allow them.
+filter_out_duplicates([], _) ->
+	[];
+%% The "rel" is mandatory and was already removed from params.
+filter_out_duplicates([{<<"rel">>, _}|Tail], State) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([{<<"anchor">>, _}|Tail], State=#{anchor := true}) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([{<<"media">>, _}|Tail], State=#{media := true}) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([{<<"title">>, _}|Tail], State=#{title := true}) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([{<<"title*">>, _}|Tail], State=#{title_star := true}) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([{<<"type">>, _}|Tail], State=#{type := true}) ->
+	filter_out_duplicates(Tail, State);
+filter_out_duplicates([Tuple={<<"anchor">>, _}|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State#{anchor => true})];
+filter_out_duplicates([Tuple={<<"media">>, _}|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State#{media => true})];
+filter_out_duplicates([Tuple={<<"title">>, _}|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State#{title => true})];
+filter_out_duplicates([Tuple={<<"title*">>, _}|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State#{title_star => true})];
+filter_out_duplicates([Tuple={<<"type">>, _}|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State#{type => true})];
+filter_out_duplicates([Tuple|Tail], State) ->
+	[Tuple|filter_out_duplicates(Tail, State)].
+
+-ifdef(TEST).
+parse_link_test_() ->
+	Tests = [
+		{<<>>, []},
+		{<<" ">>, []},
+		%% Examples from the RFC.
+		{<<"<http://example.com/TheBook/chapter2>; rel=\"previous\"; title=\"previous chapter\"">>, [
+			#{
+				target => <<"http://example.com/TheBook/chapter2">>,
+				rel => <<"previous">>,
+				attributes => [
+					{<<"title">>, <<"previous chapter">>}
+				]
+			}
+		]},
+		{<<"</>; rel=\"http://example.net/foo\"">>, [
+			#{
+				target => <<"/">>,
+				rel => <<"http://example.net/foo">>,
+				attributes => []
+			}
+		]},
+		{<<"</terms>; rel=\"copyright\"; anchor=\"#foo\"">>, [
+			#{
+				target => <<"/terms">>,
+				rel => <<"copyright">>,
+				attributes => [
+					{<<"anchor">>, <<"#foo">>}
+				]
+			}
+		]},
+%		{<<"</TheBook/chapter2>; rel=\"previous\"; title*=UTF-8'de'letztes%20Kapitel, "
+%			"</TheBook/chapter4>; rel=\"next\"; title*=UTF-8'de'n%c3%a4chstes%20Kapitel">>, [
+%			%% @todo
+%		]}
+		{<<"<http://example.org/>; rel=\"start http://example.net/relation/other\"">>, [
+			#{
+				target => <<"http://example.org/">>,
+				rel => <<"start http://example.net/relation/other">>,
+				attributes => []
+			}
+		]},
+		{<<"<https://example.org/>; rel=\"start\", "
+			"<https://example.org/index>; rel=\"index\"">>, [
+			#{
+				target => <<"https://example.org/">>,
+				rel => <<"start">>,
+				attributes => []
+			},
+			#{
+				target => <<"https://example.org/index">>,
+				rel => <<"index">>,
+				attributes => []
+			}
+		]},
+		%% Relation types are case insensitive.
+		{<<"</>; rel=\"SELF\"">>, [
+			#{
+				target => <<"/">>,
+				rel => <<"self">>,
+				attributes => []
+			}
+		]},
+		{<<"</>; rel=\"HTTP://EXAMPLE.NET/FOO\"">>, [
+			#{
+				target => <<"/">>,
+				rel => <<"http://example.net/foo">>,
+				attributes => []
+			}
+		]},
+		%% Attribute names are case insensitive.
+		{<<"</terms>; REL=\"copyright\"; ANCHOR=\"#foo\"">>, [
+			#{
+				target => <<"/terms">>,
+				rel => <<"copyright">>,
+				attributes => [
+					{<<"anchor">>, <<"#foo">>}
+				]
+			}
+		]}
+	],
+	[{V, fun() -> R = parse_link(V) end} || {V, R} <- Tests].
+-endif.
+
+%% Resolve a link based on the context URI and options.
+
+-spec resolve_link(Link, uri()) -> Link | false when Link::link().
+resolve_link(Link, ContextURI) ->
+	resolve_link(Link, ContextURI, #{}).
+
+-spec resolve_link(Link, uri(), resolve_opts()) -> Link | false when Link::link().
+%% When we do not have a context URI we only succeed when the target URI is absolute.
+%% The target URI will only be normalized in that case.
+resolve_link(Link=#{target := TargetURI}, undefined, _) ->
+	case uri_string:parse(TargetURI) of
+		URIMap = #{scheme := _} ->
+			Link#{target => uri_string:normalize(URIMap)};
+		_ ->
+			false
+	end;
+resolve_link(Link=#{attributes := Params}, ContextURI, Opts) ->
+	AllowAnchor = maps:get(allow_anchor, Opts, true),
+	case lists:keyfind(<<"anchor">>, 1, Params) of
+		false ->
+			do_resolve_link(Link, ContextURI);
+		{_, Anchor} when AllowAnchor ->
+			do_resolve_link(Link, resolve(Anchor, ContextURI));
+		_ ->
+			false
+	end.
+
+do_resolve_link(Link=#{target := TargetURI}, ContextURI) ->
+	Link#{target => uri_string:recompose(resolve(TargetURI, ContextURI))}.
+
+-ifdef(TEST).
+resolve_link_test_() ->
+	Tests = [
+		%% No context URI available.
+		{#{target => <<"http://a/b/./c">>}, undefined, #{},
+			#{target => <<"http://a/b/c">>}},
+		{#{target => <<"a/b/./c">>}, undefined, #{},
+			false},
+		%% Context URI available, allow_anchor => true.
+		{#{target => <<"http://a/b">>, attributes => []}, <<"http://a/c">>, #{},
+			#{target => <<"http://a/b">>, attributes => []}},
+		{#{target => <<"b">>, attributes => []}, <<"http://a/c">>, #{},
+			#{target => <<"http://a/b">>, attributes => []}},
+		{#{target => <<"b">>, attributes => [{<<"anchor">>, <<"#frag">>}]}, <<"http://a/c">>, #{},
+			#{target => <<"http://a/b">>, attributes => [{<<"anchor">>, <<"#frag">>}]}},
+		{#{target => <<"b">>, attributes => [{<<"anchor">>, <<"d/e">>}]}, <<"http://a/c">>, #{},
+			#{target => <<"http://a/d/b">>, attributes => [{<<"anchor">>, <<"d/e">>}]}},
+		%% Context URI available, allow_anchor => false.
+		{#{target => <<"http://a/b">>, attributes => []}, <<"http://a/c">>, #{allow_anchor => false},
+			#{target => <<"http://a/b">>, attributes => []}},
+		{#{target => <<"b">>, attributes => []}, <<"http://a/c">>, #{allow_anchor => false},
+			#{target => <<"http://a/b">>, attributes => []}},
+		{#{target => <<"b">>, attributes => [{<<"anchor">>, <<"#frag">>}]},
+			<<"http://a/c">>, #{allow_anchor => false}, false},
+		{#{target => <<"b">>, attributes => [{<<"anchor">>, <<"d/e">>}]},
+			<<"http://a/c">>, #{allow_anchor => false}, false}
+	],
+	[{iolist_to_binary(io_lib:format("~0p", [L])),
+		fun() -> R = resolve_link(L, C, O) end} || {L, C, O, R} <- Tests].
+-endif.
+
+%% @todo This function has been added to Erlang/OTP 22.3 as uri_string:resolve/2,3.
+resolve(URI, BaseURI) ->
+	case resolve1(ensure_map_uri(URI), BaseURI) of
+		TargetURI = #{path := Path0} ->
+			%% We remove dot segments. Normalizing the entire URI
+			%% will sometimes add an extra slash we don't want.
+			#{path := Path} = uri_string:normalize(#{path => Path0}, [return_map]),
+			TargetURI#{path => Path};
+		TargetURI ->
+			TargetURI
+	end.
+
+resolve1(URI=#{scheme := _}, _) ->
+	URI;
+resolve1(URI=#{host := _}, BaseURI) ->
+	#{scheme := Scheme} = ensure_map_uri(BaseURI),
+	URI#{scheme => Scheme};
+resolve1(URI=#{path := <<>>}, BaseURI0) ->
+	BaseURI = ensure_map_uri(BaseURI0),
+	Keys = case maps:is_key(query, URI) of
+		true -> [scheme, host, port, path];
+		false -> [scheme, host, port, path, query]
+	end,
+	maps:merge(URI, maps:with(Keys, BaseURI));
+resolve1(URI=#{path := <<"/",_/bits>>}, BaseURI0) ->
+	BaseURI = ensure_map_uri(BaseURI0),
+	maps:merge(URI, maps:with([scheme, host, port], BaseURI));
+resolve1(URI=#{path := Path}, BaseURI0) ->
+	BaseURI = ensure_map_uri(BaseURI0),
+	maps:merge(
+		URI#{path := merge_paths(Path, BaseURI)},
+		maps:with([scheme, host, port], BaseURI)).
+
+merge_paths(Path, #{host := _, path := <<>>}) ->
+	<<$/, Path/binary>>;
+merge_paths(Path, #{path := BasePath0}) ->
+	case string:split(BasePath0, <<$/>>, trailing) of
+		[BasePath, _] -> <<BasePath/binary, $/, Path/binary>>;
+		[_] -> <<$/, Path/binary>>
+	end.
+
+ensure_map_uri(URI) when is_map(URI) -> URI;
+ensure_map_uri(URI) -> uri_string:parse(iolist_to_binary(URI)).
+
+-ifdef(TEST).
+resolve_test_() ->
+	Tests = [
+		%% 5.4.1. Normal Examples
+		{<<"g:h">>, <<"g:h">>},
+		{<<"g">>, <<"http://a/b/c/g">>},
+		{<<"./g">>, <<"http://a/b/c/g">>},
+		{<<"g/">>, <<"http://a/b/c/g/">>},
+		{<<"/g">>, <<"http://a/g">>},
+		{<<"//g">>, <<"http://g">>},
+		{<<"?y">>, <<"http://a/b/c/d;p?y">>},
+		{<<"g?y">>, <<"http://a/b/c/g?y">>},
+		{<<"#s">>, <<"http://a/b/c/d;p?q#s">>},
+		{<<"g#s">>, <<"http://a/b/c/g#s">>},
+		{<<"g?y#s">>, <<"http://a/b/c/g?y#s">>},
+		{<<";x">>, <<"http://a/b/c/;x">>},
+		{<<"g;x">>, <<"http://a/b/c/g;x">>},
+		{<<"g;x?y#s">>, <<"http://a/b/c/g;x?y#s">>},
+		{<<"">>, <<"http://a/b/c/d;p?q">>},
+		{<<".">>, <<"http://a/b/c/">>},
+		{<<"./">>, <<"http://a/b/c/">>},
+		{<<"..">>, <<"http://a/b/">>},
+		{<<"../">>, <<"http://a/b/">>},
+		{<<"../g">>, <<"http://a/b/g">>},
+		{<<"../..">>, <<"http://a/">>},
+		{<<"../../">>, <<"http://a/">>},
+		{<<"../../g">>, <<"http://a/g">>},
+		%% 5.4.2. Abnormal Examples
+		{<<"../../../g">>, <<"http://a/g">>},
+		{<<"../../../../g">>, <<"http://a/g">>},
+		{<<"/./g">>, <<"http://a/g">>},
+		{<<"/../g">>, <<"http://a/g">>},
+		{<<"g.">>, <<"http://a/b/c/g.">>},
+		{<<".g">>, <<"http://a/b/c/.g">>},
+		{<<"g..">>, <<"http://a/b/c/g..">>},
+		{<<"..g">>, <<"http://a/b/c/..g">>},
+		{<<"./../g">>, <<"http://a/b/g">>},
+		{<<"./g/.">>, <<"http://a/b/c/g/">>},
+		{<<"g/./h">>, <<"http://a/b/c/g/h">>},
+		{<<"g/../h">>, <<"http://a/b/c/h">>},
+		{<<"g;x=1/./y">>, <<"http://a/b/c/g;x=1/y">>},
+		{<<"g;x=1/../y">>, <<"http://a/b/c/y">>},
+		{<<"g?y/./x">>, <<"http://a/b/c/g?y/./x">>},
+		{<<"g?y/../x">>, <<"http://a/b/c/g?y/../x">>},
+		{<<"g#s/./x">>, <<"http://a/b/c/g#s/./x">>},
+		{<<"g#s/../x">>, <<"http://a/b/c/g#s/../x">>},
+		{<<"http:g">>, <<"http:g">>} %% for strict parsers
+	],
+	[{V, fun() -> R = uri_string:recompose(resolve(V, <<"http://a/b/c/d;p?q">>)) end} || {V, R} <- Tests].
+-endif.
+
+%% Build a link header.
+
+-spec link([#{
+	target := binary(),
+	rel := binary(),
+	attributes := [{binary(), binary()}]
+}]) -> iodata().
+link(Links) ->
+	lists:join(<<", ">>, [do_link(Link) || Link <- Links]).
+
+do_link(#{target := TargetURI, rel := Rel, attributes := Params}) ->
+	[
+		$<, TargetURI, <<">"
+			"; rel=\"">>, Rel, $",
+		[[<<"; ">>, Key, <<"=\"">>, escape(iolist_to_binary(Value), <<>>), $"]
+			|| {Key, Value} <- Params]
+	].
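+
+%% Editor's note (illustrative example, not part of the upstream cowlib sources):
+%% link/1 is the encoding counterpart of parse_link/1 and returns iodata():
+%%
+%% 1> iolist_to_binary(cow_link:link([#{target => <<"/feed">>, rel => <<"alternate">>,
+%%		attributes => [{<<"type">>, <<"application/atom+xml">>}]}])).
+%% <<"</feed>; rel=\"alternate\"; type=\"application/atom+xml\"">>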
+
+escape(<<>>, Acc) -> Acc;
+escape(<<$\\,R/bits>>, Acc) -> escape(R, <<Acc/binary, $\\, $\\>>);
+escape(<<$\",R/bits>>, Acc) -> escape(R, <<Acc/binary, $\\, $\">>);
+escape(<<C,R/bits>>, Acc) -> escape(R, <<Acc/binary, C>>).
+
+-ifdef(TEST).
+link_test_() ->
+	Tests = [
+		{<<>>, []},
+		%% Examples from the RFC.
+		{<<"<http://example.com/TheBook/chapter2>; rel=\"previous\"; title=\"previous chapter\"">>, [
+			#{
+				target => <<"http://example.com/TheBook/chapter2">>,
+				rel => <<"previous">>,
+				attributes => [
+					{<<"title">>, <<"previous chapter">>}
+				]
+			}
+		]},
+		{<<"</>; rel=\"http://example.net/foo\"">>, [
+			#{
+				target => <<"/">>,
+				rel => <<"http://example.net/foo">>,
+				attributes => []
+			}
+		]},
+		{<<"</terms>; rel=\"copyright\"; anchor=\"#foo\"">>, [
+			#{
+				target => <<"/terms">>,
+				rel => <<"copyright">>,
+				attributes => [
+					{<<"anchor">>, <<"#foo">>}
+				]
+			}
+		]},
+%		{<<"</TheBook/chapter2>; rel=\"previous\"; title*=UTF-8'de'letztes%20Kapitel, "
+%			"</TheBook/chapter4>; rel=\"next\"; title*=UTF-8'de'n%c3%a4chstes%20Kapitel">>, [
+%			%% @todo
+%		]}
+		{<<"<http://example.org/>; rel=\"start http://example.net/relation/other\"">>, [
+			#{
+				target => <<"http://example.org/">>,
+				rel => <<"start http://example.net/relation/other">>,
+				attributes => []
+			}
+		]},
+		{<<"<https://example.org/>; rel=\"start\", "
+			"<https://example.org/index>; rel=\"index\"">>, [
+			#{
+				target => <<"https://example.org/">>,
+				rel => <<"start">>,
+				attributes => []
+			},
+			#{
+				target => <<"https://example.org/index">>,
+				rel => <<"index">>,
+				attributes => []
+			}
+		]},
+		{<<"</>; rel=\"previous\"; quoted=\"name=\\\"value\\\"\"">>, [
+			#{
+				target => <<"/">>,
+				rel => <<"previous">>,
+				attributes => [
+					{<<"quoted">>, <<"name=\"value\"">>}
+				]
+			}
+		]}
+	],
+	[{iolist_to_binary(io_lib:format("~0p", [V])),
+		fun() -> R = iolist_to_binary(link(V)) end} || {R, V} <- Tests].
+-endif.
diff --git a/src/wsLib/cow_mimetypes.erl b/src/wsLib/cow_mimetypes.erl
new file mode 100644
index 0000000..07fc69f
--- /dev/null
+++ b/src/wsLib/cow_mimetypes.erl
@@ -0,0 +1,1045 @@
+%% Copyright (c) 2013-2018, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_mimetypes).
+
+-export([all/1]).
+-export([web/1]).
+
+%% @doc Return the mimetype for any file by looking at its extension.
+
+-spec all(binary()) -> {binary(), binary(), []}.
+all(Path) ->
+	case filename:extension(Path) of
+		<<>> -> {<<"application">>, <<"octet-stream">>, []};
+		%% @todo Convert to string:lowercase on OTP-20+.
+		<< $., Ext/binary >> -> all_ext(list_to_binary(string:to_lower(binary_to_list(Ext))))
+	end.
+
+%% @doc Return the mimetype for a Web related file by looking at its extension.
+
+-spec web(binary()) -> {binary(), binary(), []}.
+web(Path) ->
+	case filename:extension(Path) of
+		<<>> -> {<<"application">>, <<"octet-stream">>, []};
+		%% @todo Convert to string:lowercase on OTP-20+.
+		<< $., Ext/binary >> -> web_ext(list_to_binary(string:to_lower(binary_to_list(Ext))))
+	end.
+
+%% Internal.
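+
+%% Editor's note (illustrative example, not part of the upstream cowlib sources):
+%% both all/1 and web/1 key off the lowercased file extension; a path without
+%% an extension falls back to application/octet-stream, e.g.:
+%%
+%% 1> cow_mimetypes:all(<<"priv/app.CSS">>).
+%% {<<"text">>,<<"css">>,[]}
+%% 2> cow_mimetypes:all(<<"Makefile">>).
+%% {<<"application">>,<<"octet-stream">>,[]}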
+ +%% GENERATED +all_ext(<<"123">>) -> {<<"application">>, <<"vnd.lotus-1-2-3">>, []}; +all_ext(<<"3dml">>) -> {<<"text">>, <<"vnd.in3d.3dml">>, []}; +all_ext(<<"3ds">>) -> {<<"image">>, <<"x-3ds">>, []}; +all_ext(<<"3g2">>) -> {<<"video">>, <<"3gpp2">>, []}; +all_ext(<<"3gp">>) -> {<<"video">>, <<"3gpp">>, []}; +all_ext(<<"7z">>) -> {<<"application">>, <<"x-7z-compressed">>, []}; +all_ext(<<"aab">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"aac">>) -> {<<"audio">>, <<"x-aac">>, []}; +all_ext(<<"aam">>) -> {<<"application">>, <<"x-authorware-map">>, []}; +all_ext(<<"aas">>) -> {<<"application">>, <<"x-authorware-seg">>, []}; +all_ext(<<"abw">>) -> {<<"application">>, <<"x-abiword">>, []}; +all_ext(<<"ac">>) -> {<<"application">>, <<"pkix-attr-cert">>, []}; +all_ext(<<"acc">>) -> {<<"application">>, <<"vnd.americandynamics.acc">>, []}; +all_ext(<<"ace">>) -> {<<"application">>, <<"x-ace-compressed">>, []}; +all_ext(<<"acu">>) -> {<<"application">>, <<"vnd.acucobol">>, []}; +all_ext(<<"acutc">>) -> {<<"application">>, <<"vnd.acucorp">>, []}; +all_ext(<<"adp">>) -> {<<"audio">>, <<"adpcm">>, []}; +all_ext(<<"aep">>) -> {<<"application">>, <<"vnd.audiograph">>, []}; +all_ext(<<"afm">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"afp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"ahead">>) -> {<<"application">>, <<"vnd.ahead.space">>, []}; +all_ext(<<"ai">>) -> {<<"application">>, <<"postscript">>, []}; +all_ext(<<"aif">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"aifc">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"aiff">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"air">>) -> {<<"application">>, <<"vnd.adobe.air-application-installer-package+zip">>, []}; +all_ext(<<"ait">>) -> {<<"application">>, <<"vnd.dvb.ait">>, []}; +all_ext(<<"ami">>) -> {<<"application">>, <<"vnd.amiga.ami">>, []}; +all_ext(<<"apk">>) -> {<<"application">>, <<"vnd.android.package-archive">>, []}; +all_ext(<<"appcache">>) -> {<<"text">>, <<"cache-manifest">>, []}; +all_ext(<<"application">>) -> {<<"application">>, <<"x-ms-application">>, []}; +all_ext(<<"apr">>) -> {<<"application">>, <<"vnd.lotus-approach">>, []}; +all_ext(<<"arc">>) -> {<<"application">>, <<"x-freearc">>, []}; +all_ext(<<"asc">>) -> {<<"application">>, <<"pgp-signature">>, []}; +all_ext(<<"asf">>) -> {<<"video">>, <<"x-ms-asf">>, []}; +all_ext(<<"asm">>) -> {<<"text">>, <<"x-asm">>, []}; +all_ext(<<"aso">>) -> {<<"application">>, <<"vnd.accpac.simply.aso">>, []}; +all_ext(<<"asx">>) -> {<<"video">>, <<"x-ms-asf">>, []}; +all_ext(<<"atc">>) -> {<<"application">>, <<"vnd.acucorp">>, []}; +all_ext(<<"atom">>) -> {<<"application">>, <<"atom+xml">>, []}; +all_ext(<<"atomcat">>) -> {<<"application">>, <<"atomcat+xml">>, []}; +all_ext(<<"atomsvc">>) -> {<<"application">>, <<"atomsvc+xml">>, []}; +all_ext(<<"atx">>) -> {<<"application">>, <<"vnd.antix.game-component">>, []}; +all_ext(<<"au">>) -> {<<"audio">>, <<"basic">>, []}; +all_ext(<<"avi">>) -> {<<"video">>, <<"x-msvideo">>, []}; +all_ext(<<"aw">>) -> {<<"application">>, <<"applixware">>, []}; +all_ext(<<"azf">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azf">>, []}; +all_ext(<<"azs">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azs">>, []}; +all_ext(<<"azw">>) -> {<<"application">>, <<"vnd.amazon.ebook">>, []}; +all_ext(<<"bat">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"bcpio">>) -> {<<"application">>, <<"x-bcpio">>, []}; +all_ext(<<"bdf">>) -> {<<"application">>, <<"x-font-bdf">>, []}; 
+all_ext(<<"bdm">>) -> {<<"application">>, <<"vnd.syncml.dm+wbxml">>, []}; +all_ext(<<"bed">>) -> {<<"application">>, <<"vnd.realvnc.bed">>, []}; +all_ext(<<"bh2">>) -> {<<"application">>, <<"vnd.fujitsu.oasysprs">>, []}; +all_ext(<<"bin">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"blb">>) -> {<<"application">>, <<"x-blorb">>, []}; +all_ext(<<"blorb">>) -> {<<"application">>, <<"x-blorb">>, []}; +all_ext(<<"bmi">>) -> {<<"application">>, <<"vnd.bmi">>, []}; +all_ext(<<"bmp">>) -> {<<"image">>, <<"bmp">>, []}; +all_ext(<<"book">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"box">>) -> {<<"application">>, <<"vnd.previewsystems.box">>, []}; +all_ext(<<"boz">>) -> {<<"application">>, <<"x-bzip2">>, []}; +all_ext(<<"bpk">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"btif">>) -> {<<"image">>, <<"prs.btif">>, []}; +all_ext(<<"bz2">>) -> {<<"application">>, <<"x-bzip2">>, []}; +all_ext(<<"bz">>) -> {<<"application">>, <<"x-bzip">>, []}; +all_ext(<<"c11amc">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config">>, []}; +all_ext(<<"c11amz">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config-pkg">>, []}; +all_ext(<<"c4d">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4f">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4g">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4p">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4u">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"cab">>) -> {<<"application">>, <<"vnd.ms-cab-compressed">>, []}; +all_ext(<<"caf">>) -> {<<"audio">>, <<"x-caf">>, []}; +all_ext(<<"cap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"car">>) -> {<<"application">>, <<"vnd.curl.car">>, []}; +all_ext(<<"cat">>) -> {<<"application">>, <<"vnd.ms-pki.seccat">>, []}; +all_ext(<<"cb7">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cba">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbr">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbt">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbz">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cct">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"cc">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"ccxml">>) -> {<<"application">>, <<"ccxml+xml">>, []}; +all_ext(<<"cdbcmsg">>) -> {<<"application">>, <<"vnd.contact.cmsg">>, []}; +all_ext(<<"cdf">>) -> {<<"application">>, <<"x-netcdf">>, []}; +all_ext(<<"cdkey">>) -> {<<"application">>, <<"vnd.mediastation.cdkey">>, []}; +all_ext(<<"cdmia">>) -> {<<"application">>, <<"cdmi-capability">>, []}; +all_ext(<<"cdmic">>) -> {<<"application">>, <<"cdmi-container">>, []}; +all_ext(<<"cdmid">>) -> {<<"application">>, <<"cdmi-domain">>, []}; +all_ext(<<"cdmio">>) -> {<<"application">>, <<"cdmi-object">>, []}; +all_ext(<<"cdmiq">>) -> {<<"application">>, <<"cdmi-queue">>, []}; +all_ext(<<"cdx">>) -> {<<"chemical">>, <<"x-cdx">>, []}; +all_ext(<<"cdxml">>) -> {<<"application">>, <<"vnd.chemdraw+xml">>, []}; +all_ext(<<"cdy">>) -> {<<"application">>, <<"vnd.cinderella">>, []}; +all_ext(<<"cer">>) -> {<<"application">>, <<"pkix-cert">>, []}; +all_ext(<<"cfs">>) -> {<<"application">>, <<"x-cfs-compressed">>, []}; +all_ext(<<"cgm">>) -> {<<"image">>, <<"cgm">>, []}; +all_ext(<<"chat">>) -> {<<"application">>, <<"x-chat">>, []}; +all_ext(<<"chm">>) -> {<<"application">>, <<"vnd.ms-htmlhelp">>, []}; +all_ext(<<"chrt">>) -> {<<"application">>, <<"vnd.kde.kchart">>, []}; 
+all_ext(<<"cif">>) -> {<<"chemical">>, <<"x-cif">>, []}; +all_ext(<<"cii">>) -> {<<"application">>, <<"vnd.anser-web-certificate-issue-initiation">>, []}; +all_ext(<<"cil">>) -> {<<"application">>, <<"vnd.ms-artgalry">>, []}; +all_ext(<<"cla">>) -> {<<"application">>, <<"vnd.claymore">>, []}; +all_ext(<<"class">>) -> {<<"application">>, <<"java-vm">>, []}; +all_ext(<<"clkk">>) -> {<<"application">>, <<"vnd.crick.clicker.keyboard">>, []}; +all_ext(<<"clkp">>) -> {<<"application">>, <<"vnd.crick.clicker.palette">>, []}; +all_ext(<<"clkt">>) -> {<<"application">>, <<"vnd.crick.clicker.template">>, []}; +all_ext(<<"clkw">>) -> {<<"application">>, <<"vnd.crick.clicker.wordbank">>, []}; +all_ext(<<"clkx">>) -> {<<"application">>, <<"vnd.crick.clicker">>, []}; +all_ext(<<"clp">>) -> {<<"application">>, <<"x-msclip">>, []}; +all_ext(<<"cmc">>) -> {<<"application">>, <<"vnd.cosmocaller">>, []}; +all_ext(<<"cmdf">>) -> {<<"chemical">>, <<"x-cmdf">>, []}; +all_ext(<<"cml">>) -> {<<"chemical">>, <<"x-cml">>, []}; +all_ext(<<"cmp">>) -> {<<"application">>, <<"vnd.yellowriver-custom-menu">>, []}; +all_ext(<<"cmx">>) -> {<<"image">>, <<"x-cmx">>, []}; +all_ext(<<"cod">>) -> {<<"application">>, <<"vnd.rim.cod">>, []}; +all_ext(<<"com">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"conf">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"cpio">>) -> {<<"application">>, <<"x-cpio">>, []}; +all_ext(<<"cpp">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"cpt">>) -> {<<"application">>, <<"mac-compactpro">>, []}; +all_ext(<<"crd">>) -> {<<"application">>, <<"x-mscardfile">>, []}; +all_ext(<<"crl">>) -> {<<"application">>, <<"pkix-crl">>, []}; +all_ext(<<"crt">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []}; +all_ext(<<"cryptonote">>) -> {<<"application">>, <<"vnd.rig.cryptonote">>, []}; +all_ext(<<"csh">>) -> {<<"application">>, <<"x-csh">>, []}; +all_ext(<<"csml">>) -> {<<"chemical">>, <<"x-csml">>, []}; +all_ext(<<"csp">>) -> {<<"application">>, <<"vnd.commonspace">>, []}; +all_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +all_ext(<<"cst">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"csv">>) -> {<<"text">>, <<"csv">>, []}; +all_ext(<<"c">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"cu">>) -> {<<"application">>, <<"cu-seeme">>, []}; +all_ext(<<"curl">>) -> {<<"text">>, <<"vnd.curl">>, []}; +all_ext(<<"cww">>) -> {<<"application">>, <<"prs.cww">>, []}; +all_ext(<<"cxt">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"cxx">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"dae">>) -> {<<"model">>, <<"vnd.collada+xml">>, []}; +all_ext(<<"daf">>) -> {<<"application">>, <<"vnd.mobius.daf">>, []}; +all_ext(<<"dart">>) -> {<<"application">>, <<"vnd.dart">>, []}; +all_ext(<<"dataless">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []}; +all_ext(<<"davmount">>) -> {<<"application">>, <<"davmount+xml">>, []}; +all_ext(<<"dbk">>) -> {<<"application">>, <<"docbook+xml">>, []}; +all_ext(<<"dcr">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"dcurl">>) -> {<<"text">>, <<"vnd.curl.dcurl">>, []}; +all_ext(<<"dd2">>) -> {<<"application">>, <<"vnd.oma.dd2+xml">>, []}; +all_ext(<<"ddd">>) -> {<<"application">>, <<"vnd.fujixerox.ddd">>, []}; +all_ext(<<"deb">>) -> {<<"application">>, <<"x-debian-package">>, []}; +all_ext(<<"def">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"deploy">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"der">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []}; +all_ext(<<"dfac">>) -> {<<"application">>, <<"vnd.dreamfactory">>, 
[]}; +all_ext(<<"dgc">>) -> {<<"application">>, <<"x-dgc-compressed">>, []}; +all_ext(<<"dic">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"dir">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"dis">>) -> {<<"application">>, <<"vnd.mobius.dis">>, []}; +all_ext(<<"dist">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"distz">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"djv">>) -> {<<"image">>, <<"vnd.djvu">>, []}; +all_ext(<<"djvu">>) -> {<<"image">>, <<"vnd.djvu">>, []}; +all_ext(<<"dll">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"dmg">>) -> {<<"application">>, <<"x-apple-diskimage">>, []}; +all_ext(<<"dmp">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"dms">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"dna">>) -> {<<"application">>, <<"vnd.dna">>, []}; +all_ext(<<"doc">>) -> {<<"application">>, <<"msword">>, []}; +all_ext(<<"docm">>) -> {<<"application">>, <<"vnd.ms-word.document.macroenabled.12">>, []}; +all_ext(<<"docx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.document">>, []}; +all_ext(<<"dot">>) -> {<<"application">>, <<"msword">>, []}; +all_ext(<<"dotm">>) -> {<<"application">>, <<"vnd.ms-word.template.macroenabled.12">>, []}; +all_ext(<<"dotx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.template">>, []}; +all_ext(<<"dp">>) -> {<<"application">>, <<"vnd.osgi.dp">>, []}; +all_ext(<<"dpg">>) -> {<<"application">>, <<"vnd.dpgraph">>, []}; +all_ext(<<"dra">>) -> {<<"audio">>, <<"vnd.dra">>, []}; +all_ext(<<"dsc">>) -> {<<"text">>, <<"prs.lines.tag">>, []}; +all_ext(<<"dssc">>) -> {<<"application">>, <<"dssc+der">>, []}; +all_ext(<<"dtb">>) -> {<<"application">>, <<"x-dtbook+xml">>, []}; +all_ext(<<"dtd">>) -> {<<"application">>, <<"xml-dtd">>, []}; +all_ext(<<"dts">>) -> {<<"audio">>, <<"vnd.dts">>, []}; +all_ext(<<"dtshd">>) -> {<<"audio">>, <<"vnd.dts.hd">>, []}; +all_ext(<<"dump">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"dvb">>) -> {<<"video">>, <<"vnd.dvb.file">>, []}; +all_ext(<<"dvi">>) -> {<<"application">>, <<"x-dvi">>, []}; +all_ext(<<"dwf">>) -> {<<"model">>, <<"vnd.dwf">>, []}; +all_ext(<<"dwg">>) -> {<<"image">>, <<"vnd.dwg">>, []}; +all_ext(<<"dxf">>) -> {<<"image">>, <<"vnd.dxf">>, []}; +all_ext(<<"dxp">>) -> {<<"application">>, <<"vnd.spotfire.dxp">>, []}; +all_ext(<<"dxr">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"ecelp4800">>) -> {<<"audio">>, <<"vnd.nuera.ecelp4800">>, []}; +all_ext(<<"ecelp7470">>) -> {<<"audio">>, <<"vnd.nuera.ecelp7470">>, []}; +all_ext(<<"ecelp9600">>) -> {<<"audio">>, <<"vnd.nuera.ecelp9600">>, []}; +all_ext(<<"ecma">>) -> {<<"application">>, <<"ecmascript">>, []}; +all_ext(<<"edm">>) -> {<<"application">>, <<"vnd.novadigm.edm">>, []}; +all_ext(<<"edx">>) -> {<<"application">>, <<"vnd.novadigm.edx">>, []}; +all_ext(<<"efif">>) -> {<<"application">>, <<"vnd.picsel">>, []}; +all_ext(<<"ei6">>) -> {<<"application">>, <<"vnd.pg.osasli">>, []}; +all_ext(<<"elc">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"emf">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"eml">>) -> {<<"message">>, <<"rfc822">>, []}; +all_ext(<<"emma">>) -> {<<"application">>, <<"emma+xml">>, []}; +all_ext(<<"emz">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"eol">>) -> {<<"audio">>, <<"vnd.digital-winds">>, []}; +all_ext(<<"eot">>) -> {<<"application">>, <<"vnd.ms-fontobject">>, []}; +all_ext(<<"eps">>) -> 
{<<"application">>, <<"postscript">>, []}; +all_ext(<<"epub">>) -> {<<"application">>, <<"epub+zip">>, []}; +all_ext(<<"es3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []}; +all_ext(<<"esa">>) -> {<<"application">>, <<"vnd.osgi.subsystem">>, []}; +all_ext(<<"esf">>) -> {<<"application">>, <<"vnd.epson.esf">>, []}; +all_ext(<<"et3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []}; +all_ext(<<"etx">>) -> {<<"text">>, <<"x-setext">>, []}; +all_ext(<<"eva">>) -> {<<"application">>, <<"x-eva">>, []}; +all_ext(<<"evy">>) -> {<<"application">>, <<"x-envoy">>, []}; +all_ext(<<"exe">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"exi">>) -> {<<"application">>, <<"exi">>, []}; +all_ext(<<"ext">>) -> {<<"application">>, <<"vnd.novadigm.ext">>, []}; +all_ext(<<"ez2">>) -> {<<"application">>, <<"vnd.ezpix-album">>, []}; +all_ext(<<"ez3">>) -> {<<"application">>, <<"vnd.ezpix-package">>, []}; +all_ext(<<"ez">>) -> {<<"application">>, <<"andrew-inset">>, []}; +all_ext(<<"f4v">>) -> {<<"video">>, <<"x-f4v">>, []}; +all_ext(<<"f77">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"f90">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fbs">>) -> {<<"image">>, <<"vnd.fastbidsheet">>, []}; +all_ext(<<"fcdt">>) -> {<<"application">>, <<"vnd.adobe.formscentral.fcdt">>, []}; +all_ext(<<"fcs">>) -> {<<"application">>, <<"vnd.isac.fcs">>, []}; +all_ext(<<"fdf">>) -> {<<"application">>, <<"vnd.fdf">>, []}; +all_ext(<<"fe_launch">>) -> {<<"application">>, <<"vnd.denovo.fcselayout-link">>, []}; +all_ext(<<"fg5">>) -> {<<"application">>, <<"vnd.fujitsu.oasysgp">>, []}; +all_ext(<<"fgd">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"fh4">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fh5">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fh7">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fhc">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fh">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fig">>) -> {<<"application">>, <<"x-xfig">>, []}; +all_ext(<<"flac">>) -> {<<"audio">>, <<"x-flac">>, []}; +all_ext(<<"fli">>) -> {<<"video">>, <<"x-fli">>, []}; +all_ext(<<"flo">>) -> {<<"application">>, <<"vnd.micrografx.flo">>, []}; +all_ext(<<"flv">>) -> {<<"video">>, <<"x-flv">>, []}; +all_ext(<<"flw">>) -> {<<"application">>, <<"vnd.kde.kivio">>, []}; +all_ext(<<"flx">>) -> {<<"text">>, <<"vnd.fmi.flexstor">>, []}; +all_ext(<<"fly">>) -> {<<"text">>, <<"vnd.fly">>, []}; +all_ext(<<"fm">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"fnc">>) -> {<<"application">>, <<"vnd.frogans.fnc">>, []}; +all_ext(<<"for">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fpx">>) -> {<<"image">>, <<"vnd.fpx">>, []}; +all_ext(<<"frame">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"fsc">>) -> {<<"application">>, <<"vnd.fsc.weblaunch">>, []}; +all_ext(<<"fst">>) -> {<<"image">>, <<"vnd.fst">>, []}; +all_ext(<<"ftc">>) -> {<<"application">>, <<"vnd.fluxtime.clip">>, []}; +all_ext(<<"f">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fti">>) -> {<<"application">>, <<"vnd.anser-web-funds-transfer-initiation">>, []}; +all_ext(<<"fvt">>) -> {<<"video">>, <<"vnd.fvt">>, []}; +all_ext(<<"fxp">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []}; +all_ext(<<"fxpl">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []}; +all_ext(<<"fzs">>) -> {<<"application">>, <<"vnd.fuzzysheet">>, []}; +all_ext(<<"g2w">>) -> {<<"application">>, <<"vnd.geoplan">>, []}; +all_ext(<<"g3">>) -> {<<"image">>, <<"g3fax">>, []}; +all_ext(<<"g3w">>) -> 
{<<"application">>, <<"vnd.geospace">>, []}; +all_ext(<<"gac">>) -> {<<"application">>, <<"vnd.groove-account">>, []}; +all_ext(<<"gam">>) -> {<<"application">>, <<"x-tads">>, []}; +all_ext(<<"gbr">>) -> {<<"application">>, <<"rpki-ghostbusters">>, []}; +all_ext(<<"gca">>) -> {<<"application">>, <<"x-gca-compressed">>, []}; +all_ext(<<"gdl">>) -> {<<"model">>, <<"vnd.gdl">>, []}; +all_ext(<<"geo">>) -> {<<"application">>, <<"vnd.dynageo">>, []}; +all_ext(<<"gex">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []}; +all_ext(<<"ggb">>) -> {<<"application">>, <<"vnd.geogebra.file">>, []}; +all_ext(<<"ggt">>) -> {<<"application">>, <<"vnd.geogebra.tool">>, []}; +all_ext(<<"ghf">>) -> {<<"application">>, <<"vnd.groove-help">>, []}; +all_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +all_ext(<<"gim">>) -> {<<"application">>, <<"vnd.groove-identity-message">>, []}; +all_ext(<<"gml">>) -> {<<"application">>, <<"gml+xml">>, []}; +all_ext(<<"gmx">>) -> {<<"application">>, <<"vnd.gmx">>, []}; +all_ext(<<"gnumeric">>) -> {<<"application">>, <<"x-gnumeric">>, []}; +all_ext(<<"gph">>) -> {<<"application">>, <<"vnd.flographit">>, []}; +all_ext(<<"gpx">>) -> {<<"application">>, <<"gpx+xml">>, []}; +all_ext(<<"gqf">>) -> {<<"application">>, <<"vnd.grafeq">>, []}; +all_ext(<<"gqs">>) -> {<<"application">>, <<"vnd.grafeq">>, []}; +all_ext(<<"gram">>) -> {<<"application">>, <<"srgs">>, []}; +all_ext(<<"gramps">>) -> {<<"application">>, <<"x-gramps-xml">>, []}; +all_ext(<<"gre">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []}; +all_ext(<<"grv">>) -> {<<"application">>, <<"vnd.groove-injector">>, []}; +all_ext(<<"grxml">>) -> {<<"application">>, <<"srgs+xml">>, []}; +all_ext(<<"gsf">>) -> {<<"application">>, <<"x-font-ghostscript">>, []}; +all_ext(<<"gtar">>) -> {<<"application">>, <<"x-gtar">>, []}; +all_ext(<<"gtm">>) -> {<<"application">>, <<"vnd.groove-tool-message">>, []}; +all_ext(<<"gtw">>) -> {<<"model">>, <<"vnd.gtw">>, []}; +all_ext(<<"gv">>) -> {<<"text">>, <<"vnd.graphviz">>, []}; +all_ext(<<"gxf">>) -> {<<"application">>, <<"gxf">>, []}; +all_ext(<<"gxt">>) -> {<<"application">>, <<"vnd.geonext">>, []}; +all_ext(<<"h261">>) -> {<<"video">>, <<"h261">>, []}; +all_ext(<<"h263">>) -> {<<"video">>, <<"h263">>, []}; +all_ext(<<"h264">>) -> {<<"video">>, <<"h264">>, []}; +all_ext(<<"hal">>) -> {<<"application">>, <<"vnd.hal+xml">>, []}; +all_ext(<<"hbci">>) -> {<<"application">>, <<"vnd.hbci">>, []}; +all_ext(<<"hdf">>) -> {<<"application">>, <<"x-hdf">>, []}; +all_ext(<<"hh">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"hlp">>) -> {<<"application">>, <<"winhlp">>, []}; +all_ext(<<"hpgl">>) -> {<<"application">>, <<"vnd.hp-hpgl">>, []}; +all_ext(<<"hpid">>) -> {<<"application">>, <<"vnd.hp-hpid">>, []}; +all_ext(<<"hps">>) -> {<<"application">>, <<"vnd.hp-hps">>, []}; +all_ext(<<"hqx">>) -> {<<"application">>, <<"mac-binhex40">>, []}; +all_ext(<<"h">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"htke">>) -> {<<"application">>, <<"vnd.kenameaapp">>, []}; +all_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +all_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +all_ext(<<"hvd">>) -> {<<"application">>, <<"vnd.yamaha.hv-dic">>, []}; +all_ext(<<"hvp">>) -> {<<"application">>, <<"vnd.yamaha.hv-voice">>, []}; +all_ext(<<"hvs">>) -> {<<"application">>, <<"vnd.yamaha.hv-script">>, []}; +all_ext(<<"i2g">>) -> {<<"application">>, <<"vnd.intergeo">>, []}; +all_ext(<<"icc">>) -> {<<"application">>, <<"vnd.iccprofile">>, []}; +all_ext(<<"ice">>) -> {<<"x-conference">>, <<"x-cooltalk">>, []}; 
+all_ext(<<"icm">>) -> {<<"application">>, <<"vnd.iccprofile">>, []}; +all_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +all_ext(<<"ics">>) -> {<<"text">>, <<"calendar">>, []}; +all_ext(<<"ief">>) -> {<<"image">>, <<"ief">>, []}; +all_ext(<<"ifb">>) -> {<<"text">>, <<"calendar">>, []}; +all_ext(<<"ifm">>) -> {<<"application">>, <<"vnd.shana.informed.formdata">>, []}; +all_ext(<<"iges">>) -> {<<"model">>, <<"iges">>, []}; +all_ext(<<"igl">>) -> {<<"application">>, <<"vnd.igloader">>, []}; +all_ext(<<"igm">>) -> {<<"application">>, <<"vnd.insors.igm">>, []}; +all_ext(<<"igs">>) -> {<<"model">>, <<"iges">>, []}; +all_ext(<<"igx">>) -> {<<"application">>, <<"vnd.micrografx.igx">>, []}; +all_ext(<<"iif">>) -> {<<"application">>, <<"vnd.shana.informed.interchange">>, []}; +all_ext(<<"imp">>) -> {<<"application">>, <<"vnd.accpac.simply.imp">>, []}; +all_ext(<<"ims">>) -> {<<"application">>, <<"vnd.ms-ims">>, []}; +all_ext(<<"ink">>) -> {<<"application">>, <<"inkml+xml">>, []}; +all_ext(<<"inkml">>) -> {<<"application">>, <<"inkml+xml">>, []}; +all_ext(<<"install">>) -> {<<"application">>, <<"x-install-instructions">>, []}; +all_ext(<<"in">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"iota">>) -> {<<"application">>, <<"vnd.astraea-software.iota">>, []}; +all_ext(<<"ipfix">>) -> {<<"application">>, <<"ipfix">>, []}; +all_ext(<<"ipk">>) -> {<<"application">>, <<"vnd.shana.informed.package">>, []}; +all_ext(<<"irm">>) -> {<<"application">>, <<"vnd.ibm.rights-management">>, []}; +all_ext(<<"irp">>) -> {<<"application">>, <<"vnd.irepository.package+xml">>, []}; +all_ext(<<"iso">>) -> {<<"application">>, <<"x-iso9660-image">>, []}; +all_ext(<<"itp">>) -> {<<"application">>, <<"vnd.shana.informed.formtemplate">>, []}; +all_ext(<<"ivp">>) -> {<<"application">>, <<"vnd.immervision-ivp">>, []}; +all_ext(<<"ivu">>) -> {<<"application">>, <<"vnd.immervision-ivu">>, []}; +all_ext(<<"jad">>) -> {<<"text">>, <<"vnd.sun.j2me.app-descriptor">>, []}; +all_ext(<<"jam">>) -> {<<"application">>, <<"vnd.jam">>, []}; +all_ext(<<"jar">>) -> {<<"application">>, <<"java-archive">>, []}; +all_ext(<<"java">>) -> {<<"text">>, <<"x-java-source">>, []}; +all_ext(<<"jisp">>) -> {<<"application">>, <<"vnd.jisp">>, []}; +all_ext(<<"jlt">>) -> {<<"application">>, <<"vnd.hp-jlyt">>, []}; +all_ext(<<"jnlp">>) -> {<<"application">>, <<"x-java-jnlp-file">>, []}; +all_ext(<<"joda">>) -> {<<"application">>, <<"vnd.joost.joda-archive">>, []}; +all_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpe">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpgm">>) -> {<<"video">>, <<"jpm">>, []}; +all_ext(<<"jpgv">>) -> {<<"video">>, <<"jpeg">>, []}; +all_ext(<<"jpm">>) -> {<<"video">>, <<"jpm">>, []}; +all_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +all_ext(<<"json">>) -> {<<"application">>, <<"json">>, []}; +all_ext(<<"jsonml">>) -> {<<"application">>, <<"jsonml+json">>, []}; +all_ext(<<"kar">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"karbon">>) -> {<<"application">>, <<"vnd.kde.karbon">>, []}; +all_ext(<<"kfo">>) -> {<<"application">>, <<"vnd.kde.kformula">>, []}; +all_ext(<<"kia">>) -> {<<"application">>, <<"vnd.kidspiration">>, []}; +all_ext(<<"kml">>) -> {<<"application">>, <<"vnd.google-earth.kml+xml">>, []}; +all_ext(<<"kmz">>) -> {<<"application">>, <<"vnd.google-earth.kmz">>, []}; +all_ext(<<"kne">>) -> {<<"application">>, <<"vnd.kinar">>, []}; +all_ext(<<"knp">>) -> {<<"application">>, <<"vnd.kinar">>, []}; +all_ext(<<"kon">>) -> 
{<<"application">>, <<"vnd.kde.kontour">>, []}; +all_ext(<<"kpr">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []}; +all_ext(<<"kpt">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []}; +all_ext(<<"kpxx">>) -> {<<"application">>, <<"vnd.ds-keypoint">>, []}; +all_ext(<<"ksp">>) -> {<<"application">>, <<"vnd.kde.kspread">>, []}; +all_ext(<<"ktr">>) -> {<<"application">>, <<"vnd.kahootz">>, []}; +all_ext(<<"ktx">>) -> {<<"image">>, <<"ktx">>, []}; +all_ext(<<"ktz">>) -> {<<"application">>, <<"vnd.kahootz">>, []}; +all_ext(<<"kwd">>) -> {<<"application">>, <<"vnd.kde.kword">>, []}; +all_ext(<<"kwt">>) -> {<<"application">>, <<"vnd.kde.kword">>, []}; +all_ext(<<"lasxml">>) -> {<<"application">>, <<"vnd.las.las+xml">>, []}; +all_ext(<<"latex">>) -> {<<"application">>, <<"x-latex">>, []}; +all_ext(<<"lbd">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.desktop">>, []}; +all_ext(<<"lbe">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.exchange+xml">>, []}; +all_ext(<<"les">>) -> {<<"application">>, <<"vnd.hhe.lesson-player">>, []}; +all_ext(<<"lha">>) -> {<<"application">>, <<"x-lzh-compressed">>, []}; +all_ext(<<"link66">>) -> {<<"application">>, <<"vnd.route66.link66+xml">>, []}; +all_ext(<<"list3820">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"listafp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"list">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"lnk">>) -> {<<"application">>, <<"x-ms-shortcut">>, []}; +all_ext(<<"log">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"lostxml">>) -> {<<"application">>, <<"lost+xml">>, []}; +all_ext(<<"lrf">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"lrm">>) -> {<<"application">>, <<"vnd.ms-lrm">>, []}; +all_ext(<<"ltf">>) -> {<<"application">>, <<"vnd.frogans.ltf">>, []}; +all_ext(<<"lvp">>) -> {<<"audio">>, <<"vnd.lucent.voice">>, []}; +all_ext(<<"lwp">>) -> {<<"application">>, <<"vnd.lotus-wordpro">>, []}; +all_ext(<<"lzh">>) -> {<<"application">>, <<"x-lzh-compressed">>, []}; +all_ext(<<"m13">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"m14">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"m1v">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"m21">>) -> {<<"application">>, <<"mp21">>, []}; +all_ext(<<"m2a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"m2v">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"m3a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"m3u8">>) -> {<<"application">>, <<"vnd.apple.mpegurl">>, []}; +all_ext(<<"m3u">>) -> {<<"audio">>, <<"x-mpegurl">>, []}; +all_ext(<<"m4a">>) -> {<<"audio">>, <<"mp4">>, []}; +all_ext(<<"m4u">>) -> {<<"video">>, <<"vnd.mpegurl">>, []}; +all_ext(<<"m4v">>) -> {<<"video">>, <<"x-m4v">>, []}; +all_ext(<<"ma">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"mads">>) -> {<<"application">>, <<"mads+xml">>, []}; +all_ext(<<"mag">>) -> {<<"application">>, <<"vnd.ecowin.chart">>, []}; +all_ext(<<"maker">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"man">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"mar">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"mathml">>) -> {<<"application">>, <<"mathml+xml">>, []}; +all_ext(<<"mb">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"mbk">>) -> {<<"application">>, <<"vnd.mobius.mbk">>, []}; +all_ext(<<"mbox">>) -> {<<"application">>, <<"mbox">>, []}; +all_ext(<<"mc1">>) -> {<<"application">>, <<"vnd.medcalcdata">>, []}; +all_ext(<<"mcd">>) -> {<<"application">>, <<"vnd.mcd">>, []}; 
+all_ext(<<"mcurl">>) -> {<<"text">>, <<"vnd.curl.mcurl">>, []}; +all_ext(<<"mdb">>) -> {<<"application">>, <<"x-msaccess">>, []}; +all_ext(<<"mdi">>) -> {<<"image">>, <<"vnd.ms-modi">>, []}; +all_ext(<<"mesh">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"meta4">>) -> {<<"application">>, <<"metalink4+xml">>, []}; +all_ext(<<"metalink">>) -> {<<"application">>, <<"metalink+xml">>, []}; +all_ext(<<"me">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"mets">>) -> {<<"application">>, <<"mets+xml">>, []}; +all_ext(<<"mfm">>) -> {<<"application">>, <<"vnd.mfmp">>, []}; +all_ext(<<"mft">>) -> {<<"application">>, <<"rpki-manifest">>, []}; +all_ext(<<"mgp">>) -> {<<"application">>, <<"vnd.osgeo.mapguide.package">>, []}; +all_ext(<<"mgz">>) -> {<<"application">>, <<"vnd.proteus.magazine">>, []}; +all_ext(<<"mid">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"midi">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"mie">>) -> {<<"application">>, <<"x-mie">>, []}; +all_ext(<<"mif">>) -> {<<"application">>, <<"vnd.mif">>, []}; +all_ext(<<"mime">>) -> {<<"message">>, <<"rfc822">>, []}; +all_ext(<<"mj2">>) -> {<<"video">>, <<"mj2">>, []}; +all_ext(<<"mjp2">>) -> {<<"video">>, <<"mj2">>, []}; +all_ext(<<"mk3d">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mka">>) -> {<<"audio">>, <<"x-matroska">>, []}; +all_ext(<<"mks">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mkv">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mlp">>) -> {<<"application">>, <<"vnd.dolby.mlp">>, []}; +all_ext(<<"mmd">>) -> {<<"application">>, <<"vnd.chipnuts.karaoke-mmd">>, []}; +all_ext(<<"mmf">>) -> {<<"application">>, <<"vnd.smaf">>, []}; +all_ext(<<"mmr">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-mmr">>, []}; +all_ext(<<"mng">>) -> {<<"video">>, <<"x-mng">>, []}; +all_ext(<<"mny">>) -> {<<"application">>, <<"x-msmoney">>, []}; +all_ext(<<"mobi">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []}; +all_ext(<<"mods">>) -> {<<"application">>, <<"mods+xml">>, []}; +all_ext(<<"movie">>) -> {<<"video">>, <<"x-sgi-movie">>, []}; +all_ext(<<"mov">>) -> {<<"video">>, <<"quicktime">>, []}; +all_ext(<<"mp21">>) -> {<<"application">>, <<"mp21">>, []}; +all_ext(<<"mp2a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp2">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp4a">>) -> {<<"audio">>, <<"mp4">>, []}; +all_ext(<<"mp4s">>) -> {<<"application">>, <<"mp4">>, []}; +all_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mp4v">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mpc">>) -> {<<"application">>, <<"vnd.mophun.certificate">>, []}; +all_ext(<<"mpeg">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpe">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpg4">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mpga">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mpg">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpkg">>) -> {<<"application">>, <<"vnd.apple.installer+xml">>, []}; +all_ext(<<"mpm">>) -> {<<"application">>, <<"vnd.blueice.multipass">>, []}; +all_ext(<<"mpn">>) -> {<<"application">>, <<"vnd.mophun.application">>, []}; +all_ext(<<"mpp">>) -> {<<"application">>, <<"vnd.ms-project">>, []}; +all_ext(<<"mpt">>) -> {<<"application">>, <<"vnd.ms-project">>, []}; +all_ext(<<"mpy">>) -> {<<"application">>, <<"vnd.ibm.minipay">>, []}; +all_ext(<<"mqy">>) -> {<<"application">>, <<"vnd.mobius.mqy">>, []}; +all_ext(<<"mrc">>) -> {<<"application">>, <<"marc">>, []}; +all_ext(<<"mrcx">>) -> {<<"application">>, <<"marcxml+xml">>, []}; 
+all_ext(<<"mscml">>) -> {<<"application">>, <<"mediaservercontrol+xml">>, []}; +all_ext(<<"mseed">>) -> {<<"application">>, <<"vnd.fdsn.mseed">>, []}; +all_ext(<<"mseq">>) -> {<<"application">>, <<"vnd.mseq">>, []}; +all_ext(<<"msf">>) -> {<<"application">>, <<"vnd.epson.msf">>, []}; +all_ext(<<"msh">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"msi">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"msl">>) -> {<<"application">>, <<"vnd.mobius.msl">>, []}; +all_ext(<<"ms">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"msty">>) -> {<<"application">>, <<"vnd.muvee.style">>, []}; +all_ext(<<"mts">>) -> {<<"model">>, <<"vnd.mts">>, []}; +all_ext(<<"mus">>) -> {<<"application">>, <<"vnd.musician">>, []}; +all_ext(<<"musicxml">>) -> {<<"application">>, <<"vnd.recordare.musicxml+xml">>, []}; +all_ext(<<"mvb">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"mwf">>) -> {<<"application">>, <<"vnd.mfer">>, []}; +all_ext(<<"mxf">>) -> {<<"application">>, <<"mxf">>, []}; +all_ext(<<"mxl">>) -> {<<"application">>, <<"vnd.recordare.musicxml">>, []}; +all_ext(<<"mxml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"mxs">>) -> {<<"application">>, <<"vnd.triscape.mxs">>, []}; +all_ext(<<"mxu">>) -> {<<"video">>, <<"vnd.mpegurl">>, []}; +all_ext(<<"n3">>) -> {<<"text">>, <<"n3">>, []}; +all_ext(<<"nb">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"nbp">>) -> {<<"application">>, <<"vnd.wolfram.player">>, []}; +all_ext(<<"nc">>) -> {<<"application">>, <<"x-netcdf">>, []}; +all_ext(<<"ncx">>) -> {<<"application">>, <<"x-dtbncx+xml">>, []}; +all_ext(<<"nfo">>) -> {<<"text">>, <<"x-nfo">>, []}; +all_ext(<<"n-gage">>) -> {<<"application">>, <<"vnd.nokia.n-gage.symbian.install">>, []}; +all_ext(<<"ngdat">>) -> {<<"application">>, <<"vnd.nokia.n-gage.data">>, []}; +all_ext(<<"nitf">>) -> {<<"application">>, <<"vnd.nitf">>, []}; +all_ext(<<"nlu">>) -> {<<"application">>, <<"vnd.neurolanguage.nlu">>, []}; +all_ext(<<"nml">>) -> {<<"application">>, <<"vnd.enliven">>, []}; +all_ext(<<"nnd">>) -> {<<"application">>, <<"vnd.noblenet-directory">>, []}; +all_ext(<<"nns">>) -> {<<"application">>, <<"vnd.noblenet-sealer">>, []}; +all_ext(<<"nnw">>) -> {<<"application">>, <<"vnd.noblenet-web">>, []}; +all_ext(<<"npx">>) -> {<<"image">>, <<"vnd.net-fpx">>, []}; +all_ext(<<"nsc">>) -> {<<"application">>, <<"x-conference">>, []}; +all_ext(<<"nsf">>) -> {<<"application">>, <<"vnd.lotus-notes">>, []}; +all_ext(<<"ntf">>) -> {<<"application">>, <<"vnd.nitf">>, []}; +all_ext(<<"nzb">>) -> {<<"application">>, <<"x-nzb">>, []}; +all_ext(<<"oa2">>) -> {<<"application">>, <<"vnd.fujitsu.oasys2">>, []}; +all_ext(<<"oa3">>) -> {<<"application">>, <<"vnd.fujitsu.oasys3">>, []}; +all_ext(<<"oas">>) -> {<<"application">>, <<"vnd.fujitsu.oasys">>, []}; +all_ext(<<"obd">>) -> {<<"application">>, <<"x-msbinder">>, []}; +all_ext(<<"obj">>) -> {<<"application">>, <<"x-tgif">>, []}; +all_ext(<<"oda">>) -> {<<"application">>, <<"oda">>, []}; +all_ext(<<"odb">>) -> {<<"application">>, <<"vnd.oasis.opendocument.database">>, []}; +all_ext(<<"odc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart">>, []}; +all_ext(<<"odf">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula">>, []}; +all_ext(<<"odft">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula-template">>, []}; +all_ext(<<"odg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics">>, []}; +all_ext(<<"odi">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image">>, []}; +all_ext(<<"odm">>) -> 
{<<"application">>, <<"vnd.oasis.opendocument.text-master">>, []}; +all_ext(<<"odp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation">>, []}; +all_ext(<<"ods">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet">>, []}; +all_ext(<<"odt">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text">>, []}; +all_ext(<<"oga">>) -> {<<"audio">>, <<"ogg">>, []}; +all_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; +all_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +all_ext(<<"ogx">>) -> {<<"application">>, <<"ogg">>, []}; +all_ext(<<"omdoc">>) -> {<<"application">>, <<"omdoc+xml">>, []}; +all_ext(<<"onepkg">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetmp">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetoc2">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetoc">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"opf">>) -> {<<"application">>, <<"oebps-package+xml">>, []}; +all_ext(<<"opml">>) -> {<<"text">>, <<"x-opml">>, []}; +all_ext(<<"oprc">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"org">>) -> {<<"application">>, <<"vnd.lotus-organizer">>, []}; +all_ext(<<"osf">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat">>, []}; +all_ext(<<"osfpvg">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat.osfpvg+xml">>, []}; +all_ext(<<"otc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart-template">>, []}; +all_ext(<<"otf">>) -> {<<"font">>, <<"otf">>, []}; +all_ext(<<"otg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics-template">>, []}; +all_ext(<<"oth">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-web">>, []}; +all_ext(<<"oti">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image-template">>, []}; +all_ext(<<"otp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation-template">>, []}; +all_ext(<<"ots">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet-template">>, []}; +all_ext(<<"ott">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-template">>, []}; +all_ext(<<"oxps">>) -> {<<"application">>, <<"oxps">>, []}; +all_ext(<<"oxt">>) -> {<<"application">>, <<"vnd.openofficeorg.extension">>, []}; +all_ext(<<"p10">>) -> {<<"application">>, <<"pkcs10">>, []}; +all_ext(<<"p12">>) -> {<<"application">>, <<"x-pkcs12">>, []}; +all_ext(<<"p7b">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []}; +all_ext(<<"p7c">>) -> {<<"application">>, <<"pkcs7-mime">>, []}; +all_ext(<<"p7m">>) -> {<<"application">>, <<"pkcs7-mime">>, []}; +all_ext(<<"p7r">>) -> {<<"application">>, <<"x-pkcs7-certreqresp">>, []}; +all_ext(<<"p7s">>) -> {<<"application">>, <<"pkcs7-signature">>, []}; +all_ext(<<"p8">>) -> {<<"application">>, <<"pkcs8">>, []}; +all_ext(<<"pas">>) -> {<<"text">>, <<"x-pascal">>, []}; +all_ext(<<"paw">>) -> {<<"application">>, <<"vnd.pawaafile">>, []}; +all_ext(<<"pbd">>) -> {<<"application">>, <<"vnd.powerbuilder6">>, []}; +all_ext(<<"pbm">>) -> {<<"image">>, <<"x-portable-bitmap">>, []}; +all_ext(<<"pcap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"pcf">>) -> {<<"application">>, <<"x-font-pcf">>, []}; +all_ext(<<"pcl">>) -> {<<"application">>, <<"vnd.hp-pcl">>, []}; +all_ext(<<"pclxl">>) -> {<<"application">>, <<"vnd.hp-pclxl">>, []}; +all_ext(<<"pct">>) -> {<<"image">>, <<"x-pict">>, []}; +all_ext(<<"pcurl">>) -> {<<"application">>, <<"vnd.curl.pcurl">>, []}; +all_ext(<<"pcx">>) -> {<<"image">>, <<"x-pcx">>, []}; +all_ext(<<"pdb">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"pdf">>) -> {<<"application">>, 
<<"pdf">>, []}; +all_ext(<<"pfa">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfb">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfm">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfr">>) -> {<<"application">>, <<"font-tdpfr">>, []}; +all_ext(<<"pfx">>) -> {<<"application">>, <<"x-pkcs12">>, []}; +all_ext(<<"pgm">>) -> {<<"image">>, <<"x-portable-graymap">>, []}; +all_ext(<<"pgn">>) -> {<<"application">>, <<"x-chess-pgn">>, []}; +all_ext(<<"pgp">>) -> {<<"application">>, <<"pgp-encrypted">>, []}; +all_ext(<<"pic">>) -> {<<"image">>, <<"x-pict">>, []}; +all_ext(<<"pkg">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"pki">>) -> {<<"application">>, <<"pkixcmp">>, []}; +all_ext(<<"pkipath">>) -> {<<"application">>, <<"pkix-pkipath">>, []}; +all_ext(<<"plb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-large">>, []}; +all_ext(<<"plc">>) -> {<<"application">>, <<"vnd.mobius.plc">>, []}; +all_ext(<<"plf">>) -> {<<"application">>, <<"vnd.pocketlearn">>, []}; +all_ext(<<"pls">>) -> {<<"application">>, <<"pls+xml">>, []}; +all_ext(<<"pml">>) -> {<<"application">>, <<"vnd.ctc-posml">>, []}; +all_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +all_ext(<<"pnm">>) -> {<<"image">>, <<"x-portable-anymap">>, []}; +all_ext(<<"portpkg">>) -> {<<"application">>, <<"vnd.macports.portpkg">>, []}; +all_ext(<<"pot">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"potm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.template.macroenabled.12">>, []}; +all_ext(<<"potx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.template">>, []}; +all_ext(<<"ppam">>) -> {<<"application">>, <<"vnd.ms-powerpoint.addin.macroenabled.12">>, []}; +all_ext(<<"ppd">>) -> {<<"application">>, <<"vnd.cups-ppd">>, []}; +all_ext(<<"ppm">>) -> {<<"image">>, <<"x-portable-pixmap">>, []}; +all_ext(<<"pps">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"ppsm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slideshow.macroenabled.12">>, []}; +all_ext(<<"ppsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slideshow">>, []}; +all_ext(<<"ppt">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"pptm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.presentation.macroenabled.12">>, []}; +all_ext(<<"pptx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.presentation">>, []}; +all_ext(<<"pqa">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"prc">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []}; +all_ext(<<"pre">>) -> {<<"application">>, <<"vnd.lotus-freelance">>, []}; +all_ext(<<"prf">>) -> {<<"application">>, <<"pics-rules">>, []}; +all_ext(<<"ps">>) -> {<<"application">>, <<"postscript">>, []}; +all_ext(<<"psb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-small">>, []}; +all_ext(<<"psd">>) -> {<<"image">>, <<"vnd.adobe.photoshop">>, []}; +all_ext(<<"psf">>) -> {<<"application">>, <<"x-font-linux-psf">>, []}; +all_ext(<<"pskcxml">>) -> {<<"application">>, <<"pskc+xml">>, []}; +all_ext(<<"p">>) -> {<<"text">>, <<"x-pascal">>, []}; +all_ext(<<"ptid">>) -> {<<"application">>, <<"vnd.pvi.ptid1">>, []}; +all_ext(<<"pub">>) -> {<<"application">>, <<"x-mspublisher">>, []}; +all_ext(<<"pvb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-var">>, []}; +all_ext(<<"pwn">>) -> {<<"application">>, <<"vnd.3m.post-it-notes">>, []}; +all_ext(<<"pya">>) -> {<<"audio">>, <<"vnd.ms-playready.media.pya">>, []}; +all_ext(<<"pyv">>) -> {<<"video">>, 
<<"vnd.ms-playready.media.pyv">>, []}; +all_ext(<<"qam">>) -> {<<"application">>, <<"vnd.epson.quickanime">>, []}; +all_ext(<<"qbo">>) -> {<<"application">>, <<"vnd.intu.qbo">>, []}; +all_ext(<<"qfx">>) -> {<<"application">>, <<"vnd.intu.qfx">>, []}; +all_ext(<<"qps">>) -> {<<"application">>, <<"vnd.publishare-delta-tree">>, []}; +all_ext(<<"qt">>) -> {<<"video">>, <<"quicktime">>, []}; +all_ext(<<"qwd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qwt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxb">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxl">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"ra">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []}; +all_ext(<<"ram">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []}; +all_ext(<<"rar">>) -> {<<"application">>, <<"x-rar-compressed">>, []}; +all_ext(<<"ras">>) -> {<<"image">>, <<"x-cmu-raster">>, []}; +all_ext(<<"rcprofile">>) -> {<<"application">>, <<"vnd.ipunplugged.rcprofile">>, []}; +all_ext(<<"rdf">>) -> {<<"application">>, <<"rdf+xml">>, []}; +all_ext(<<"rdz">>) -> {<<"application">>, <<"vnd.data-vision.rdz">>, []}; +all_ext(<<"rep">>) -> {<<"application">>, <<"vnd.businessobjects">>, []}; +all_ext(<<"res">>) -> {<<"application">>, <<"x-dtbresource+xml">>, []}; +all_ext(<<"rgb">>) -> {<<"image">>, <<"x-rgb">>, []}; +all_ext(<<"rif">>) -> {<<"application">>, <<"reginfo+xml">>, []}; +all_ext(<<"rip">>) -> {<<"audio">>, <<"vnd.rip">>, []}; +all_ext(<<"ris">>) -> {<<"application">>, <<"x-research-info-systems">>, []}; +all_ext(<<"rl">>) -> {<<"application">>, <<"resource-lists+xml">>, []}; +all_ext(<<"rlc">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-rlc">>, []}; +all_ext(<<"rld">>) -> {<<"application">>, <<"resource-lists-diff+xml">>, []}; +all_ext(<<"rm">>) -> {<<"application">>, <<"vnd.rn-realmedia">>, []}; +all_ext(<<"rmi">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"rmp">>) -> {<<"audio">>, <<"x-pn-realaudio-plugin">>, []}; +all_ext(<<"rms">>) -> {<<"application">>, <<"vnd.jcp.javame.midlet-rms">>, []}; +all_ext(<<"rmvb">>) -> {<<"application">>, <<"vnd.rn-realmedia-vbr">>, []}; +all_ext(<<"rnc">>) -> {<<"application">>, <<"relax-ng-compact-syntax">>, []}; +all_ext(<<"roa">>) -> {<<"application">>, <<"rpki-roa">>, []}; +all_ext(<<"roff">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"rp9">>) -> {<<"application">>, <<"vnd.cloanto.rp9">>, []}; +all_ext(<<"rpss">>) -> {<<"application">>, <<"vnd.nokia.radio-presets">>, []}; +all_ext(<<"rpst">>) -> {<<"application">>, <<"vnd.nokia.radio-preset">>, []}; +all_ext(<<"rq">>) -> {<<"application">>, <<"sparql-query">>, []}; +all_ext(<<"rs">>) -> {<<"application">>, <<"rls-services+xml">>, []}; +all_ext(<<"rsd">>) -> {<<"application">>, <<"rsd+xml">>, []}; +all_ext(<<"rss">>) -> {<<"application">>, <<"rss+xml">>, []}; +all_ext(<<"rtf">>) -> {<<"application">>, <<"rtf">>, []}; +all_ext(<<"rtx">>) -> {<<"text">>, <<"richtext">>, []}; +all_ext(<<"s3m">>) -> {<<"audio">>, <<"s3m">>, []}; +all_ext(<<"saf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-audio">>, []}; +all_ext(<<"sbml">>) -> {<<"application">>, <<"sbml+xml">>, []}; +all_ext(<<"sc">>) -> {<<"application">>, <<"vnd.ibm.secure-container">>, []}; +all_ext(<<"scd">>) -> {<<"application">>, <<"x-msschedule">>, []}; +all_ext(<<"scm">>) -> {<<"application">>, <<"vnd.lotus-screencam">>, []}; 
+all_ext(<<"scq">>) -> {<<"application">>, <<"scvp-cv-request">>, []}; +all_ext(<<"scs">>) -> {<<"application">>, <<"scvp-cv-response">>, []}; +all_ext(<<"scurl">>) -> {<<"text">>, <<"vnd.curl.scurl">>, []}; +all_ext(<<"sda">>) -> {<<"application">>, <<"vnd.stardivision.draw">>, []}; +all_ext(<<"sdc">>) -> {<<"application">>, <<"vnd.stardivision.calc">>, []}; +all_ext(<<"sdd">>) -> {<<"application">>, <<"vnd.stardivision.impress">>, []}; +all_ext(<<"sdkd">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []}; +all_ext(<<"sdkm">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []}; +all_ext(<<"sdp">>) -> {<<"application">>, <<"sdp">>, []}; +all_ext(<<"sdw">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []}; +all_ext(<<"see">>) -> {<<"application">>, <<"vnd.seemail">>, []}; +all_ext(<<"seed">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []}; +all_ext(<<"sema">>) -> {<<"application">>, <<"vnd.sema">>, []}; +all_ext(<<"semd">>) -> {<<"application">>, <<"vnd.semd">>, []}; +all_ext(<<"semf">>) -> {<<"application">>, <<"vnd.semf">>, []}; +all_ext(<<"ser">>) -> {<<"application">>, <<"java-serialized-object">>, []}; +all_ext(<<"setpay">>) -> {<<"application">>, <<"set-payment-initiation">>, []}; +all_ext(<<"setreg">>) -> {<<"application">>, <<"set-registration-initiation">>, []}; +all_ext(<<"sfd-hdstx">>) -> {<<"application">>, <<"vnd.hydrostatix.sof-data">>, []}; +all_ext(<<"sfs">>) -> {<<"application">>, <<"vnd.spotfire.sfs">>, []}; +all_ext(<<"sfv">>) -> {<<"text">>, <<"x-sfv">>, []}; +all_ext(<<"sgi">>) -> {<<"image">>, <<"sgi">>, []}; +all_ext(<<"sgl">>) -> {<<"application">>, <<"vnd.stardivision.writer-global">>, []}; +all_ext(<<"sgml">>) -> {<<"text">>, <<"sgml">>, []}; +all_ext(<<"sgm">>) -> {<<"text">>, <<"sgml">>, []}; +all_ext(<<"sh">>) -> {<<"application">>, <<"x-sh">>, []}; +all_ext(<<"shar">>) -> {<<"application">>, <<"x-shar">>, []}; +all_ext(<<"shf">>) -> {<<"application">>, <<"shf+xml">>, []}; +all_ext(<<"sid">>) -> {<<"image">>, <<"x-mrsid-image">>, []}; +all_ext(<<"sig">>) -> {<<"application">>, <<"pgp-signature">>, []}; +all_ext(<<"sil">>) -> {<<"audio">>, <<"silk">>, []}; +all_ext(<<"silo">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"sis">>) -> {<<"application">>, <<"vnd.symbian.install">>, []}; +all_ext(<<"sisx">>) -> {<<"application">>, <<"vnd.symbian.install">>, []}; +all_ext(<<"sit">>) -> {<<"application">>, <<"x-stuffit">>, []}; +all_ext(<<"sitx">>) -> {<<"application">>, <<"x-stuffitx">>, []}; +all_ext(<<"skd">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skm">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skp">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skt">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"sldm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slide.macroenabled.12">>, []}; +all_ext(<<"sldx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slide">>, []}; +all_ext(<<"slt">>) -> {<<"application">>, <<"vnd.epson.salt">>, []}; +all_ext(<<"sm">>) -> {<<"application">>, <<"vnd.stepmania.stepchart">>, []}; +all_ext(<<"smf">>) -> {<<"application">>, <<"vnd.stardivision.math">>, []}; +all_ext(<<"smi">>) -> {<<"application">>, <<"smil+xml">>, []}; +all_ext(<<"smil">>) -> {<<"application">>, <<"smil+xml">>, []}; +all_ext(<<"smv">>) -> {<<"video">>, <<"x-smv">>, []}; +all_ext(<<"smzip">>) -> {<<"application">>, <<"vnd.stepmania.package">>, []}; +all_ext(<<"snd">>) -> {<<"audio">>, <<"basic">>, []}; +all_ext(<<"snf">>) -> {<<"application">>, <<"x-font-snf">>, []}; 
+all_ext(<<"so">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"spc">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []}; +all_ext(<<"spf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-phrase">>, []}; +all_ext(<<"spl">>) -> {<<"application">>, <<"x-futuresplash">>, []}; +all_ext(<<"spot">>) -> {<<"text">>, <<"vnd.in3d.spot">>, []}; +all_ext(<<"spp">>) -> {<<"application">>, <<"scvp-vp-response">>, []}; +all_ext(<<"spq">>) -> {<<"application">>, <<"scvp-vp-request">>, []}; +all_ext(<<"spx">>) -> {<<"audio">>, <<"ogg">>, []}; +all_ext(<<"sql">>) -> {<<"application">>, <<"x-sql">>, []}; +all_ext(<<"src">>) -> {<<"application">>, <<"x-wais-source">>, []}; +all_ext(<<"srt">>) -> {<<"application">>, <<"x-subrip">>, []}; +all_ext(<<"sru">>) -> {<<"application">>, <<"sru+xml">>, []}; +all_ext(<<"srx">>) -> {<<"application">>, <<"sparql-results+xml">>, []}; +all_ext(<<"ssdl">>) -> {<<"application">>, <<"ssdl+xml">>, []}; +all_ext(<<"sse">>) -> {<<"application">>, <<"vnd.kodak-descriptor">>, []}; +all_ext(<<"ssf">>) -> {<<"application">>, <<"vnd.epson.ssf">>, []}; +all_ext(<<"ssml">>) -> {<<"application">>, <<"ssml+xml">>, []}; +all_ext(<<"st">>) -> {<<"application">>, <<"vnd.sailingtracker.track">>, []}; +all_ext(<<"stc">>) -> {<<"application">>, <<"vnd.sun.xml.calc.template">>, []}; +all_ext(<<"std">>) -> {<<"application">>, <<"vnd.sun.xml.draw.template">>, []}; +all_ext(<<"s">>) -> {<<"text">>, <<"x-asm">>, []}; +all_ext(<<"stf">>) -> {<<"application">>, <<"vnd.wt.stf">>, []}; +all_ext(<<"sti">>) -> {<<"application">>, <<"vnd.sun.xml.impress.template">>, []}; +all_ext(<<"stk">>) -> {<<"application">>, <<"hyperstudio">>, []}; +all_ext(<<"stl">>) -> {<<"application">>, <<"vnd.ms-pki.stl">>, []}; +all_ext(<<"str">>) -> {<<"application">>, <<"vnd.pg.format">>, []}; +all_ext(<<"stw">>) -> {<<"application">>, <<"vnd.sun.xml.writer.template">>, []}; +all_ext(<<"sub">>) -> {<<"image">>, <<"vnd.dvb.subtitle">>, []}; +all_ext(<<"sus">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []}; +all_ext(<<"susp">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []}; +all_ext(<<"sv4cpio">>) -> {<<"application">>, <<"x-sv4cpio">>, []}; +all_ext(<<"sv4crc">>) -> {<<"application">>, <<"x-sv4crc">>, []}; +all_ext(<<"svc">>) -> {<<"application">>, <<"vnd.dvb.service">>, []}; +all_ext(<<"svd">>) -> {<<"application">>, <<"vnd.svd">>, []}; +all_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +all_ext(<<"svgz">>) -> {<<"image">>, <<"svg+xml">>, []}; +all_ext(<<"swa">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"swf">>) -> {<<"application">>, <<"x-shockwave-flash">>, []}; +all_ext(<<"swi">>) -> {<<"application">>, <<"vnd.aristanetworks.swi">>, []}; +all_ext(<<"sxc">>) -> {<<"application">>, <<"vnd.sun.xml.calc">>, []}; +all_ext(<<"sxd">>) -> {<<"application">>, <<"vnd.sun.xml.draw">>, []}; +all_ext(<<"sxg">>) -> {<<"application">>, <<"vnd.sun.xml.writer.global">>, []}; +all_ext(<<"sxi">>) -> {<<"application">>, <<"vnd.sun.xml.impress">>, []}; +all_ext(<<"sxm">>) -> {<<"application">>, <<"vnd.sun.xml.math">>, []}; +all_ext(<<"sxw">>) -> {<<"application">>, <<"vnd.sun.xml.writer">>, []}; +all_ext(<<"t3">>) -> {<<"application">>, <<"x-t3vm-image">>, []}; +all_ext(<<"taglet">>) -> {<<"application">>, <<"vnd.mynfc">>, []}; +all_ext(<<"tao">>) -> {<<"application">>, <<"vnd.tao.intent-module-archive">>, []}; +all_ext(<<"tar">>) -> {<<"application">>, <<"x-tar">>, []}; +all_ext(<<"tcap">>) -> {<<"application">>, <<"vnd.3gpp2.tcap">>, []}; +all_ext(<<"tcl">>) -> {<<"application">>, <<"x-tcl">>, 
[]}; +all_ext(<<"teacher">>) -> {<<"application">>, <<"vnd.smart.teacher">>, []}; +all_ext(<<"tei">>) -> {<<"application">>, <<"tei+xml">>, []}; +all_ext(<<"teicorpus">>) -> {<<"application">>, <<"tei+xml">>, []}; +all_ext(<<"tex">>) -> {<<"application">>, <<"x-tex">>, []}; +all_ext(<<"texi">>) -> {<<"application">>, <<"x-texinfo">>, []}; +all_ext(<<"texinfo">>) -> {<<"application">>, <<"x-texinfo">>, []}; +all_ext(<<"text">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"tfi">>) -> {<<"application">>, <<"thraud+xml">>, []}; +all_ext(<<"tfm">>) -> {<<"application">>, <<"x-tex-tfm">>, []}; +all_ext(<<"tga">>) -> {<<"image">>, <<"x-tga">>, []}; +all_ext(<<"thmx">>) -> {<<"application">>, <<"vnd.ms-officetheme">>, []}; +all_ext(<<"tiff">>) -> {<<"image">>, <<"tiff">>, []}; +all_ext(<<"tif">>) -> {<<"image">>, <<"tiff">>, []}; +all_ext(<<"tmo">>) -> {<<"application">>, <<"vnd.tmobile-livetv">>, []}; +all_ext(<<"torrent">>) -> {<<"application">>, <<"x-bittorrent">>, []}; +all_ext(<<"tpl">>) -> {<<"application">>, <<"vnd.groove-tool-template">>, []}; +all_ext(<<"tpt">>) -> {<<"application">>, <<"vnd.trid.tpt">>, []}; +all_ext(<<"tra">>) -> {<<"application">>, <<"vnd.trueapp">>, []}; +all_ext(<<"trm">>) -> {<<"application">>, <<"x-msterminal">>, []}; +all_ext(<<"tr">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"tsd">>) -> {<<"application">>, <<"timestamped-data">>, []}; +all_ext(<<"tsv">>) -> {<<"text">>, <<"tab-separated-values">>, []}; +all_ext(<<"ttc">>) -> {<<"font">>, <<"collection">>, []}; +all_ext(<<"t">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"ttf">>) -> {<<"font">>, <<"ttf">>, []}; +all_ext(<<"ttl">>) -> {<<"text">>, <<"turtle">>, []}; +all_ext(<<"twd">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []}; +all_ext(<<"twds">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []}; +all_ext(<<"txd">>) -> {<<"application">>, <<"vnd.genomatix.tuxedo">>, []}; +all_ext(<<"txf">>) -> {<<"application">>, <<"vnd.mobius.txf">>, []}; +all_ext(<<"txt">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"u32">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"udeb">>) -> {<<"application">>, <<"x-debian-package">>, []}; +all_ext(<<"ufd">>) -> {<<"application">>, <<"vnd.ufdl">>, []}; +all_ext(<<"ufdl">>) -> {<<"application">>, <<"vnd.ufdl">>, []}; +all_ext(<<"ulx">>) -> {<<"application">>, <<"x-glulx">>, []}; +all_ext(<<"umj">>) -> {<<"application">>, <<"vnd.umajin">>, []}; +all_ext(<<"unityweb">>) -> {<<"application">>, <<"vnd.unity">>, []}; +all_ext(<<"uoml">>) -> {<<"application">>, <<"vnd.uoml+xml">>, []}; +all_ext(<<"uris">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"uri">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"urls">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"ustar">>) -> {<<"application">>, <<"x-ustar">>, []}; +all_ext(<<"utz">>) -> {<<"application">>, <<"vnd.uiq.theme">>, []}; +all_ext(<<"uu">>) -> {<<"text">>, <<"x-uuencode">>, []}; +all_ext(<<"uva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []}; +all_ext(<<"uvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []}; +all_ext(<<"uvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []}; +all_ext(<<"uvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []}; +all_ext(<<"uvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []}; +all_ext(<<"uvt">>) -> 
{<<"application">>, <<"vnd.dece.ttml+xml">>, []}; +all_ext(<<"uvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []}; +all_ext(<<"uvva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []}; +all_ext(<<"uvvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []}; +all_ext(<<"uvvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []}; +all_ext(<<"uvvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []}; +all_ext(<<"uvvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []}; +all_ext(<<"uvvt">>) -> {<<"application">>, <<"vnd.dece.ttml+xml">>, []}; +all_ext(<<"uvvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []}; +all_ext(<<"uvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []}; +all_ext(<<"uvvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []}; +all_ext(<<"uvvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []}; +all_ext(<<"uvvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []}; +all_ext(<<"uvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []}; +all_ext(<<"uvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []}; +all_ext(<<"vcard">>) -> {<<"text">>, <<"vcard">>, []}; +all_ext(<<"vcd">>) -> {<<"application">>, <<"x-cdlink">>, []}; +all_ext(<<"vcf">>) -> {<<"text">>, <<"x-vcard">>, []}; +all_ext(<<"vcg">>) -> {<<"application">>, <<"vnd.groove-vcard">>, []}; +all_ext(<<"vcs">>) -> {<<"text">>, <<"x-vcalendar">>, []}; +all_ext(<<"vcx">>) -> {<<"application">>, <<"vnd.vcx">>, []}; +all_ext(<<"vis">>) -> {<<"application">>, <<"vnd.visionary">>, []}; +all_ext(<<"viv">>) -> {<<"video">>, <<"vnd.vivo">>, []}; +all_ext(<<"vob">>) -> {<<"video">>, <<"x-ms-vob">>, []}; +all_ext(<<"vor">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []}; +all_ext(<<"vox">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"vrml">>) -> {<<"model">>, <<"vrml">>, []}; +all_ext(<<"vsd">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vsf">>) -> {<<"application">>, <<"vnd.vsf">>, []}; +all_ext(<<"vss">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vst">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vsw">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vtu">>) -> {<<"model">>, <<"vnd.vtu">>, []}; +all_ext(<<"vxml">>) -> {<<"application">>, <<"voicexml+xml">>, []}; +all_ext(<<"w3d">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"wad">>) -> {<<"application">>, <<"x-doom">>, []}; +all_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +all_ext(<<"wax">>) -> {<<"audio">>, <<"x-ms-wax">>, []}; +all_ext(<<"wbmp">>) -> {<<"image">>, <<"vnd.wap.wbmp">>, []}; +all_ext(<<"wbs">>) -> {<<"application">>, <<"vnd.criticaltools.wbs+xml">>, []}; +all_ext(<<"wbxml">>) -> {<<"application">>, <<"vnd.wap.wbxml">>, []}; +all_ext(<<"wcm">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wdb">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wdp">>) -> {<<"image">>, <<"vnd.ms-photo">>, []}; +all_ext(<<"weba">>) -> {<<"audio">>, <<"webm">>, []}; +all_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +all_ext(<<"webp">>) -> {<<"image">>, <<"webp">>, []}; +all_ext(<<"wg">>) -> {<<"application">>, <<"vnd.pmi.widget">>, []}; +all_ext(<<"wgt">>) -> {<<"application">>, <<"widget">>, []}; +all_ext(<<"wks">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wma">>) -> {<<"audio">>, <<"x-ms-wma">>, []}; 
+all_ext(<<"wmd">>) -> {<<"application">>, <<"x-ms-wmd">>, []}; +all_ext(<<"wmf">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"wmlc">>) -> {<<"application">>, <<"vnd.wap.wmlc">>, []}; +all_ext(<<"wmlsc">>) -> {<<"application">>, <<"vnd.wap.wmlscriptc">>, []}; +all_ext(<<"wmls">>) -> {<<"text">>, <<"vnd.wap.wmlscript">>, []}; +all_ext(<<"wml">>) -> {<<"text">>, <<"vnd.wap.wml">>, []}; +all_ext(<<"wm">>) -> {<<"video">>, <<"x-ms-wm">>, []}; +all_ext(<<"wmv">>) -> {<<"video">>, <<"x-ms-wmv">>, []}; +all_ext(<<"wmx">>) -> {<<"video">>, <<"x-ms-wmx">>, []}; +all_ext(<<"wmz">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"woff2">>) -> {<<"font">>, <<"woff2">>, []}; +all_ext(<<"woff">>) -> {<<"font">>, <<"woff">>, []}; +all_ext(<<"wpd">>) -> {<<"application">>, <<"vnd.wordperfect">>, []}; +all_ext(<<"wpl">>) -> {<<"application">>, <<"vnd.ms-wpl">>, []}; +all_ext(<<"wps">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wqd">>) -> {<<"application">>, <<"vnd.wqd">>, []}; +all_ext(<<"wri">>) -> {<<"application">>, <<"x-mswrite">>, []}; +all_ext(<<"wrl">>) -> {<<"model">>, <<"vrml">>, []}; +all_ext(<<"wsdl">>) -> {<<"application">>, <<"wsdl+xml">>, []}; +all_ext(<<"wspolicy">>) -> {<<"application">>, <<"wspolicy+xml">>, []}; +all_ext(<<"wtb">>) -> {<<"application">>, <<"vnd.webturbo">>, []}; +all_ext(<<"wvx">>) -> {<<"video">>, <<"x-ms-wvx">>, []}; +all_ext(<<"x32">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"x3db">>) -> {<<"model">>, <<"x3d+binary">>, []}; +all_ext(<<"x3dbz">>) -> {<<"model">>, <<"x3d+binary">>, []}; +all_ext(<<"x3d">>) -> {<<"model">>, <<"x3d+xml">>, []}; +all_ext(<<"x3dv">>) -> {<<"model">>, <<"x3d+vrml">>, []}; +all_ext(<<"x3dvz">>) -> {<<"model">>, <<"x3d+vrml">>, []}; +all_ext(<<"x3dz">>) -> {<<"model">>, <<"x3d+xml">>, []}; +all_ext(<<"xaml">>) -> {<<"application">>, <<"xaml+xml">>, []}; +all_ext(<<"xap">>) -> {<<"application">>, <<"x-silverlight-app">>, []}; +all_ext(<<"xar">>) -> {<<"application">>, <<"vnd.xara">>, []}; +all_ext(<<"xbap">>) -> {<<"application">>, <<"x-ms-xbap">>, []}; +all_ext(<<"xbd">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks.binder">>, []}; +all_ext(<<"xbm">>) -> {<<"image">>, <<"x-xbitmap">>, []}; +all_ext(<<"xdf">>) -> {<<"application">>, <<"xcap-diff+xml">>, []}; +all_ext(<<"xdm">>) -> {<<"application">>, <<"vnd.syncml.dm+xml">>, []}; +all_ext(<<"xdp">>) -> {<<"application">>, <<"vnd.adobe.xdp+xml">>, []}; +all_ext(<<"xdssc">>) -> {<<"application">>, <<"dssc+xml">>, []}; +all_ext(<<"xdw">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks">>, []}; +all_ext(<<"xenc">>) -> {<<"application">>, <<"xenc+xml">>, []}; +all_ext(<<"xer">>) -> {<<"application">>, <<"patch-ops-error+xml">>, []}; +all_ext(<<"xfdf">>) -> {<<"application">>, <<"vnd.adobe.xfdf">>, []}; +all_ext(<<"xfdl">>) -> {<<"application">>, <<"vnd.xfdl">>, []}; +all_ext(<<"xht">>) -> {<<"application">>, <<"xhtml+xml">>, []}; +all_ext(<<"xhtml">>) -> {<<"application">>, <<"xhtml+xml">>, []}; +all_ext(<<"xhvml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xif">>) -> {<<"image">>, <<"vnd.xiff">>, []}; +all_ext(<<"xla">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xlam">>) -> {<<"application">>, <<"vnd.ms-excel.addin.macroenabled.12">>, []}; +all_ext(<<"xlc">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xlf">>) -> {<<"application">>, <<"x-xliff+xml">>, []}; +all_ext(<<"xlm">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xls">>) -> {<<"application">>, 
<<"vnd.ms-excel">>, []}; +all_ext(<<"xlsb">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.binary.macroenabled.12">>, []}; +all_ext(<<"xlsm">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.macroenabled.12">>, []}; +all_ext(<<"xlsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.sheet">>, []}; +all_ext(<<"xlt">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xltm">>) -> {<<"application">>, <<"vnd.ms-excel.template.macroenabled.12">>, []}; +all_ext(<<"xltx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.template">>, []}; +all_ext(<<"xlw">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xm">>) -> {<<"audio">>, <<"xm">>, []}; +all_ext(<<"xml">>) -> {<<"application">>, <<"xml">>, []}; +all_ext(<<"xo">>) -> {<<"application">>, <<"vnd.olpc-sugar">>, []}; +all_ext(<<"xop">>) -> {<<"application">>, <<"xop+xml">>, []}; +all_ext(<<"xpi">>) -> {<<"application">>, <<"x-xpinstall">>, []}; +all_ext(<<"xpl">>) -> {<<"application">>, <<"xproc+xml">>, []}; +all_ext(<<"xpm">>) -> {<<"image">>, <<"x-xpixmap">>, []}; +all_ext(<<"xpr">>) -> {<<"application">>, <<"vnd.is-xpr">>, []}; +all_ext(<<"xps">>) -> {<<"application">>, <<"vnd.ms-xpsdocument">>, []}; +all_ext(<<"xpw">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []}; +all_ext(<<"xpx">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []}; +all_ext(<<"xsl">>) -> {<<"application">>, <<"xml">>, []}; +all_ext(<<"xslt">>) -> {<<"application">>, <<"xslt+xml">>, []}; +all_ext(<<"xsm">>) -> {<<"application">>, <<"vnd.syncml+xml">>, []}; +all_ext(<<"xspf">>) -> {<<"application">>, <<"xspf+xml">>, []}; +all_ext(<<"xul">>) -> {<<"application">>, <<"vnd.mozilla.xul+xml">>, []}; +all_ext(<<"xvm">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xvml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xwd">>) -> {<<"image">>, <<"x-xwindowdump">>, []}; +all_ext(<<"xyz">>) -> {<<"chemical">>, <<"x-xyz">>, []}; +all_ext(<<"xz">>) -> {<<"application">>, <<"x-xz">>, []}; +all_ext(<<"yang">>) -> {<<"application">>, <<"yang">>, []}; +all_ext(<<"yin">>) -> {<<"application">>, <<"yin+xml">>, []}; +all_ext(<<"z1">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z2">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z3">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z4">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z5">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z6">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z7">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z8">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"zaz">>) -> {<<"application">>, <<"vnd.zzazz.deck+xml">>, []}; +all_ext(<<"zip">>) -> {<<"application">>, <<"zip">>, []}; +all_ext(<<"zir">>) -> {<<"application">>, <<"vnd.zul">>, []}; +all_ext(<<"zirz">>) -> {<<"application">>, <<"vnd.zul">>, []}; +all_ext(<<"zmm">>) -> {<<"application">>, <<"vnd.handheld-entertainment+xml">>, []}; +%% GENERATED +all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. 
+ +web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; +web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. diff --git a/src/wsLib/cow_mimetypes.erl.src b/src/wsLib/cow_mimetypes.erl.src new file mode 100644 index 0000000..2c57834 --- /dev/null +++ b/src/wsLib/cow_mimetypes.erl.src @@ -0,0 +1,61 @@ +%% Copyright (c) 2013-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_mimetypes). + +-export([all/1]). +-export([web/1]). + +%% @doc Return the mimetype for any file by looking at its extension. + +-spec all(binary()) -> {binary(), binary(), []}. +all(Path) -> + case filename:extension(Path) of + <<>> -> {<<"application">>, <<"octet-stream">>, []}; + %% @todo Convert to string:lowercase on OTP-20+. + << $., Ext/binary >> -> all_ext(list_to_binary(string:to_lower(binary_to_list(Ext)))) + end. + +%% @doc Return the mimetype for a Web related file by looking at its extension. + +-spec web(binary()) -> {binary(), binary(), []}. +web(Path) -> + case filename:extension(Path) of + <<>> -> {<<"application">>, <<"octet-stream">>, []}; + %% @todo Convert to string:lowercase on OTP-20+. + << $., Ext/binary >> -> web_ext(list_to_binary(string:to_lower(binary_to_list(Ext)))) + end. + +%% Internal. + +%% GENERATED +all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. 
+ +web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; +web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. diff --git a/src/wsLib/cow_multipart.erl b/src/wsLib/cow_multipart.erl new file mode 100644 index 0000000..f418813 --- /dev/null +++ b/src/wsLib/cow_multipart.erl @@ -0,0 +1,775 @@ +%% Copyright (c) 2014-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_multipart). + +%% Parsing. +-export([parse_headers/2]). +-export([parse_body/2]). + +%% Building. +-export([boundary/0]). +-export([first_part/2]). +-export([part/2]). +-export([close/1]). + +%% Headers. +-export([form_data/1]). +-export([parse_content_disposition/1]). +-export([parse_content_transfer_encoding/1]). +-export([parse_content_type/1]). + +-type headers() :: [{iodata(), iodata()}]. +-export_type([headers/0]). + +-include("cow_inline.hrl"). + +-define(TEST1_MIME, << + "This is a message with multiple parts in MIME format.\r\n" + "--frontier\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--frontier\r\n" + "Content-Type: application/octet-stream\r\n" + "Content-Transfer-Encoding: base64\r\n" + "\r\n" + "PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==\r\n" + "--frontier--" +>>). +-define(TEST1_BOUNDARY, <<"frontier">>). + +-define(TEST2_MIME, << + "--AaB03x\r\n" + "Content-Disposition: form-data; name=\"submit-name\"\r\n" + "\r\n" + "Larry\r\n" + "--AaB03x\r\n" + "Content-Disposition: form-data; name=\"files\"\r\n" + "Content-Type: multipart/mixed; boundary=BbC04y\r\n" + "\r\n" + "--BbC04y\r\n" + "Content-Disposition: file; filename=\"file1.txt\"\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "... contents of file1.txt ...\r\n" + "--BbC04y\r\n" + "Content-Disposition: file; filename=\"file2.gif\"\r\n" + "Content-Type: image/gif\r\n" + "Content-Transfer-Encoding: binary\r\n" + "\r\n" + "...contents of file2.gif...\r\n" + "--BbC04y--\r\n" + "--AaB03x--" +>>). 
+-define(TEST2_BOUNDARY, <<"AaB03x">>). + +-define(TEST3_MIME, << + "This is the preamble.\r\n" + "--boundary\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--boundary--" + "\r\nThis is the epilogue. Here it includes leading CRLF" +>>). +-define(TEST3_BOUNDARY, <<"boundary">>). + +-define(TEST4_MIME, << + "This is the preamble.\r\n" + "--boundary\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--boundary--" + "\r\n" +>>). +-define(TEST4_BOUNDARY, <<"boundary">>). + +%% RFC 2046, Section 5.1.1 +-define(TEST5_MIME, << + "This is the preamble. It is to be ignored, though it\r\n" + "is a handy place for composition agents to include an\r\n" + "explanatory note to non-MIME conformant readers.\r\n" + "\r\n" + "--simple boundary\r\n", + "\r\n" + "This is implicitly typed plain US-ASCII text.\r\n" + "It does NOT end with a linebreak." + "\r\n" + "--simple boundary\r\n", + "Content-type: text/plain; charset=us-ascii\r\n" + "\r\n" + "This is explicitly typed plain US-ASCII text.\r\n" + "It DOES end with a linebreak.\r\n" + "\r\n" + "--simple boundary--\r\n" + "\r\n" + "This is the epilogue. It is also to be ignored." +>>). +-define(TEST5_BOUNDARY, <<"simple boundary">>). + +%% Parsing. +%% +%% The multipart format is defined in RFC 2045 and RFC 2046. + +%% @doc Parse the headers for the next multipart part. +%% +%% This function skips any preamble before the boundary. +%% The preamble may be retrieved using parse_body/2. +%% +%% This function will accept input of any size; it is +%% up to the caller to limit it if needed. + +-spec parse_headers(binary(), binary()) + -> more | {more, binary()} + | {ok, headers(), binary()} + | {done, binary()}. +%% If the stream starts with the boundary we can make a few assumptions +%% and quickly figure out if we got the complete list of headers. +parse_headers(<< "--", Stream/bits >>, Boundary) -> + BoundarySize = byte_size(Boundary), + case Stream of + %% Last boundary. Return the epilogue. + << Boundary:BoundarySize/binary, "--", Stream2/bits >> -> + {done, Stream2}; + << Boundary:BoundarySize/binary, Stream2/bits >> -> + %% We have all the headers only if there is a \r\n\r\n + %% somewhere in the data after the boundary. + case binary:match(Stream2, <<"\r\n\r\n">>) of + nomatch -> + more; + _ -> + before_parse_headers(Stream2) + end; + %% If there isn't enough to represent Boundary \r\n\r\n + %% then we definitely don't have all the headers. + _ when byte_size(Stream) < byte_size(Boundary) + 4 -> + more; + %% Otherwise we have preamble data to skip. + %% We still got rid of the first two misleading bytes. + _ -> + skip_preamble(Stream, Boundary) + end; +%% Otherwise we have preamble data to skip. +parse_headers(Stream, Boundary) -> + skip_preamble(Stream, Boundary). + +%% We need to find the boundary and a \r\n\r\n after that. +%% Since the boundary isn't at the start, it must be right +%% after a \r\n too. +skip_preamble(Stream, Boundary) -> + case binary:match(Stream, <<"\r\n--", Boundary/bits >>) of + %% No boundary, need more data. + nomatch -> + %% We can safely skip the size of the stream + %% minus the last 3 bytes which may be a partial boundary. + SkipSize = byte_size(Stream) - 3, + case SkipSize > 0 of + false -> + more; + true -> + << _:SkipSize/binary, Stream2/bits >> = Stream, + {more, Stream2} + end; + {Start, Length} -> + Start2 = Start + Length, + << _:Start2/binary, Stream2/bits >> = Stream, + case Stream2 of + %% Last boundary. Return the epilogue.
+ << "--", Stream3/bits >> -> + {done, Stream3}; + _ -> + case binary:match(Stream, <<"\r\n\r\n">>) of + %% We don't have the full headers. + nomatch -> + {more, Stream2}; + _ -> + before_parse_headers(Stream2) + end + end + end. + +before_parse_headers(<< "\r\n\r\n", Stream/bits >>) -> + %% This indicates that there are no headers, so we can abort immediately. + {ok, [], Stream}; +before_parse_headers(<< "\r\n", Stream/bits >>) -> + %% There is a line break right after the boundary, skip it. + parse_hd_name(Stream, [], <<>>). + +parse_hd_name(<< C, Rest/bits >>, H, SoFar) -> + case C of + $: -> parse_hd_before_value(Rest, H, SoFar); + $\s -> parse_hd_name_ws(Rest, H, SoFar); + $\t -> parse_hd_name_ws(Rest, H, SoFar); + _ -> ?LOWER(parse_hd_name, Rest, H, SoFar) + end. + +parse_hd_name_ws(<< C, Rest/bits >>, H, Name) -> + case C of + $\s -> parse_hd_name_ws(Rest, H, Name); + $\t -> parse_hd_name_ws(Rest, H, Name); + $: -> parse_hd_before_value(Rest, H, Name) + end. + +parse_hd_before_value(<< $\s, Rest/bits >>, H, N) -> + parse_hd_before_value(Rest, H, N); +parse_hd_before_value(<< $\t, Rest/bits >>, H, N) -> + parse_hd_before_value(Rest, H, N); +parse_hd_before_value(Buffer, H, N) -> + parse_hd_value(Buffer, H, N, <<>>). + +parse_hd_value(<< $\r, Rest/bits >>, Headers, Name, SoFar) -> + case Rest of + << "\n\r\n", Rest2/bits >> -> + {ok, [{Name, SoFar}|Headers], Rest2}; + << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t -> + parse_hd_value(Rest2, Headers, Name, SoFar); + << $\n, Rest2/bits >> -> + parse_hd_name(Rest2, [{Name, SoFar}|Headers], <<>>) + end; +parse_hd_value(<< C, Rest/bits >>, H, N, SoFar) -> + parse_hd_value(Rest, H, N, << SoFar/binary, C >>). + +%% @doc Parse the body of the current multipart part. +%% +%% The body is everything until the next boundary. + +-spec parse_body(binary(), binary()) + -> {ok, binary()} | {ok, binary(), binary()} + | done | {done, binary()} | {done, binary(), binary()}. +parse_body(Stream, Boundary) -> + BoundarySize = byte_size(Boundary), + case Stream of + << "--", Boundary:BoundarySize/binary, _/bits >> -> + done; + _ -> + case binary:match(Stream, << "\r\n--", Boundary/bits >>) of + %% No boundary, check for a possible partial at the end. + %% Return more or less of the body depending on the result. + nomatch -> + StreamSize = byte_size(Stream), + From = StreamSize - BoundarySize - 3, + MatchOpts = if + %% Binary too small to contain boundary, check it fully. + From < 0 -> []; + %% Optimize, only check the end of the binary. + true -> [{scope, {From, StreamSize - From}}] + end, + case binary:match(Stream, <<"\r">>, MatchOpts) of + nomatch -> + {ok, Stream}; + {Pos, _} -> + case Stream of + << Body:Pos/binary >> -> + {ok, Body}; + << Body:Pos/binary, Rest/bits >> -> + {ok, Body, Rest} + end + end; + %% Boundary found, this is the last chunk of the body. + {Pos, _} -> + case Stream of + << Body:Pos/binary, "\r\n" >> -> + {done, Body}; + << Body:Pos/binary, "\r\n", Rest/bits >> -> + {done, Body, Rest}; + << Body:Pos/binary, Rest/bits >> -> + {done, Body, Rest} + end + end + end. + +-ifdef(TEST). 
+parse_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + {ok, H1, Rest} = parse_headers(?TEST1_MIME, ?TEST1_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST1_BOUNDARY), + done = parse_body(Rest2, ?TEST1_BOUNDARY), + {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST1_BOUNDARY), + H2 = lists:sort(H2Unsorted), + {done, Body2, Rest4} = parse_body(Rest3, ?TEST1_BOUNDARY), + done = parse_body(Rest4, ?TEST1_BOUNDARY), + {done, <<>>} = parse_headers(Rest4, ?TEST1_BOUNDARY), + ok. + +parse_interleaved_test() -> + H1 = [{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}], + Body1 = <<"Larry">>, + H2 = lists:sort([{<<"content-disposition">>, <<"form-data; name=\"files\"">>}, + {<<"content-type">>, <<"multipart/mixed; boundary=BbC04y">>}]), + InH1 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file1.txt\"">>}, + {<<"content-type">>, <<"text/plain">>}]), + InBody1 = <<"... contents of file1.txt ...">>, + InH2 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file2.gif\"">>}, + {<<"content-type">>, <<"image/gif">>}, + {<<"content-transfer-encoding">>, <<"binary">>}]), + InBody2 = <<"...contents of file2.gif...">>, + {ok, H1, Rest} = parse_headers(?TEST2_MIME, ?TEST2_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST2_BOUNDARY), + done = parse_body(Rest2, ?TEST2_BOUNDARY), + {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST2_BOUNDARY), + H2 = lists:sort(H2Unsorted), + {_, ContentType} = lists:keyfind(<<"content-type">>, 1, H2), + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, InBoundary}]} + = parse_content_type(ContentType), + {ok, InH1Unsorted, InRest} = parse_headers(Rest3, InBoundary), + InH1 = lists:sort(InH1Unsorted), + {done, InBody1, InRest2} = parse_body(InRest, InBoundary), + done = parse_body(InRest2, InBoundary), + {ok, InH2Unsorted, InRest3} = parse_headers(InRest2, InBoundary), + InH2 = lists:sort(InH2Unsorted), + {done, InBody2, InRest4} = parse_body(InRest3, InBoundary), + done = parse_body(InRest4, InBoundary), + {done, Rest4} = parse_headers(InRest4, InBoundary), + {done, <<>>} = parse_headers(Rest4, ?TEST2_BOUNDARY), + ok. + +parse_epilogue_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + Epilogue = <<"\r\nThis is the epilogue. Here it includes leading CRLF">>, + {ok, H1, Rest} = parse_headers(?TEST3_MIME, ?TEST3_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST3_BOUNDARY), + done = parse_body(Rest2, ?TEST3_BOUNDARY), + {done, Epilogue} = parse_headers(Rest2, ?TEST3_BOUNDARY), + ok. + +parse_epilogue_crlf_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + Epilogue = <<"\r\n">>, + {ok, H1, Rest} = parse_headers(?TEST4_MIME, ?TEST4_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST4_BOUNDARY), + done = parse_body(Rest2, ?TEST4_BOUNDARY), + {done, Epilogue} = parse_headers(Rest2, ?TEST4_BOUNDARY), + ok. + +parse_rfc2046_test() -> + %% The following is an example included in RFC 2046, Section 5.1.1. 
+ Body1 = <<"This is implicitly typed plain US-ASCII text.\r\n" + "It does NOT end with a linebreak.">>, + Body2 = <<"This is explicitly typed plain US-ASCII text.\r\n" + "It DOES end with a linebreak.\r\n">>, + H2 = [{<<"content-type">>, <<"text/plain; charset=us-ascii">>}], + Epilogue = <<"\r\n\r\nThis is the epilogue. It is also to be ignored.">>, + {ok, [], Rest} = parse_headers(?TEST5_MIME, ?TEST5_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST5_BOUNDARY), + {ok, H2, Rest3} = parse_headers(Rest2, ?TEST5_BOUNDARY), + {done, Body2, Rest4} = parse_body(Rest3, ?TEST5_BOUNDARY), + {done, Epilogue} = parse_headers(Rest4, ?TEST5_BOUNDARY), + ok. + +parse_partial_test() -> + {ok, <<0:8000, "abcdef">>, <<"\rghij">>} + = parse_body(<<0:8000, "abcdef\rghij">>, <<"boundary">>), + {ok, <<"abcdef">>, <<"\rghij">>} + = parse_body(<<"abcdef\rghij">>, <<"boundary">>), + {ok, <<"abc">>, <<"\rdef">>} + = parse_body(<<"abc\rdef">>, <<"boundaryboundary">>), + {ok, <<0:8000, "abcdef">>, <<"\r\nghij">>} + = parse_body(<<0:8000, "abcdef\r\nghij">>, <<"boundary">>), + {ok, <<"abcdef">>, <<"\r\nghij">>} + = parse_body(<<"abcdef\r\nghij">>, <<"boundary">>), + {ok, <<"abc">>, <<"\r\ndef">>} + = parse_body(<<"abc\r\ndef">>, <<"boundaryboundary">>), + {ok, <<"boundary">>, <<"\r">>} + = parse_body(<<"boundary\r">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n">>} + = parse_body(<<"boundary\r\n">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n-">>} + = parse_body(<<"boundary\r\n-">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n--">>} + = parse_body(<<"boundary\r\n--">>, <<"boundary">>), + ok. + +perf_parse_multipart(Stream, Boundary) -> + case parse_headers(Stream, Boundary) of + {ok, _, Rest} -> + {_, _, Rest2} = parse_body(Rest, Boundary), + perf_parse_multipart(Rest2, Boundary); + {done, _} -> + ok + end. + +horse_parse() -> + horse:repeat(50000, + perf_parse_multipart(?TEST1_MIME, ?TEST1_BOUNDARY) + ). +-endif. + +%% Building. + +%% @doc Generate a new random boundary. +%% +%% The boundary generated has a low probability of ever appearing +%% in the data. + +-spec boundary() -> binary(). +boundary() -> + cow_base64url:encode(crypto:strong_rand_bytes(48), #{padding => false}). + +%% @doc Return the first part's head. +%% +%% This works exactly like the part/2 function except there is +%% no leading \r\n. It's not required to use this function, +%% just makes the output a little smaller and prettier. + +-spec first_part(binary(), headers()) -> iodata(). +first_part(Boundary, Headers) -> + [<<"--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])]. + +%% @doc Return a part's head. + +-spec part(binary(), headers()) -> iodata(). +part(Boundary, Headers) -> + [<<"\r\n--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])]. + +headers_to_iolist([], Acc) -> + lists:reverse([<<"\r\n">>|Acc]); +headers_to_iolist([{N, V}|Tail], Acc) -> + %% We don't want to create a sublist so we list the + %% values in reverse order so that it gets reversed properly. + headers_to_iolist(Tail, [<<"\r\n">>, V, <<": ">>, N|Acc]). + +%% @doc Return the closing delimiter of the multipart message. + +-spec close(binary()) -> iodata(). +close(Boundary) -> + [<<"\r\n--">>, Boundary, <<"--">>]. + +-ifdef(TEST). 
+build_test() -> + Result = string:to_lower(binary_to_list(?TEST1_MIME)), + Result = string:to_lower(binary_to_list(iolist_to_binary([ + <<"This is a message with multiple parts in MIME format.\r\n">>, + first_part(?TEST1_BOUNDARY, [{<<"content-type">>, <<"text/plain">>}]), + <<"This is the body of the message.">>, + part(?TEST1_BOUNDARY, [ + {<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + close(?TEST1_BOUNDARY) + ]))), + ok. + +identity_test() -> + B = boundary(), + Preamble = <<"This is a message with multiple parts in MIME format.">>, + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + Epilogue = <<"Gotta go fast!">>, + M = iolist_to_binary([ + Preamble, + part(B, H1), Body1, + part(B, H2), Body2, + close(B), + Epilogue + ]), + {done, Preamble, M2} = parse_body(M, B), + {ok, H1, M3} = parse_headers(M2, B), + {done, Body1, M4} = parse_body(M3, B), + {ok, H2Unsorted, M5} = parse_headers(M4, B), + H2 = lists:sort(H2Unsorted), + {done, Body2, M6} = parse_body(M5, B), + {done, Epilogue} = parse_headers(M6, B), + ok. + +perf_build_multipart() -> + B = boundary(), + [ + <<"preamble\r\n">>, + first_part(B, [{<<"content-type">>, <<"text/plain">>}]), + <<"This is the body of the message.">>, + part(B, [ + {<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + close(B), + <<"epilogue">> + ]. + +horse_build() -> + horse:repeat(50000, + perf_build_multipart() + ). +-endif. + +%% Headers. + +%% @doc Convenience function for extracting information from headers +%% when parsing a multipart/form-data stream. + +-spec form_data(headers() | #{binary() => binary()}) + -> {data, binary()} + | {file, binary(), binary(), binary()}. +form_data(Headers) when is_map(Headers) -> + form_data(maps:to_list(Headers)); +form_data(Headers) -> + {_, DispositionBin} = lists:keyfind(<<"content-disposition">>, 1, Headers), + {<<"form-data">>, Params} = parse_content_disposition(DispositionBin), + {_, FieldName} = lists:keyfind(<<"name">>, 1, Params), + case lists:keyfind(<<"filename">>, 1, Params) of + false -> + {data, FieldName}; + {_, Filename} -> + Type = case lists:keyfind(<<"content-type">>, 1, Headers) of + false -> <<"text/plain">>; + {_, T} -> T + end, + {file, FieldName, Filename, Type} + end. + +-ifdef(TEST). +form_data_test_() -> + Tests = [ + {[{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}], + {data, <<"submit-name">>}}, + {[{<<"content-disposition">>, + <<"form-data; name=\"files\"; filename=\"file1.txt\"">>}, + {<<"content-type">>, <<"text/x-plain">>}], + {file, <<"files">>, <<"file1.txt">>, <<"text/x-plain">>}} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = form_data(V) end} || {V, R} <- Tests]. +-endif. + +%% @todo parse_content_description +%% @todo parse_content_id + +%% @doc Parse an RFC 2183 content-disposition value. 
+%% @todo Support RFC 2231. + +-spec parse_content_disposition(binary()) + -> {binary(), [{binary(), binary()}]}. +parse_content_disposition(Bin) -> + parse_cd_type(Bin, <<>>). + +parse_cd_type(<<>>, Acc) -> + {Acc, []}; +parse_cd_type(<< C, Rest/bits >>, Acc) -> + case C of + $; -> {Acc, parse_before_param(Rest, [])}; + $\s -> {Acc, parse_before_param(Rest, [])}; + $\t -> {Acc, parse_before_param(Rest, [])}; + _ -> ?LOWER(parse_cd_type, Rest, Acc) + end. + +-ifdef(TEST). +parse_content_disposition_test_() -> + Tests = [ + {<<"inline">>, {<<"inline">>, []}}, + {<<"attachment">>, {<<"attachment">>, []}}, + {<<"attachment; filename=genome.jpeg;" + " modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>, + {<<"attachment">>, [ + {<<"filename">>, <<"genome.jpeg">>}, + {<<"modification-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>} + ]}}, + {<<"form-data; name=\"user\"">>, + {<<"form-data">>, [{<<"name">>, <<"user">>}]}}, + {<<"form-data; NAME=\"submit-name\"">>, + {<<"form-data">>, [{<<"name">>, <<"submit-name">>}]}}, + {<<"form-data; name=\"files\"; filename=\"file1.txt\"">>, + {<<"form-data">>, [ + {<<"name">>, <<"files">>}, + {<<"filename">>, <<"file1.txt">>} + ]}}, + {<<"file; filename=\"file1.txt\"">>, + {<<"file">>, [{<<"filename">>, <<"file1.txt">>}]}}, + {<<"file; filename=\"file2.gif\"">>, + {<<"file">>, [{<<"filename">>, <<"file2.gif">>}]}} + ], + [{V, fun() -> R = parse_content_disposition(V) end} || {V, R} <- Tests]. + +horse_parse_content_disposition_attachment() -> + horse:repeat(100000, + parse_content_disposition(<<"attachment; filename=genome.jpeg;" + " modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>) + ). + +horse_parse_content_disposition_form_data() -> + horse:repeat(100000, + parse_content_disposition( + <<"form-data; name=\"files\"; filename=\"file1.txt\"">>) + ). + +horse_parse_content_disposition_inline() -> + horse:repeat(100000, + parse_content_disposition(<<"inline">>) + ). +-endif. + +%% @doc Parse an RFC 2045 content-transfer-encoding header. + +-spec parse_content_transfer_encoding(binary()) -> binary(). +parse_content_transfer_encoding(Bin) -> + ?LOWER(Bin). + +-ifdef(TEST). +parse_content_transfer_encoding_test_() -> + Tests = [ + {<<"7bit">>, <<"7bit">>}, + {<<"7BIT">>, <<"7bit">>}, + {<<"8bit">>, <<"8bit">>}, + {<<"binary">>, <<"binary">>}, + {<<"quoted-printable">>, <<"quoted-printable">>}, + {<<"base64">>, <<"base64">>}, + {<<"Base64">>, <<"base64">>}, + {<<"BASE64">>, <<"base64">>}, + {<<"bAsE64">>, <<"base64">>} + ], + [{V, fun() -> R = parse_content_transfer_encoding(V) end} + || {V, R} <- Tests]. + +horse_parse_content_transfer_encoding() -> + horse:repeat(100000, + parse_content_transfer_encoding(<<"QUOTED-PRINTABLE">>) + ). +-endif. + +%% @doc Parse an RFC 2045 content-type header. + +-spec parse_content_type(binary()) + -> {binary(), binary(), [{binary(), binary()}]}. +parse_content_type(Bin) -> + parse_ct_type(Bin, <<>>). + +parse_ct_type(<< C, Rest/bits >>, Acc) -> + case C of + $/ -> parse_ct_subtype(Rest, Acc, <<>>); + _ -> ?LOWER(parse_ct_type, Rest, Acc) + end. + +parse_ct_subtype(<<>>, Type, Subtype) when Subtype =/= <<>> -> + {Type, Subtype, []}; +parse_ct_subtype(<< C, Rest/bits >>, Type, Acc) -> + case C of + $; -> {Type, Acc, parse_before_param(Rest, [])}; + $\s -> {Type, Acc, parse_before_param(Rest, [])}; + $\t -> {Type, Acc, parse_before_param(Rest, [])}; + _ -> ?LOWER(parse_ct_subtype, Rest, Type, Acc) + end. + +-ifdef(TEST). 
+parse_content_type_test_() -> + Tests = [ + {<<"image/gif">>, + {<<"image">>, <<"gif">>, []}}, + {<<"text/plain">>, + {<<"text">>, <<"plain">>, []}}, + {<<"text/plain; charset=us-ascii">>, + {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}}, + {<<"text/plain; charset=\"us-ascii\"">>, + {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}}, + {<<"multipart/form-data; boundary=AaB03x">>, + {<<"multipart">>, <<"form-data">>, + [{<<"boundary">>, <<"AaB03x">>}]}}, + {<<"multipart/mixed; boundary=BbC04y">>, + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"BbC04y">>}]}}, + {<<"multipart/mixed; boundary=--------">>, + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"--------">>}]}}, + {<<"application/x-horse; filename=genome.jpeg;" + " some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";" + " charset=us-ascii; empty=; number=12345">>, + {<<"application">>, <<"x-horse">>, [ + {<<"filename">>, <<"genome.jpeg">>}, + {<<"some-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>}, + {<<"charset">>, <<"us-ascii">>}, + {<<"empty">>, <<>>}, + {<<"number">>, <<"12345">>} + ]}} + ], + [{V, fun() -> R = parse_content_type(V) end} + || {V, R} <- Tests]. + +horse_parse_content_type_zero() -> + horse:repeat(100000, + parse_content_type(<<"text/plain">>) + ). + +horse_parse_content_type_one() -> + horse:repeat(100000, + parse_content_type(<<"text/plain; charset=\"us-ascii\"">>) + ). + +horse_parse_content_type_five() -> + horse:repeat(100000, + parse_content_type(<<"application/x-horse; filename=genome.jpeg;" + " some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";" + " charset=us-ascii; empty=; number=12345">>) + ). +-endif. + +%% @doc Parse RFC 2045 parameters. + +parse_before_param(<<>>, Params) -> + lists:reverse(Params); +parse_before_param(<< C, Rest/bits >>, Params) -> + case C of + $; -> parse_before_param(Rest, Params); + $\s -> parse_before_param(Rest, Params); + $\t -> parse_before_param(Rest, Params); + _ -> ?LOWER(parse_param_name, Rest, Params, <<>>) + end. + +parse_param_name(<<>>, Params, Acc) -> + lists:reverse([{Acc, <<>>}|Params]); +parse_param_name(<< C, Rest/bits >>, Params, Acc) -> + case C of + $= -> parse_param_value(Rest, Params, Acc); + _ -> ?LOWER(parse_param_name, Rest, Params, Acc) + end. + +parse_param_value(<<>>, Params, Name) -> + lists:reverse([{Name, <<>>}|Params]); +parse_param_value(<< C, Rest/bits >>, Params, Name) -> + case C of + $" -> parse_param_quoted_value(Rest, Params, Name, <<>>); + $; -> parse_before_param(Rest, [{Name, <<>>}|Params]); + $\s -> parse_before_param(Rest, [{Name, <<>>}|Params]); + $\t -> parse_before_param(Rest, [{Name, <<>>}|Params]); + C -> parse_param_value(Rest, Params, Name, << C >>) + end. + +parse_param_value(<<>>, Params, Name, Acc) -> + lists:reverse([{Name, Acc}|Params]); +parse_param_value(<< C, Rest/bits >>, Params, Name, Acc) -> + case C of + $; -> parse_before_param(Rest, [{Name, Acc}|Params]); + $\s -> parse_before_param(Rest, [{Name, Acc}|Params]); + $\t -> parse_before_param(Rest, [{Name, Acc}|Params]); + C -> parse_param_value(Rest, Params, Name, << Acc/binary, C >>) + end. + +%% We expect a final $" so no need to test for <<>>. 
+parse_param_quoted_value(<< $\\, C, Rest/bits >>, Params, Name, Acc) -> + parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>); +parse_param_quoted_value(<< $", Rest/bits >>, Params, Name, Acc) -> + parse_before_param(Rest, [{Name, Acc}|Params]); +parse_param_quoted_value(<< C, Rest/bits >>, Params, Name, Acc) + when C =/= $\r -> + parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>). diff --git a/src/wsLib/cow_qs.erl b/src/wsLib/cow_qs.erl new file mode 100644 index 0000000..d812e39 --- /dev/null +++ b/src/wsLib/cow_qs.erl @@ -0,0 +1,563 @@ +%% Copyright (c) 2013-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_qs). + +-export([parse_qs/1]). +-export([qs/1]). +-export([urldecode/1]). +-export([urlencode/1]). + +-type qs_vals() :: [{binary(), binary() | true}]. + +%% @doc Parse an application/x-www-form-urlencoded string. +%% +%% The percent decoding is inlined to greatly improve the performance +%% by avoiding copying binaries twice (once for extracting, once for +%% decoding) instead of just extracting the proper representation. + +-spec parse_qs(binary()) -> qs_vals(). +parse_qs(B) -> + parse_qs_name(B, [], <<>>). + +parse_qs_name(<< $%, H, L, Rest/bits >>, Acc, Name) -> + C = (unhex(H) bsl 4 bor unhex(L)), + parse_qs_name(Rest, Acc, << Name/bits, C >>); +parse_qs_name(<< $+, Rest/bits >>, Acc, Name) -> + parse_qs_name(Rest, Acc, << Name/bits, " " >>); +parse_qs_name(<< $=, Rest/bits >>, Acc, Name) when Name =/= <<>> -> + parse_qs_value(Rest, Acc, Name, <<>>); +parse_qs_name(<< $&, Rest/bits >>, Acc, Name) -> + case Name of + <<>> -> parse_qs_name(Rest, Acc, <<>>); + _ -> parse_qs_name(Rest, [{Name, true}|Acc], <<>>) + end; +parse_qs_name(<< C, Rest/bits >>, Acc, Name) when C =/= $%, C =/= $= -> + parse_qs_name(Rest, Acc, << Name/bits, C >>); +parse_qs_name(<<>>, Acc, Name) -> + case Name of + <<>> -> lists:reverse(Acc); + _ -> lists:reverse([{Name, true}|Acc]) + end. + +parse_qs_value(<< $%, H, L, Rest/bits >>, Acc, Name, Value) -> + C = (unhex(H) bsl 4 bor unhex(L)), + parse_qs_value(Rest, Acc, Name, << Value/bits, C >>); +parse_qs_value(<< $+, Rest/bits >>, Acc, Name, Value) -> + parse_qs_value(Rest, Acc, Name, << Value/bits, " " >>); +parse_qs_value(<< $&, Rest/bits >>, Acc, Name, Value) -> + parse_qs_name(Rest, [{Name, Value}|Acc], <<>>); +parse_qs_value(<< C, Rest/bits >>, Acc, Name, Value) when C =/= $% -> + parse_qs_value(Rest, Acc, Name, << Value/bits, C >>); +parse_qs_value(<<>>, Acc, Name, Value) -> + lists:reverse([{Name, Value}|Acc]). + +-ifdef(TEST). 
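+%% Usage sketch (illustrative; this test is not part of the original module):
+%% a key that appears without an = sign is returned with the value true,
+%% which distinguishes "c" from "c=" (the latter yields an empty binary).
+parse_qs_usage_test() ->
+    [{<<"a">>, <<"b">>}, {<<"c">>, true}, {<<"d">>, <<"e">>}] =
+        parse_qs(<<"a=b&c&d=e">>),
+    [{<<"c">>, <<>>}] = parse_qs(<<"c=">>),
+    ok.
+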
+parse_qs_test_() -> + Tests = [ + {<<>>, []}, + {<<"&">>, []}, + {<<"a">>, [{<<"a">>, true}]}, + {<<"a&">>, [{<<"a">>, true}]}, + {<<"&a">>, [{<<"a">>, true}]}, + {<<"a&b">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"a&&b">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"a&b&">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"=">>, error}, + {<<"=b">>, error}, + {<<"a=">>, [{<<"a">>, <<>>}]}, + {<<"a=b">>, [{<<"a">>, <<"b">>}]}, + {<<"a=&b=">>, [{<<"a">>, <<>>}, {<<"b">>, <<>>}]}, + {<<"a=b&c&d=e">>, [{<<"a">>, <<"b">>}, + {<<"c">>, true}, {<<"d">>, <<"e">>}]}, + {<<"a=b=c&d=e=f&g=h=i">>, [{<<"a">>, <<"b=c">>}, + {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}]}, + {<<"+">>, [{<<" ">>, true}]}, + {<<"+=+">>, [{<<" ">>, <<" ">>}]}, + {<<"a+b=c+d">>, [{<<"a b">>, <<"c d">>}]}, + {<<"+a+=+b+&+c+=+d+">>, [{<<" a ">>, <<" b ">>}, + {<<" c ">>, <<" d ">>}]}, + {<<"a%20b=c%20d">>, [{<<"a b">>, <<"c d">>}]}, + {<<"%25%26%3D=%25%26%3D&_-.=.-_">>, [{<<"%&=">>, <<"%&=">>}, + {<<"_-.">>, <<".-_">>}]}, + {<<"for=extend%2Franch">>, [{<<"for">>, <<"extend/ranch">>}]} + ], + [{Qs, fun() -> + E = try parse_qs(Qs) of + R -> R + catch _:_ -> + error + end + end} || {Qs, E} <- Tests]. + +parse_qs_identity_test_() -> + Tests = [ + <<"+">>, + <<"hl=en&q=erlang+cowboy">>, + <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>, + <<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&" + "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee2" + "60c0b2f2aaad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0." + "696.16&os=3&ov=&rs=vpl&k=cookies%7Csale%7Cbrowser%7Cm" + "ore%7Cprivacy%7Cstatistics%7Cactivities%7Cauction%7Ce" + "mail%7Cfree%7Cin...&t=112373&xt=5%7C61%7C0&tz=-1&ev=x" + "&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pid=536454" + ".55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc=">>, + <<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58." + "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.ht" + "m&re=http%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv" + "=3.0.14&os=1&ov=XP&k=cars%2Cford&rs=js&xt=5%7C22%7C23" + "4&tz=%2B180&tk=key1%3Dvalue1%7Ckey2%3Dvalue2&zl=4%2C5" + "%2C6&za=4&zu=competitor.com&ua=Mozilla%2F5.0+%28Windo" + "ws%3B+U%3B+Windows+NT+6.1%3B+en-US%29+AppleWebKit%2F5" + "34.13+%28KHTML%2C+like+Gecko%29+Chrome%2F9.0.597.98+S" + "afari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&ort" + "b-sid=521732&ortb-xt=IAB3&ortb-ugc=">> + ], + [{V, fun() -> V = qs(parse_qs(V)) end} || V <- Tests]. + +horse_parse_qs_shorter() -> + horse:repeat(20000, + parse_qs(<<"hl=en&q=erlang%20cowboy">>) + ). + +horse_parse_qs_short() -> + horse:repeat(20000, + parse_qs( + <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>) + ). + +horse_parse_qs_long() -> + horse:repeat(20000, + parse_qs(<<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&" + "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee260c0b2f2a" + "aad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0.696.16&os=3&ov=&rs" + "=vpl&k=cookies%7Csale%7Cbrowser%7Cmore%7Cprivacy%7Cstatistics%" + "7Cactivities%7Cauction%7Cemail%7Cfree%7Cin...&t=112373&xt=5%7C" + "61%7C0&tz=-1&ev=x&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pi" + "d=536454.55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc" + "=">>) + ). + +horse_parse_qs_longer() -> + horse:repeat(20000, + parse_qs(<<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58." 
+ "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.htm&re=http" + "%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv=3.0.14&os=1&ov=XP" + "&k=cars%2cford&rs=js&xt=5%7c22%7c234&tz=%2b180&tk=key1%3Dvalue" + "1%7Ckey2%3Dvalue2&zl=4,5,6&za=4&zu=competitor.com&ua=Mozilla%2" + "F5.0%20(Windows%3B%20U%3B%20Windows%20NT%206.1%3B%20en-US)%20A" + "ppleWebKit%2F534.13%20(KHTML%2C%20like%20Gecko)%20Chrome%2F9.0" + ".597.98%20Safari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&o" + "rtb-sid=521732&ortb-xt=IAB3&ortb-ugc=">>) + ). +-endif. + +%% @doc Build an application/x-www-form-urlencoded string. + +-spec qs(qs_vals()) -> binary(). +qs([]) -> + <<>>; +qs(L) -> + qs(L, <<>>). + +qs([], Acc) -> + << $&, Qs/bits >> = Acc, + Qs; +qs([{Name, true}|Tail], Acc) -> + Acc2 = urlencode(Name, << Acc/bits, $& >>), + qs(Tail, Acc2); +qs([{Name, Value}|Tail], Acc) -> + Acc2 = urlencode(Name, << Acc/bits, $& >>), + Acc3 = urlencode(Value, << Acc2/bits, $= >>), + qs(Tail, Acc3). + +-define(QS_SHORTER, [ + {<<"hl">>, <<"en">>}, + {<<"q">>, <<"erlang cowboy">>} +]). + +-define(QS_SHORT, [ + {<<"direction">>, <<"desc">>}, + {<<"for">>, <<"extend/ranch">>}, + {<<"sort">>, <<"updated">>}, + {<<"state">>, <<"open">>} +]). + +-define(QS_LONG, [ + {<<"i">>, <<"EWiIXmPj5gl6">>}, + {<<"v">>, <<"QowBp0oDLQXdd4x_GwiywA">>}, + {<<"ip">>, <<"98.20.31.81">>}, + {<<"la">>, <<"en">>}, + {<<"pg">>, <<"New8.undertonebrandsafe.com/" + "698a2525065ee260c0b2f2aaad89ab82">>}, + {<<"re">>, <<>>}, + {<<"sz">>, <<"1">>}, + {<<"fc">>, <<"1">>}, + {<<"fr">>, <<"140">>}, + {<<"br">>, <<"3">>}, + {<<"bv">>, <<"11.0.696.16">>}, + {<<"os">>, <<"3">>}, + {<<"ov">>, <<>>}, + {<<"rs">>, <<"vpl">>}, + {<<"k">>, <<"cookies|sale|browser|more|privacy|statistics|" + "activities|auction|email|free|in...">>}, + {<<"t">>, <<"112373">>}, + {<<"xt">>, <<"5|61|0">>}, + {<<"tz">>, <<"-1">>}, + {<<"ev">>, <<"x">>}, + {<<"tk">>, <<>>}, + {<<"za">>, <<"1">>}, + {<<"ortb-za">>, <<"1">>}, + {<<"zu">>, <<>>}, + {<<"zl">>, <<>>}, + {<<"ax">>, <<"U">>}, + {<<"ay">>, <<"U">>}, + {<<"ortb-pid">>, <<"536454.55">>}, + {<<"ortb-sid">>, <<"112373.8">>}, + {<<"seats">>, <<"999">>}, + {<<"ortb-xt">>, <<"IAB24">>}, + {<<"ortb-ugc">>, <<>>} +]). + +-define(QS_LONGER, [ + {<<"i">>, <<"9pQNskA">>}, + {<<"v">>, <<"0ySQQd1F">>}, + {<<"ev">>, <<"12345678">>}, + {<<"t">>, <<"12345">>}, + {<<"sz">>, <<"3">>}, + {<<"ip">>, <<"67.58.236.89">>}, + {<<"la">>, <<"en">>}, + {<<"pg">>, <<"http://www.yahoo.com/page1.htm">>}, + {<<"re">>, <<"http://search.google.com">>}, + {<<"fc">>, <<"1">>}, + {<<"fr">>, <<"1">>}, + {<<"br">>, <<"2">>}, + {<<"bv">>, <<"3.0.14">>}, + {<<"os">>, <<"1">>}, + {<<"ov">>, <<"XP">>}, + {<<"k">>, <<"cars,ford">>}, + {<<"rs">>, <<"js">>}, + {<<"xt">>, <<"5|22|234">>}, + {<<"tz">>, <<"+180">>}, + {<<"tk">>, <<"key1=value1|key2=value2">>}, + {<<"zl">>, <<"4,5,6">>}, + {<<"za">>, <<"4">>}, + {<<"zu">>, <<"competitor.com">>}, + {<<"ua">>, <<"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) " + "AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.98 " + "Safari/534.13">>}, + {<<"ortb-za">>, <<"1,6,13">>}, + {<<"ortb-pid">>, <<"521732">>}, + {<<"ortb-sid">>, <<"521732">>}, + {<<"ortb-xt">>, <<"IAB3">>}, + {<<"ortb-ugc">>, <<>>} +]). + +-ifdef(TEST). 
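+%% Usage sketch (illustrative; this test is not part of the original module):
+%% a {Name, true} pair is rendered as the bare name, so qs/1 round-trips
+%% with parse_qs/1 for valueless keys.
+qs_usage_test() ->
+    Vals = [{<<"a">>, <<"b">>}, {<<"c">>, true}],
+    <<"a=b&c">> = qs(Vals),
+    Vals = parse_qs(qs(Vals)),
+    ok.
+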
+qs_test_() -> + Tests = [ + {[<<"a">>], error}, + {[{<<"a">>, <<"b">>, <<"c">>}], error}, + {[], <<>>}, + {[{<<"a">>, true}], <<"a">>}, + {[{<<"a">>, true}, {<<"b">>, true}], <<"a&b">>}, + {[{<<"a">>, <<>>}], <<"a=">>}, + {[{<<"a">>, <<"b">>}], <<"a=b">>}, + {[{<<"a">>, <<>>}, {<<"b">>, <<>>}], <<"a=&b=">>}, + {[{<<"a">>, <<"b">>}, {<<"c">>, true}, {<<"d">>, <<"e">>}], + <<"a=b&c&d=e">>}, + {[{<<"a">>, <<"b=c">>}, {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}], + <<"a=b%3Dc&d=e%3Df&g=h%3Di">>}, + {[{<<" ">>, true}], <<"+">>}, + {[{<<" ">>, <<" ">>}], <<"+=+">>}, + {[{<<"a b">>, <<"c d">>}], <<"a+b=c+d">>}, + {[{<<" a ">>, <<" b ">>}, {<<" c ">>, <<" d ">>}], + <<"+a+=+b+&+c+=+d+">>}, + {[{<<"%&=">>, <<"%&=">>}, {<<"_-.">>, <<".-_">>}], + <<"%25%26%3D=%25%26%3D&_-.=.-_">>}, + {[{<<"for">>, <<"extend/ranch">>}], <<"for=extend%2Franch">>} + ], + [{lists:flatten(io_lib:format("~p", [Vals])), fun() -> + E = try qs(Vals) of + R -> R + catch _:_ -> + error + end + end} || {Vals, E} <- Tests]. + +qs_identity_test_() -> + Tests = [ + [{<<"+">>, true}], + ?QS_SHORTER, + ?QS_SHORT, + ?QS_LONG, + ?QS_LONGER + ], + [{lists:flatten(io_lib:format("~p", [V])), fun() -> + V = parse_qs(qs(V)) + end} || V <- Tests]. + +horse_qs_shorter() -> + horse:repeat(20000, qs(?QS_SHORTER)). + +horse_qs_short() -> + horse:repeat(20000, qs(?QS_SHORT)). + +horse_qs_long() -> + horse:repeat(20000, qs(?QS_LONG)). + +horse_qs_longer() -> + horse:repeat(20000, qs(?QS_LONGER)). +-endif. + +%% @doc Decode a percent encoded string (x-www-form-urlencoded rules). + +-spec urldecode(B) -> B when B::binary(). +urldecode(B) -> + urldecode(B, <<>>). + +urldecode(<< $%, H, L, Rest/bits >>, Acc) -> + C = (unhex(H) bsl 4 bor unhex(L)), + urldecode(Rest, << Acc/bits, C >>); +urldecode(<< $+, Rest/bits >>, Acc) -> + urldecode(Rest, << Acc/bits, " " >>); +urldecode(<< C, Rest/bits >>, Acc) when C =/= $% -> + urldecode(Rest, << Acc/bits, C >>); +urldecode(<<>>, Acc) -> + Acc. + +unhex($0) -> 0; +unhex($1) -> 1; +unhex($2) -> 2; +unhex($3) -> 3; +unhex($4) -> 4; +unhex($5) -> 5; +unhex($6) -> 6; +unhex($7) -> 7; +unhex($8) -> 8; +unhex($9) -> 9; +unhex($A) -> 10; +unhex($B) -> 11; +unhex($C) -> 12; +unhex($D) -> 13; +unhex($E) -> 14; +unhex($F) -> 15; +unhex($a) -> 10; +unhex($b) -> 11; +unhex($c) -> 12; +unhex($d) -> 13; +unhex($e) -> 14; +unhex($f) -> 15. + +-ifdef(TEST). +urldecode_test_() -> + Tests = [ + {<<"%20">>, <<" ">>}, + {<<"+">>, <<" ">>}, + {<<"%00">>, <<0>>}, + {<<"%fF">>, <<255>>}, + {<<"123">>, <<"123">>}, + {<<"%i5">>, error}, + {<<"%5">>, error} + ], + [{Qs, fun() -> + E = try urldecode(Qs) of + R -> R + catch _:_ -> + error + end + end} || {Qs, E} <- Tests]. + +urldecode_identity_test_() -> + Tests = [ + <<"+">>, + <<"nothingnothingnothingnothing">>, + <<"Small+fast+modular+HTTP+server">>, + <<"Small%2C+fast%2C+modular+HTTP+server.">>, + <<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">> + ], + [{V, fun() -> V = urlencode(urldecode(V)) end} || V <- Tests]. + +horse_urldecode() -> + horse:repeat(100000, + urldecode(<<"nothingnothingnothingnothing">>) + ). + +horse_urldecode_plus() -> + horse:repeat(100000, + urldecode(<<"Small+fast+modular+HTTP+server">>) + ). + +horse_urldecode_hex() -> + horse:repeat(100000, + urldecode(<<"Small%2C%20fast%2C%20modular%20HTTP%20server.">>) + ). 
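+
+%% Note (illustrative; this test is not part of the original module): under
+%% x-www-form-urlencoded rules a + sign decodes to a space, unlike
+%% cow_uri:urldecode/1 which leaves + untouched.
+urldecode_plus_usage_test() ->
+    <<"a b c">> = urldecode(<<"a+b%20c">>),
+    ok.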
+ +horse_urldecode_jp_hex() -> + horse:repeat(100000, + urldecode(<<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">>) + ). + +horse_urldecode_mix() -> + horse:repeat(100000, + urldecode(<<"Small%2C+fast%2C+modular+HTTP+server.">>) + ). +-endif. + +%% @doc Percent encode a string (x-www-form-urlencoded rules). + +-spec urlencode(B) -> B when B::binary(). +urlencode(B) -> + urlencode(B, <<>>). + +urlencode(<< $\s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $+ >>); +urlencode(<< $-, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $- >>); +urlencode(<< $., Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $. >>); +urlencode(<< $0, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $0 >>); +urlencode(<< $1, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $1 >>); +urlencode(<< $2, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $2 >>); +urlencode(<< $3, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $3 >>); +urlencode(<< $4, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $4 >>); +urlencode(<< $5, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $5 >>); +urlencode(<< $6, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $6 >>); +urlencode(<< $7, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $7 >>); +urlencode(<< $8, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $8 >>); +urlencode(<< $9, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $9 >>); +urlencode(<< $A, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $A >>); +urlencode(<< $B, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $B >>); +urlencode(<< $C, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $C >>); +urlencode(<< $D, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $D >>); +urlencode(<< $E, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $E >>); +urlencode(<< $F, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $F >>); +urlencode(<< $G, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $G >>); +urlencode(<< $H, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $H >>); +urlencode(<< $I, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $I >>); +urlencode(<< $J, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $J >>); +urlencode(<< $K, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $K >>); +urlencode(<< $L, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $L >>); +urlencode(<< $M, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $M >>); +urlencode(<< $N, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $N >>); +urlencode(<< $O, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $O >>); +urlencode(<< $P, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $P >>); +urlencode(<< $Q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Q >>); +urlencode(<< $R, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $R >>); +urlencode(<< $S, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $S >>); +urlencode(<< $T, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $T >>); +urlencode(<< $U, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $U >>); +urlencode(<< $V, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $V >>); +urlencode(<< $W, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $W >>); +urlencode(<< $X, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $X >>); +urlencode(<< $Y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Y >>); +urlencode(<< $Z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Z >>); +urlencode(<< $_, Rest/bits >>, Acc) -> urlencode(Rest, << 
Acc/bits, $_ >>); +urlencode(<< $a, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $a >>); +urlencode(<< $b, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $b >>); +urlencode(<< $c, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $c >>); +urlencode(<< $d, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $d >>); +urlencode(<< $e, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $e >>); +urlencode(<< $f, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $f >>); +urlencode(<< $g, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $g >>); +urlencode(<< $h, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $h >>); +urlencode(<< $i, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $i >>); +urlencode(<< $j, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $j >>); +urlencode(<< $k, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $k >>); +urlencode(<< $l, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $l >>); +urlencode(<< $m, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $m >>); +urlencode(<< $n, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $n >>); +urlencode(<< $o, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $o >>); +urlencode(<< $p, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $p >>); +urlencode(<< $q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $q >>); +urlencode(<< $r, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $r >>); +urlencode(<< $s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $s >>); +urlencode(<< $t, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $t >>); +urlencode(<< $u, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $u >>); +urlencode(<< $v, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $v >>); +urlencode(<< $w, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $w >>); +urlencode(<< $x, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $x >>); +urlencode(<< $y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $y >>); +urlencode(<< $z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $z >>); +urlencode(<< C, Rest/bits >>, Acc) -> + H = hex(C bsr 4), + L = hex(C band 16#0f), + urlencode(Rest, << Acc/bits, $%, H, L >>); +urlencode(<<>>, Acc) -> + Acc. + +hex( 0) -> $0; +hex( 1) -> $1; +hex( 2) -> $2; +hex( 3) -> $3; +hex( 4) -> $4; +hex( 5) -> $5; +hex( 6) -> $6; +hex( 7) -> $7; +hex( 8) -> $8; +hex( 9) -> $9; +hex(10) -> $A; +hex(11) -> $B; +hex(12) -> $C; +hex(13) -> $D; +hex(14) -> $E; +hex(15) -> $F. + +-ifdef(TEST). +urlencode_test_() -> + Tests = [ + {<<255, 0>>, <<"%FF%00">>}, + {<<255, " ">>, <<"%FF+">>}, + {<<" ">>, <<"+">>}, + {<<"aBc123">>, <<"aBc123">>}, + {<<".-_">>, <<".-_">>} + ], + [{V, fun() -> E = urlencode(V) end} || {V, E} <- Tests]. + +urlencode_identity_test_() -> + Tests = [ + <<"+">>, + <<"nothingnothingnothingnothing">>, + <<"Small fast modular HTTP server">>, + <<"Small, fast, modular HTTP server.">>, + <<227,131,132,227,130,164,227,131,179,227,130,189,227, + 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227, + 129,153,227,130,139,230,151,139,229,190,139,227,128,156>> + ], + [{V, fun() -> V = urldecode(urlencode(V)) end} || V <- Tests]. + +horse_urlencode() -> + horse:repeat(100000, + urlencode(<<"nothingnothingnothingnothing">>) + ). + +horse_urlencode_plus() -> + horse:repeat(100000, + urlencode(<<"Small fast modular HTTP server">>) + ). 
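+
+%% Note (illustrative; this test is not part of the original module): only
+%% the unreserved characters [A-Za-z0-9._-] are left as-is; a space becomes
+%% + and every other byte is percent-encoded.
+urlencode_reserved_usage_test() ->
+    <<"a+b%2Fc">> = urlencode(<<"a b/c">>),
+    ok.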
+ +horse_urlencode_jp() -> + horse:repeat(100000, + urlencode(<<227,131,132,227,130,164,227,131,179,227,130,189,227, + 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227, + 129,153,227,130,139,230,151,139,229,190,139,227,128,156>>) + ). + +horse_urlencode_mix() -> + horse:repeat(100000, + urlencode(<<"Small, fast, modular HTTP server.">>) + ). +-endif. diff --git a/src/wsLib/cow_spdy.erl b/src/wsLib/cow_spdy.erl new file mode 100644 index 0000000..8bda45b --- /dev/null +++ b/src/wsLib/cow_spdy.erl @@ -0,0 +1,313 @@ +%% Copyright (c) 2013-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_spdy). + +%% Zstream. +-export([deflate_init/0]). +-export([inflate_init/0]). + +%% Parse. +-export([split/1]). +-export([parse/2]). + +%% Build. +-export([data/3]). +-export([syn_stream/12]). +-export([syn_reply/6]). +-export([rst_stream/2]). +-export([settings/2]). +-export([ping/1]). +-export([goaway/2]). +%% @todo headers +%% @todo window_update + +-include("cow_spdy.hrl"). + +%% Zstream. + +deflate_init() -> + Zdef = zlib:open(), + ok = zlib:deflateInit(Zdef), + _ = zlib:deflateSetDictionary(Zdef, ?ZDICT), + Zdef. + +inflate_init() -> + Zinf = zlib:open(), + ok = zlib:inflateInit(Zinf), + Zinf. + +%% Parse. + +split(Data = << _:40, Length:24, _/bits >>) + when byte_size(Data) >= Length + 8 -> + Length2 = Length + 8, + << Frame:Length2/binary, Rest/bits >> = Data, + {true, Frame, Rest}; +split(_) -> + false. 
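+
+%% Usage sketch (illustrative; split_all/2 below is not part of this module):
+%% split/1 extracts at most one complete frame, so callers loop over the
+%% buffer, hand each frame to parse/2 and keep the trailing bytes for the
+%% next packet.
+%%
+%%   split_all(Buffer, Zinf) ->
+%%       case cow_spdy:split(Buffer) of
+%%           {true, Frame, Rest} ->
+%%               {Frames, Leftover} = split_all(Rest, Zinf),
+%%               {[cow_spdy:parse(Frame, Zinf)|Frames], Leftover};
+%%           false ->
+%%               {[], Buffer}
+%%       end.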
+ +parse(<< 0:1, StreamID:31, 0:7, IsFinFlag:1, _:24, Data/bits >>, _) -> + {data, StreamID, from_flag(IsFinFlag), Data}; +parse(<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1, + _:25, StreamID:31, _:1, AssocToStreamID:31, Priority:3, _:5, + 0:8, Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, [{<<":host">>, Host}, {<<":method">>, Method}, + {<<":path">>, Path}, {<<":scheme">>, Scheme}, + {<<":version">>, Version}]} -> + {syn_stream, StreamID, AssocToStreamID, from_flag(IsFinFlag), + from_flag(IsUnidirectionalFlag), Priority, Method, + Scheme, Host, Path, Version, Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, _:25, + StreamID:31, Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, [{<<":status">>, Status}, {<<":version">>, Version}]} -> + {syn_reply, StreamID, from_flag(IsFinFlag), + Status, Version, Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 3:16, 0:8, _:56, StatusCode:32 >>, _) + when StatusCode =:= 0; StatusCode > 11 -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 3:16, 0:8, _:25, StreamID:31, StatusCode:32 >>, _) -> + Status = case StatusCode of + 1 -> protocol_error; + 2 -> invalid_stream; + 3 -> refused_stream; + 4 -> unsupported_version; + 5 -> cancel; + 6 -> internal_error; + 7 -> flow_control_error; + 8 -> stream_in_use; + 9 -> stream_already_closed; + 10 -> invalid_credentials; + 11 -> frame_too_large + end, + {rst_stream, StreamID, Status}; +parse(<< 1:1, 3:15, 4:16, 0:7, ClearSettingsFlag:1, _:24, + NbEntries:32, Rest/bits >>, _) -> + try + Settings = [begin + Is0 = 0, + Key = case ID of + 1 -> upload_bandwidth; + 2 -> download_bandwidth; + 3 -> round_trip_time; + 4 -> max_concurrent_streams; + 5 -> current_cwnd; + 6 -> download_retrans_rate; + 7 -> initial_window_size; + 8 -> client_certificate_vector_size + end, + {Key, Value, from_flag(PersistFlag), from_flag(WasPersistedFlag)} + end || << Is0:6, WasPersistedFlag:1, PersistFlag:1, + ID:24, Value:32 >> <= Rest], + NbEntries = length(Settings), + {settings, from_flag(ClearSettingsFlag), Settings} + catch _:_ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 6:16, 0:8, _:24, PingID:32 >>, _) -> + {ping, PingID}; +parse(<< 1:1, 3:15, 7:16, 0:8, _:56, StatusCode:32 >>, _) + when StatusCode > 2 -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 7:16, 0:8, _:25, LastGoodStreamID:31, + StatusCode:32 >>, _) -> + Status = case StatusCode of + 0 -> ok; + 1 -> protocol_error; + 2 -> internal_error + end, + {goaway, LastGoodStreamID, Status}; +parse(<< 1:1, 3:15, 8:16, 0:7, IsFinFlag:1, _:25, StreamID:31, + Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, []} -> + {headers, StreamID, from_flag(IsFinFlag), Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 9:16, 0:8, _:57, 0:31 >>, _) -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 9:16, 0:8, _:25, StreamID:31, + _:1, DeltaWindowSize:31 >>, _) -> + {window_update, StreamID, DeltaWindowSize}; +parse(_, _) -> + {error, badprotocol}. + +parse_headers(Data, Zinf) -> + [<< NbHeaders:32, Rest/bits >>] = inflate(Zinf, Data), + parse_headers(Rest, NbHeaders, [], []). 
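+
+%% The inflated name/value block starts with the number of headers as a
+%% 32-bit integer, followed by length-prefixed name and value binaries.
+%% Names beginning with a colon are SPDY pseudo-headers: they are kept in a
+%% separate, sorted list so the syn_stream/syn_reply clauses above can
+%% match on them directly.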
+ +parse_headers(<<>>, 0, Headers, SpHeaders) -> + {ok, lists:reverse(Headers), lists:sort(SpHeaders)}; +parse_headers(<<>>, _, _, _) -> + error; +parse_headers(_, 0, _, _) -> + error; +parse_headers(<< 0:32, _/bits >>, _, _, _) -> + error; +parse_headers(<< L1:32, Key:L1/binary, L2:32, Value:L2/binary, Rest/bits >>, + NbHeaders, Acc, SpAcc) -> + case Key of + << $:, _/bits >> -> + parse_headers(Rest, NbHeaders - 1, Acc, + lists:keystore(Key, 1, SpAcc, {Key, Value})); + _ -> + parse_headers(Rest, NbHeaders - 1, [{Key, Value}|Acc], SpAcc) + end. + +inflate(Zinf, Data) -> + try + zlib:inflate(Zinf, Data) + catch _:_ -> + ok = zlib:inflateSetDictionary(Zinf, ?ZDICT), + zlib:inflate(Zinf, <<>>) + end. + +from_flag(0) -> false; +from_flag(1) -> true. + +%% Build. + +data(StreamID, IsFin, Data) -> + IsFinFlag = to_flag(IsFin), + Length = iolist_size(Data), + [<< 0:1, StreamID:31, 0:7, IsFinFlag:1, Length:24 >>, Data]. + +syn_stream(Zdef, StreamID, AssocToStreamID, IsFin, IsUnidirectional, + Priority, Method, Scheme, Host, Path, Version, Headers) -> + IsFinFlag = to_flag(IsFin), + IsUnidirectionalFlag = to_flag(IsUnidirectional), + HeaderBlock = build_headers(Zdef, [ + {<<":method">>, Method}, + {<<":scheme">>, Scheme}, + {<<":host">>, Host}, + {<<":path">>, Path}, + {<<":version">>, Version} + |Headers]), + Length = 10 + iolist_size(HeaderBlock), + [<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1, + Length:24, 0:1, StreamID:31, 0:1, AssocToStreamID:31, + Priority:3, 0:5, 0:8 >>, HeaderBlock]. + +syn_reply(Zdef, StreamID, IsFin, Status, Version, Headers) -> + IsFinFlag = to_flag(IsFin), + HeaderBlock = build_headers(Zdef, [ + {<<":status">>, Status}, + {<<":version">>, Version} + |Headers]), + Length = 4 + iolist_size(HeaderBlock), + [<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, Length:24, + 0:1, StreamID:31 >>, HeaderBlock]. + +rst_stream(StreamID, Status) -> + StatusCode = case Status of + protocol_error -> 1; + invalid_stream -> 2; + refused_stream -> 3; + unsupported_version -> 4; + cancel -> 5; + internal_error -> 6; + flow_control_error -> 7; + stream_in_use -> 8; + stream_already_closed -> 9; + invalid_credentials -> 10; + frame_too_large -> 11 + end, + << 1:1, 3:15, 3:16, 0:8, 8:24, + 0:1, StreamID:31, StatusCode:32 >>. + +settings(ClearSettingsFlag, Settings) -> + IsClearSettingsFlag = to_flag(ClearSettingsFlag), + NbEntries = length(Settings), + Entries = [begin + IsWasPersistedFlag = to_flag(WasPersistedFlag), + IsPersistFlag = to_flag(PersistFlag), + ID = case Key of + upload_bandwidth -> 1; + download_bandwidth -> 2; + round_trip_time -> 3; + max_concurrent_streams -> 4; + current_cwnd -> 5; + download_retrans_rate -> 6; + initial_window_size -> 7; + client_certificate_vector_size -> 8 + end, + << 0:6, IsWasPersistedFlag:1, IsPersistFlag:1, ID:24, Value:32 >> + end || {Key, Value, WasPersistedFlag, PersistFlag} <- Settings], + Length = 4 + iolist_size(Entries), + [<< 1:1, 3:15, 4:16, 0:7, IsClearSettingsFlag:1, Length:24, + NbEntries:32 >>, Entries]. + +-ifdef(TEST). +settings_frame_test() -> + ClearSettingsFlag = false, + Settings = [{max_concurrent_streams,1000,false,false}, + {initial_window_size,10485760,false,false}], + Bin = list_to_binary(cow_spdy:settings(ClearSettingsFlag, Settings)), + P = cow_spdy:parse(Bin, undefined), + P = {settings, ClearSettingsFlag, Settings}, + ok. +-endif. + +ping(PingID) -> + << 1:1, 3:15, 6:16, 0:8, 4:24, PingID:32 >>. 
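+
+-ifdef(TEST).
+%% Round-trip sketch (illustrative; this test is not part of the original
+%% module): frames built by data/3 and ping/1 parse back to the terms they
+%% were built from.
+build_parse_roundtrip_test() ->
+    Bin = iolist_to_binary(data(1, true, <<"hello">>)),
+    {true, Frame, <<>>} = split(Bin),
+    {data, 1, true, <<"hello">>} = parse(Frame, undefined),
+    {ping, 123} = parse(ping(123), undefined),
+    ok.
+-endif.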
+ +goaway(LastGoodStreamID, Status) -> + StatusCode = case Status of + ok -> 0; + protocol_error -> 1; + internal_error -> 2 + end, + << 1:1, 3:15, 7:16, 0:8, 8:24, + 0:1, LastGoodStreamID:31, StatusCode:32 >>. + +%% @todo headers +%% @todo window_update + +build_headers(Zdef, Headers) -> + Headers1 = merge_headers(lists:sort(Headers), []), + NbHeaders = length(Headers1), + Headers2 = [begin + L1 = iolist_size(Key), + L2 = iolist_size(Value), + [<< L1:32 >>, Key, << L2:32 >>, Value] + end || {Key, Value} <- Headers1], + zlib:deflate(Zdef, [<< NbHeaders:32 >>, Headers2], full). + +merge_headers([], Acc) -> + lists:reverse(Acc); +merge_headers([{Name, Value1}, {Name, Value2}|Tail], Acc) -> + merge_headers([{Name, [Value1, 0, Value2]}|Tail], Acc); +merge_headers([Head|Tail], Acc) -> + merge_headers(Tail, [Head|Acc]). + +-ifdef(TEST). +merge_headers_test_() -> + Tests = [ + {[{<<"set-cookie">>, <<"session=123">>}, {<<"set-cookie">>, <<"other=456">>}, {<<"content-type">>, <<"text/html">>}], + [{<<"set-cookie">>, [<<"session=123">>, 0, <<"other=456">>]}, {<<"content-type">>, <<"text/html">>}]} + ], + [fun() -> D = merge_headers(R, []) end || {R, D} <- Tests]. +-endif. + +to_flag(false) -> 0; +to_flag(true) -> 1. diff --git a/src/wsLib/cow_spdy.hrl b/src/wsLib/cow_spdy.hrl new file mode 100644 index 0000000..9637b1c --- /dev/null +++ b/src/wsLib/cow_spdy.hrl @@ -0,0 +1,181 @@ +%% Zlib dictionary. + +-define(ZDICT, << + 16#00, 16#00, 16#00, 16#07, 16#6f, 16#70, 16#74, 16#69, + 16#6f, 16#6e, 16#73, 16#00, 16#00, 16#00, 16#04, 16#68, + 16#65, 16#61, 16#64, 16#00, 16#00, 16#00, 16#04, 16#70, + 16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#03, 16#70, + 16#75, 16#74, 16#00, 16#00, 16#00, 16#06, 16#64, 16#65, + 16#6c, 16#65, 16#74, 16#65, 16#00, 16#00, 16#00, 16#05, + 16#74, 16#72, 16#61, 16#63, 16#65, 16#00, 16#00, 16#00, + 16#06, 16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#00, + 16#00, 16#00, 16#0e, 16#61, 16#63, 16#63, 16#65, 16#70, + 16#74, 16#2d, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65, + 16#74, 16#00, 16#00, 16#00, 16#0f, 16#61, 16#63, 16#63, + 16#65, 16#70, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f, + 16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#0f, + 16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#2d, 16#6c, + 16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65, 16#00, + 16#00, 16#00, 16#0d, 16#61, 16#63, 16#63, 16#65, 16#70, + 16#74, 16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#73, + 16#00, 16#00, 16#00, 16#03, 16#61, 16#67, 16#65, 16#00, + 16#00, 16#00, 16#05, 16#61, 16#6c, 16#6c, 16#6f, 16#77, + 16#00, 16#00, 16#00, 16#0d, 16#61, 16#75, 16#74, 16#68, + 16#6f, 16#72, 16#69, 16#7a, 16#61, 16#74, 16#69, 16#6f, + 16#6e, 16#00, 16#00, 16#00, 16#0d, 16#63, 16#61, 16#63, + 16#68, 16#65, 16#2d, 16#63, 16#6f, 16#6e, 16#74, 16#72, + 16#6f, 16#6c, 16#00, 16#00, 16#00, 16#0a, 16#63, 16#6f, + 16#6e, 16#6e, 16#65, 16#63, 16#74, 16#69, 16#6f, 16#6e, + 16#00, 16#00, 16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#62, 16#61, 16#73, 16#65, + 16#00, 16#00, 16#00, 16#10, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f, + 16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#10, + 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d, + 16#6c, 16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65, + 16#00, 16#00, 16#00, 16#0e, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#65, 16#6e, 16#67, + 16#74, 16#68, 16#00, 16#00, 16#00, 16#10, 16#63, 16#6f, + 16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#6f, + 16#63, 16#61, 16#74, 16#69, 
16#6f, 16#6e, 16#00, 16#00, + 16#00, 16#0b, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, + 16#74, 16#2d, 16#6d, 16#64, 16#35, 16#00, 16#00, 16#00, + 16#0d, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74, + 16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00, + 16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, + 16#74, 16#2d, 16#74, 16#79, 16#70, 16#65, 16#00, 16#00, + 16#00, 16#04, 16#64, 16#61, 16#74, 16#65, 16#00, 16#00, + 16#00, 16#04, 16#65, 16#74, 16#61, 16#67, 16#00, 16#00, + 16#00, 16#06, 16#65, 16#78, 16#70, 16#65, 16#63, 16#74, + 16#00, 16#00, 16#00, 16#07, 16#65, 16#78, 16#70, 16#69, + 16#72, 16#65, 16#73, 16#00, 16#00, 16#00, 16#04, 16#66, + 16#72, 16#6f, 16#6d, 16#00, 16#00, 16#00, 16#04, 16#68, + 16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#08, 16#69, + 16#66, 16#2d, 16#6d, 16#61, 16#74, 16#63, 16#68, 16#00, + 16#00, 16#00, 16#11, 16#69, 16#66, 16#2d, 16#6d, 16#6f, + 16#64, 16#69, 16#66, 16#69, 16#65, 16#64, 16#2d, 16#73, + 16#69, 16#6e, 16#63, 16#65, 16#00, 16#00, 16#00, 16#0d, + 16#69, 16#66, 16#2d, 16#6e, 16#6f, 16#6e, 16#65, 16#2d, + 16#6d, 16#61, 16#74, 16#63, 16#68, 16#00, 16#00, 16#00, + 16#08, 16#69, 16#66, 16#2d, 16#72, 16#61, 16#6e, 16#67, + 16#65, 16#00, 16#00, 16#00, 16#13, 16#69, 16#66, 16#2d, + 16#75, 16#6e, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69, + 16#65, 16#64, 16#2d, 16#73, 16#69, 16#6e, 16#63, 16#65, + 16#00, 16#00, 16#00, 16#0d, 16#6c, 16#61, 16#73, 16#74, + 16#2d, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69, 16#65, + 16#64, 16#00, 16#00, 16#00, 16#08, 16#6c, 16#6f, 16#63, + 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00, + 16#0c, 16#6d, 16#61, 16#78, 16#2d, 16#66, 16#6f, 16#72, + 16#77, 16#61, 16#72, 16#64, 16#73, 16#00, 16#00, 16#00, + 16#06, 16#70, 16#72, 16#61, 16#67, 16#6d, 16#61, 16#00, + 16#00, 16#00, 16#12, 16#70, 16#72, 16#6f, 16#78, 16#79, + 16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e, 16#74, + 16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00, 16#00, + 16#13, 16#70, 16#72, 16#6f, 16#78, 16#79, 16#2d, 16#61, + 16#75, 16#74, 16#68, 16#6f, 16#72, 16#69, 16#7a, 16#61, + 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00, 16#05, + 16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00, 16#00, + 16#07, 16#72, 16#65, 16#66, 16#65, 16#72, 16#65, 16#72, + 16#00, 16#00, 16#00, 16#0b, 16#72, 16#65, 16#74, 16#72, + 16#79, 16#2d, 16#61, 16#66, 16#74, 16#65, 16#72, 16#00, + 16#00, 16#00, 16#06, 16#73, 16#65, 16#72, 16#76, 16#65, + 16#72, 16#00, 16#00, 16#00, 16#02, 16#74, 16#65, 16#00, + 16#00, 16#00, 16#07, 16#74, 16#72, 16#61, 16#69, 16#6c, + 16#65, 16#72, 16#00, 16#00, 16#00, 16#11, 16#74, 16#72, + 16#61, 16#6e, 16#73, 16#66, 16#65, 16#72, 16#2d, 16#65, + 16#6e, 16#63, 16#6f, 16#64, 16#69, 16#6e, 16#67, 16#00, + 16#00, 16#00, 16#07, 16#75, 16#70, 16#67, 16#72, 16#61, + 16#64, 16#65, 16#00, 16#00, 16#00, 16#0a, 16#75, 16#73, + 16#65, 16#72, 16#2d, 16#61, 16#67, 16#65, 16#6e, 16#74, + 16#00, 16#00, 16#00, 16#04, 16#76, 16#61, 16#72, 16#79, + 16#00, 16#00, 16#00, 16#03, 16#76, 16#69, 16#61, 16#00, + 16#00, 16#00, 16#07, 16#77, 16#61, 16#72, 16#6e, 16#69, + 16#6e, 16#67, 16#00, 16#00, 16#00, 16#10, 16#77, 16#77, + 16#77, 16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e, + 16#74, 16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00, + 16#00, 16#06, 16#6d, 16#65, 16#74, 16#68, 16#6f, 16#64, + 16#00, 16#00, 16#00, 16#03, 16#67, 16#65, 16#74, 16#00, + 16#00, 16#00, 16#06, 16#73, 16#74, 16#61, 16#74, 16#75, + 16#73, 16#00, 16#00, 16#00, 16#06, 16#32, 16#30, 16#30, + 16#20, 16#4f, 16#4b, 16#00, 16#00, 16#00, 16#07, 16#76, + 16#65, 16#72, 16#73, 16#69, 16#6f, 16#6e, 
16#00, 16#00, + 16#00, 16#08, 16#48, 16#54, 16#54, 16#50, 16#2f, 16#31, + 16#2e, 16#31, 16#00, 16#00, 16#00, 16#03, 16#75, 16#72, + 16#6c, 16#00, 16#00, 16#00, 16#06, 16#70, 16#75, 16#62, + 16#6c, 16#69, 16#63, 16#00, 16#00, 16#00, 16#0a, 16#73, + 16#65, 16#74, 16#2d, 16#63, 16#6f, 16#6f, 16#6b, 16#69, + 16#65, 16#00, 16#00, 16#00, 16#0a, 16#6b, 16#65, 16#65, + 16#70, 16#2d, 16#61, 16#6c, 16#69, 16#76, 16#65, 16#00, + 16#00, 16#00, 16#06, 16#6f, 16#72, 16#69, 16#67, 16#69, + 16#6e, 16#31, 16#30, 16#30, 16#31, 16#30, 16#31, 16#32, + 16#30, 16#31, 16#32, 16#30, 16#32, 16#32, 16#30, 16#35, + 16#32, 16#30, 16#36, 16#33, 16#30, 16#30, 16#33, 16#30, + 16#32, 16#33, 16#30, 16#33, 16#33, 16#30, 16#34, 16#33, + 16#30, 16#35, 16#33, 16#30, 16#36, 16#33, 16#30, 16#37, + 16#34, 16#30, 16#32, 16#34, 16#30, 16#35, 16#34, 16#30, + 16#36, 16#34, 16#30, 16#37, 16#34, 16#30, 16#38, 16#34, + 16#30, 16#39, 16#34, 16#31, 16#30, 16#34, 16#31, 16#31, + 16#34, 16#31, 16#32, 16#34, 16#31, 16#33, 16#34, 16#31, + 16#34, 16#34, 16#31, 16#35, 16#34, 16#31, 16#36, 16#34, + 16#31, 16#37, 16#35, 16#30, 16#32, 16#35, 16#30, 16#34, + 16#35, 16#30, 16#35, 16#32, 16#30, 16#33, 16#20, 16#4e, + 16#6f, 16#6e, 16#2d, 16#41, 16#75, 16#74, 16#68, 16#6f, + 16#72, 16#69, 16#74, 16#61, 16#74, 16#69, 16#76, 16#65, + 16#20, 16#49, 16#6e, 16#66, 16#6f, 16#72, 16#6d, 16#61, + 16#74, 16#69, 16#6f, 16#6e, 16#32, 16#30, 16#34, 16#20, + 16#4e, 16#6f, 16#20, 16#43, 16#6f, 16#6e, 16#74, 16#65, + 16#6e, 16#74, 16#33, 16#30, 16#31, 16#20, 16#4d, 16#6f, + 16#76, 16#65, 16#64, 16#20, 16#50, 16#65, 16#72, 16#6d, + 16#61, 16#6e, 16#65, 16#6e, 16#74, 16#6c, 16#79, 16#34, + 16#30, 16#30, 16#20, 16#42, 16#61, 16#64, 16#20, 16#52, + 16#65, 16#71, 16#75, 16#65, 16#73, 16#74, 16#34, 16#30, + 16#31, 16#20, 16#55, 16#6e, 16#61, 16#75, 16#74, 16#68, + 16#6f, 16#72, 16#69, 16#7a, 16#65, 16#64, 16#34, 16#30, + 16#33, 16#20, 16#46, 16#6f, 16#72, 16#62, 16#69, 16#64, + 16#64, 16#65, 16#6e, 16#34, 16#30, 16#34, 16#20, 16#4e, + 16#6f, 16#74, 16#20, 16#46, 16#6f, 16#75, 16#6e, 16#64, + 16#35, 16#30, 16#30, 16#20, 16#49, 16#6e, 16#74, 16#65, + 16#72, 16#6e, 16#61, 16#6c, 16#20, 16#53, 16#65, 16#72, + 16#76, 16#65, 16#72, 16#20, 16#45, 16#72, 16#72, 16#6f, + 16#72, 16#35, 16#30, 16#31, 16#20, 16#4e, 16#6f, 16#74, + 16#20, 16#49, 16#6d, 16#70, 16#6c, 16#65, 16#6d, 16#65, + 16#6e, 16#74, 16#65, 16#64, 16#35, 16#30, 16#33, 16#20, + 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, + 16#55, 16#6e, 16#61, 16#76, 16#61, 16#69, 16#6c, 16#61, + 16#62, 16#6c, 16#65, 16#4a, 16#61, 16#6e, 16#20, 16#46, + 16#65, 16#62, 16#20, 16#4d, 16#61, 16#72, 16#20, 16#41, + 16#70, 16#72, 16#20, 16#4d, 16#61, 16#79, 16#20, 16#4a, + 16#75, 16#6e, 16#20, 16#4a, 16#75, 16#6c, 16#20, 16#41, + 16#75, 16#67, 16#20, 16#53, 16#65, 16#70, 16#74, 16#20, + 16#4f, 16#63, 16#74, 16#20, 16#4e, 16#6f, 16#76, 16#20, + 16#44, 16#65, 16#63, 16#20, 16#30, 16#30, 16#3a, 16#30, + 16#30, 16#3a, 16#30, 16#30, 16#20, 16#4d, 16#6f, 16#6e, + 16#2c, 16#20, 16#54, 16#75, 16#65, 16#2c, 16#20, 16#57, + 16#65, 16#64, 16#2c, 16#20, 16#54, 16#68, 16#75, 16#2c, + 16#20, 16#46, 16#72, 16#69, 16#2c, 16#20, 16#53, 16#61, + 16#74, 16#2c, 16#20, 16#53, 16#75, 16#6e, 16#2c, 16#20, + 16#47, 16#4d, 16#54, 16#63, 16#68, 16#75, 16#6e, 16#6b, + 16#65, 16#64, 16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f, + 16#68, 16#74, 16#6d, 16#6c, 16#2c, 16#69, 16#6d, 16#61, + 16#67, 16#65, 16#2f, 16#70, 16#6e, 16#67, 16#2c, 16#69, + 16#6d, 16#61, 16#67, 16#65, 16#2f, 16#6a, 16#70, 16#67, + 16#2c, 16#69, 16#6d, 16#61, 16#67, 16#65, 16#2f, 16#67, + 
16#69, 16#66, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69, + 16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78, + 16#6d, 16#6c, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69, + 16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78, + 16#68, 16#74, 16#6d, 16#6c, 16#2b, 16#78, 16#6d, 16#6c, + 16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f, 16#70, 16#6c, + 16#61, 16#69, 16#6e, 16#2c, 16#74, 16#65, 16#78, 16#74, + 16#2f, 16#6a, 16#61, 16#76, 16#61, 16#73, 16#63, 16#72, + 16#69, 16#70, 16#74, 16#2c, 16#70, 16#75, 16#62, 16#6c, + 16#69, 16#63, 16#70, 16#72, 16#69, 16#76, 16#61, 16#74, + 16#65, 16#6d, 16#61, 16#78, 16#2d, 16#61, 16#67, 16#65, + 16#3d, 16#67, 16#7a, 16#69, 16#70, 16#2c, 16#64, 16#65, + 16#66, 16#6c, 16#61, 16#74, 16#65, 16#2c, 16#73, 16#64, + 16#63, 16#68, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65, + 16#74, 16#3d, 16#75, 16#74, 16#66, 16#2d, 16#38, 16#63, + 16#68, 16#61, 16#72, 16#73, 16#65, 16#74, 16#3d, 16#69, + 16#73, 16#6f, 16#2d, 16#38, 16#38, 16#35, 16#39, 16#2d, + 16#31, 16#2c, 16#75, 16#74, 16#66, 16#2d, 16#2c, 16#2a, + 16#2c, 16#65, 16#6e, 16#71, 16#3d, 16#30, 16#2e >>). diff --git a/src/wsLib/cow_sse.erl b/src/wsLib/cow_sse.erl new file mode 100644 index 0000000..7aa98ce --- /dev/null +++ b/src/wsLib/cow_sse.erl @@ -0,0 +1,348 @@ +%% Copyright (c) 2017-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_sse). + +-export([init/0]). +-export([parse/2]). +-export([events/1]). +-export([event/1]). + +-record(state, { + state_name = bom :: bom | events, + buffer = <<>> :: binary(), + last_event_id = <<>> :: binary(), + last_event_id_set = false :: boolean(), + event_type = <<>> :: binary(), + data = [] :: iolist(), + retry = undefined :: undefined | non_neg_integer() +}). +-type state() :: #state{}. +-export_type([state/0]). + +-type parsed_event() :: #{ + last_event_id := binary(), + event_type := binary(), + data := iolist() +}. + +-type event() :: #{ + comment => iodata(), + data => iodata(), + event => iodata() | atom(), + id => iodata(), + retry => non_neg_integer() +}. +-export_type([event/0]). + +-spec init() -> state(). +init() -> + #state{}. + +%% @todo Add a function to retrieve the retry value from the state. + +-spec parse(binary(), state()) + -> {event, parsed_event(), State} | {more, State}. +parse(Data0, State=#state{state_name=bom, buffer=Buffer}) -> + Data1 = case Buffer of + <<>> -> Data0; + _ -> << Buffer/binary, Data0/binary >> + end, + case Data1 of + %% Skip the BOM. + << 16#fe, 16#ff, Data/bits >> -> + parse_event(Data, State#state{state_name=events, buffer= <<>>}); + %% Not enough data to know wether we have a BOM. + << 16#fe >> -> + {more, State#state{buffer=Data1}}; + <<>> -> + {more, State}; + %% No BOM. 
+ _ -> + parse_event(Data1, State#state{state_name=events, buffer= <<>>}) + end; +%% Try to process data from the buffer if there is no new input. +parse(<<>>, State=#state{buffer=Buffer}) -> + parse_event(Buffer, State#state{buffer= <<>>}); +%% Otherwise process the input data as-is. +parse(Data0, State=#state{buffer=Buffer}) -> + Data = case Buffer of + <<>> -> Data0; + _ -> << Buffer/binary, Data0/binary >> + end, + parse_event(Data, State). + +parse_event(Data, State0) -> + case binary:split(Data, [<<"\r\n">>, <<"\r">>, <<"\n">>]) of + [Line, Rest] -> + case parse_line(Line, State0) of + {ok, State} -> + parse_event(Rest, State); + {event, Event, State} -> + {event, Event, State#state{buffer=Rest}} + end; + [_] -> + {more, State0#state{buffer=Data}} + end. + +%% Dispatch events on empty line. +parse_line(<<>>, State) -> + dispatch_event(State); +%% Ignore comments. +parse_line(<< $:, _/bits >>, State) -> + {ok, State}; +%% Normal line. +parse_line(Line, State) -> + case binary:split(Line, [<<":\s">>, <<":">>]) of + [Field, Value] -> + process_field(Field, Value, State); + [Field] -> + process_field(Field, <<>>, State) + end. + +process_field(<<"event">>, Value, State) -> + {ok, State#state{event_type=Value}}; +process_field(<<"data">>, Value, State=#state{data=Data}) -> + {ok, State#state{data=[<<$\n>>, Value|Data]}}; +process_field(<<"id">>, Value, State) -> + {ok, State#state{last_event_id=Value, last_event_id_set=true}}; +process_field(<<"retry">>, Value, State) -> + try + {ok, State#state{retry=binary_to_integer(Value)}} + catch _:_ -> + {ok, State} + end; +process_field(_, _, State) -> + {ok, State}. + +%% Data is an empty string; abort. +dispatch_event(State=#state{last_event_id_set=false, data=[]}) -> + {ok, State#state{event_type= <<>>}}; +%% Data is an empty string but we have a last_event_id: +%% propagate it on its own so that the caller knows the +%% most recent ID. +dispatch_event(State=#state{last_event_id=LastEventID, data=[]}) -> + {event, #{ + last_event_id => LastEventID + }, State#state{last_event_id_set=false, event_type= <<>>}}; +%% Dispatch the event. +%% +%% Always remove the last linebreak from the data. +dispatch_event(State=#state{last_event_id=LastEventID, + event_type=EventType, data=[_|Data]}) -> + {event, #{ + last_event_id => LastEventID, + event_type => case EventType of + <<>> -> <<"message">>; + _ -> EventType + end, + data => lists:reverse(Data) + }, State#state{last_event_id_set=false, event_type= <<>>, data=[]}}. + +-ifdef(TEST). +parse_example1_test() -> + {event, #{ + event_type := <<"message">>, + last_event_id := <<>>, + data := Data + }, State} = parse(<< + "data: YHOO\n" + "data: +2\n" + "data: 10\n" + "\n">>, init()), + <<"YHOO\n+2\n10">> = iolist_to_binary(Data), + {more, _} = parse(<<>>, State), + ok. + +parse_example2_test() -> + {event, #{ + event_type := <<"message">>, + last_event_id := <<"1">>, + data := Data1 + }, State0} = parse(<< + ": test stream\n" + "\n" + "data: first event\n" + "id: 1\n" + "\n" + "data:second event\n" + "id\n" + "\n" + "data: third event\n" + "\n">>, init()), + <<"first event">> = iolist_to_binary(Data1), + {event, #{ + event_type := <<"message">>, + last_event_id := <<>>, + data := Data2 + }, State1} = parse(<<>>, State0), + <<"second event">> = iolist_to_binary(Data2), + {event, #{ + event_type := <<"message">>, + last_event_id := <<>>, + data := Data3 + }, State} = parse(<<>>, State1), + <<" third event">> = iolist_to_binary(Data3), + {more, _} = parse(<<>>, State), + ok. 
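+
+%% Usage sketch (illustrative; this test is not part of the original module):
+%% input may arrive split anywhere, even mid-line; parse/2 buffers partial
+%% data and only returns the event once the terminating empty line is seen.
+parse_partial_input_test() ->
+    {more, State0} = parse(<<"data: he">>, init()),
+    {more, State1} = parse(<<"llo\n">>, State0),
+    {event, #{data := Data}, _} = parse(<<"\n">>, State1),
+    <<"hello">> = iolist_to_binary(Data),
+    ok.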
+ +parse_example3_test() -> + {event, #{ + event_type := <<"message">>, + last_event_id := <<>>, + data := Data1 + }, State0} = parse(<< + "data\n" + "\n" + "data\n" + "data\n" + "\n" + "data:\n">>, init()), + <<>> = iolist_to_binary(Data1), + {event, #{ + event_type := <<"message">>, + last_event_id := <<>>, + data := Data2 + }, State} = parse(<<>>, State0), + <<"\n">> = iolist_to_binary(Data2), + {more, _} = parse(<<>>, State), + ok. + +parse_example4_test() -> + {event, Event, State0} = parse(<< + "data:test\n" + "\n" + "data: test\n" + "\n">>, init()), + {event, Event, State} = parse(<<>>, State0), + {more, _} = parse(<<>>, State), + ok. + +parse_id_without_data_test() -> + {event, Event1, State0} = parse(<< + "id: 1\n" + "\n" + "data: data\n" + "\n" + "id: 2\n" + "\n">>, init()), + 1 = maps:size(Event1), + #{last_event_id := <<"1">>} = Event1, + {event, #{ + event_type := <<"message">>, + last_event_id := <<"1">>, + data := Data + }, State1} = parse(<<>>, State0), + <<"data">> = iolist_to_binary(Data), + {event, Event2, State} = parse(<<>>, State1), + 1 = maps:size(Event2), + #{last_event_id := <<"2">>} = Event2, + {more, _} = parse(<<>>, State), + ok. + +parse_repeated_id_without_data_test() -> + {event, Event1, State0} = parse(<< + "id: 1\n" + "\n" + "event: message\n" %% This will be ignored since there's no data. + "\n" + "id: 1\n" + "\n" + "id: 2\n" + "\n">>, init()), + {event, Event1, State1} = parse(<<>>, State0), + 1 = maps:size(Event1), + #{last_event_id := <<"1">>} = Event1, + {event, Event2, State} = parse(<<>>, State1), + 1 = maps:size(Event2), + #{last_event_id := <<"2">>} = Event2, + {more, _} = parse(<<>>, State), + ok. + +parse_split_event_test() -> + {more, State} = parse(<< + "data: AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA">>, init()), + {event, _, _} = parse(<<"==\n\n">>, State), + ok. +-endif. + +-spec events([event()]) -> iolist(). +events(Events) -> + [event(Event) || Event <- Events]. + +-spec event(event()) -> iolist(). +event(Event) -> + [ + event_comment(Event), + event_id(Event), + event_name(Event), + event_data(Event), + event_retry(Event), + $\n + ]. + +event_comment(#{comment := Comment}) -> + prefix_lines(Comment, <<>>); +event_comment(_) -> + []. + +event_id(#{id := ID}) -> + nomatch = binary:match(iolist_to_binary(ID), <<"\n">>), + [<<"id: ">>, ID, $\n]; +event_id(_) -> + []. + +event_name(#{event := Name0}) -> + Name = if + is_atom(Name0) -> atom_to_binary(Name0, utf8); + true -> iolist_to_binary(Name0) + end, + nomatch = binary:match(Name, <<"\n">>), + [<<"event: ">>, Name, $\n]; +event_name(_) -> + []. + +event_data(#{data := Data}) -> + prefix_lines(Data, <<"data">>); +event_data(_) -> + []. + +event_retry(#{retry := Retry}) -> + [<<"retry: ">>, integer_to_binary(Retry), $\n]; +event_retry(_) -> + []. + +prefix_lines(IoData, Prefix) -> + Lines = binary:split(iolist_to_binary(IoData), <<"\n">>, [global]), + [[Prefix, <<": ">>, Line, $\n] || Line <- Lines]. + +-ifdef(TEST). 
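+%% Serialization sketch (illustrative; this test is not part of the original
+%% module): multi-line data is emitted as one data: field per line and the
+%% event is terminated by an empty line.
+event_wire_format_test() ->
+    <<"id: 1\nevent: message\ndata: hello\ndata: world\n\n">> =
+        iolist_to_binary(event(#{
+            id => <<"1">>,
+            event => message,
+            data => <<"hello\nworld">>
+        })),
+    ok.
+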
+event_test() -> + _ = event(#{}), + _ = event(#{comment => "test"}), + _ = event(#{data => "test"}), + _ = event(#{data => "test\ntest\ntest"}), + _ = event(#{data => "test\ntest\ntest\n"}), + _ = event(#{data => <<"test\ntest\ntest">>}), + _ = event(#{data => [<<"test">>, $\n, <<"test">>, [$\n, "test"]]}), + _ = event(#{event => test}), + _ = event(#{event => "test"}), + _ = event(#{id => "test"}), + _ = event(#{retry => 5000}), + _ = event(#{event => "test", data => "test"}), + _ = event(#{id => "test", event => "test", data => "test"}), + ok. +-endif. diff --git a/src/wsLib/cow_uri.erl b/src/wsLib/cow_uri.erl new file mode 100644 index 0000000..c0d9903 --- /dev/null +++ b/src/wsLib/cow_uri.erl @@ -0,0 +1,339 @@ +%% Copyright (c) 2016-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_uri). + +-export([urldecode/1]). +-export([urlencode/1]). + +%% @doc Decode a percent encoded string. (RFC3986 2.1) + +-spec urldecode(B) -> B when B::binary(). +urldecode(B) -> + urldecode(B, <<>>). + +urldecode(<< $%, H, L, Rest/bits >>, Acc) -> + C = (unhex(H) bsl 4 bor unhex(L)), + urldecode(Rest, << Acc/bits, C >>); +urldecode(<< $!, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $! >>); +urldecode(<< $$, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $$ >>); +urldecode(<< $&, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $& >>); +urldecode(<< $', Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $' >>); +urldecode(<< $(, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $( >>); +urldecode(<< $), Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $) >>); +urldecode(<< $*, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $* >>); +urldecode(<< $+, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $+ >>); +urldecode(<< $,, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $, >>); +urldecode(<< $-, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $- >>); +urldecode(<< $., Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $. 
>>); +urldecode(<< $0, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $0 >>); +urldecode(<< $1, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $1 >>); +urldecode(<< $2, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $2 >>); +urldecode(<< $3, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $3 >>); +urldecode(<< $4, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $4 >>); +urldecode(<< $5, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $5 >>); +urldecode(<< $6, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $6 >>); +urldecode(<< $7, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $7 >>); +urldecode(<< $8, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $8 >>); +urldecode(<< $9, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $9 >>); +urldecode(<< $:, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $: >>); +urldecode(<< $;, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $; >>); +urldecode(<< $=, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $= >>); +urldecode(<< $@, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $@ >>); +urldecode(<< $A, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $A >>); +urldecode(<< $B, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $B >>); +urldecode(<< $C, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $C >>); +urldecode(<< $D, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $D >>); +urldecode(<< $E, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $E >>); +urldecode(<< $F, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $F >>); +urldecode(<< $G, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $G >>); +urldecode(<< $H, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $H >>); +urldecode(<< $I, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $I >>); +urldecode(<< $J, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $J >>); +urldecode(<< $K, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $K >>); +urldecode(<< $L, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $L >>); +urldecode(<< $M, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $M >>); +urldecode(<< $N, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $N >>); +urldecode(<< $O, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $O >>); +urldecode(<< $P, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $P >>); +urldecode(<< $Q, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $Q >>); +urldecode(<< $R, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $R >>); +urldecode(<< $S, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $S >>); +urldecode(<< $T, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $T >>); +urldecode(<< $U, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $U >>); +urldecode(<< $V, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $V >>); +urldecode(<< $W, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $W >>); +urldecode(<< $X, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $X >>); +urldecode(<< $Y, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $Y >>); +urldecode(<< $Z, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $Z >>); +urldecode(<< $_, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $_ >>); +urldecode(<< $a, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $a >>); +urldecode(<< $b, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $b >>); +urldecode(<< $c, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $c >>); +urldecode(<< $d, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $d >>); +urldecode(<< $e, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $e >>); 
+urldecode(<< $f, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $f >>); +urldecode(<< $g, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $g >>); +urldecode(<< $h, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $h >>); +urldecode(<< $i, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $i >>); +urldecode(<< $j, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $j >>); +urldecode(<< $k, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $k >>); +urldecode(<< $l, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $l >>); +urldecode(<< $m, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $m >>); +urldecode(<< $n, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $n >>); +urldecode(<< $o, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $o >>); +urldecode(<< $p, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $p >>); +urldecode(<< $q, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $q >>); +urldecode(<< $r, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $r >>); +urldecode(<< $s, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $s >>); +urldecode(<< $t, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $t >>); +urldecode(<< $u, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $u >>); +urldecode(<< $v, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $v >>); +urldecode(<< $w, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $w >>); +urldecode(<< $x, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $x >>); +urldecode(<< $y, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $y >>); +urldecode(<< $z, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $z >>); +urldecode(<< $~, Rest/bits >>, Acc) -> urldecode(Rest, << Acc/bits, $~ >>); +urldecode(<<>>, Acc) -> Acc. + +unhex($0) -> 0; +unhex($1) -> 1; +unhex($2) -> 2; +unhex($3) -> 3; +unhex($4) -> 4; +unhex($5) -> 5; +unhex($6) -> 6; +unhex($7) -> 7; +unhex($8) -> 8; +unhex($9) -> 9; +unhex($A) -> 10; +unhex($B) -> 11; +unhex($C) -> 12; +unhex($D) -> 13; +unhex($E) -> 14; +unhex($F) -> 15; +unhex($a) -> 10; +unhex($b) -> 11; +unhex($c) -> 12; +unhex($d) -> 13; +unhex($e) -> 14; +unhex($f) -> 15. + +-ifdef(TEST). +urldecode_test_() -> + Tests = [ + {<<"%20">>, <<" ">>}, + {<<"+">>, <<"+">>}, + {<<"%00">>, <<0>>}, + {<<"%fF">>, <<255>>}, + {<<"123">>, <<"123">>}, + {<<"%i5">>, error}, + {<<"%5">>, error} + ], + [{Qs, fun() -> + E = try urldecode(Qs) of + R -> R + catch _:_ -> + error + end + end} || {Qs, E} <- Tests]. + +urldecode_identity_test_() -> + Tests = [ + <<"%20">>, + <<"+">>, + <<"nothingnothingnothingnothing">>, + <<"Small+fast+modular+HTTP+server">>, + <<"Small%20fast%20modular%20HTTP%20server">>, + <<"Small%2F+fast%2F+modular+HTTP+server.">>, + <<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">> + ], + [{V, fun() -> V = urlencode(urldecode(V)) end} || V <- Tests]. + +horse_urldecode() -> + horse:repeat(100000, + urldecode(<<"nothingnothingnothingnothing">>) + ). + +horse_urldecode_hex() -> + horse:repeat(100000, + urldecode(<<"Small%2C%20fast%2C%20modular%20HTTP%20server.">>) + ). + +horse_urldecode_jp_hex() -> + horse:repeat(100000, + urldecode(<<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">>) + ). +-endif. + +%% @doc Percent encode a string. (RFC3986 2.1) +%% +%% This function is meant to be used for path components. + +-spec urlencode(B) -> B when B::binary(). +urlencode(B) -> + urlencode(B, <<>>). 
+ +urlencode(<< $!, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $! >>); +urlencode(<< $$, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $$ >>); +urlencode(<< $&, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $& >>); +urlencode(<< $', Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $' >>); +urlencode(<< $(, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $( >>); +urlencode(<< $), Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $) >>); +urlencode(<< $*, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $* >>); +urlencode(<< $+, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $+ >>); +urlencode(<< $,, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $, >>); +urlencode(<< $-, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $- >>); +urlencode(<< $., Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $. >>); +urlencode(<< $0, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $0 >>); +urlencode(<< $1, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $1 >>); +urlencode(<< $2, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $2 >>); +urlencode(<< $3, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $3 >>); +urlencode(<< $4, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $4 >>); +urlencode(<< $5, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $5 >>); +urlencode(<< $6, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $6 >>); +urlencode(<< $7, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $7 >>); +urlencode(<< $8, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $8 >>); +urlencode(<< $9, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $9 >>); +urlencode(<< $:, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $: >>); +urlencode(<< $;, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $; >>); +urlencode(<< $=, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $= >>); +urlencode(<< $@, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $@ >>); +urlencode(<< $A, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $A >>); +urlencode(<< $B, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $B >>); +urlencode(<< $C, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $C >>); +urlencode(<< $D, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $D >>); +urlencode(<< $E, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $E >>); +urlencode(<< $F, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $F >>); +urlencode(<< $G, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $G >>); +urlencode(<< $H, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $H >>); +urlencode(<< $I, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $I >>); +urlencode(<< $J, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $J >>); +urlencode(<< $K, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $K >>); +urlencode(<< $L, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $L >>); +urlencode(<< $M, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $M >>); +urlencode(<< $N, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $N >>); +urlencode(<< $O, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $O >>); +urlencode(<< $P, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $P >>); +urlencode(<< $Q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Q >>); +urlencode(<< $R, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $R >>); +urlencode(<< $S, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $S >>); +urlencode(<< $T, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $T >>); +urlencode(<< $U, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $U >>); 
+urlencode(<< $V, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $V >>); +urlencode(<< $W, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $W >>); +urlencode(<< $X, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $X >>); +urlencode(<< $Y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Y >>); +urlencode(<< $Z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Z >>); +urlencode(<< $_, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $_ >>); +urlencode(<< $a, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $a >>); +urlencode(<< $b, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $b >>); +urlencode(<< $c, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $c >>); +urlencode(<< $d, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $d >>); +urlencode(<< $e, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $e >>); +urlencode(<< $f, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $f >>); +urlencode(<< $g, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $g >>); +urlencode(<< $h, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $h >>); +urlencode(<< $i, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $i >>); +urlencode(<< $j, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $j >>); +urlencode(<< $k, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $k >>); +urlencode(<< $l, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $l >>); +urlencode(<< $m, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $m >>); +urlencode(<< $n, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $n >>); +urlencode(<< $o, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $o >>); +urlencode(<< $p, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $p >>); +urlencode(<< $q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $q >>); +urlencode(<< $r, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $r >>); +urlencode(<< $s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $s >>); +urlencode(<< $t, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $t >>); +urlencode(<< $u, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $u >>); +urlencode(<< $v, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $v >>); +urlencode(<< $w, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $w >>); +urlencode(<< $x, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $x >>); +urlencode(<< $y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $y >>); +urlencode(<< $z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $z >>); +urlencode(<< $~, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $~ >>); +urlencode(<< C, Rest/bits >>, Acc) -> + H = hex(C bsr 4), + L = hex(C band 16#0f), + urlencode(Rest, << Acc/bits, $%, H, L >>); +urlencode(<<>>, Acc) -> + Acc. + +hex( 0) -> $0; +hex( 1) -> $1; +hex( 2) -> $2; +hex( 3) -> $3; +hex( 4) -> $4; +hex( 5) -> $5; +hex( 6) -> $6; +hex( 7) -> $7; +hex( 8) -> $8; +hex( 9) -> $9; +hex(10) -> $A; +hex(11) -> $B; +hex(12) -> $C; +hex(13) -> $D; +hex(14) -> $E; +hex(15) -> $F. + +-ifdef(TEST). +urlencode_test_() -> + Tests = [ + {<<255, 0>>, <<"%FF%00">>}, + {<<255, " ">>, <<"%FF%20">>}, + {<<"+">>, <<"+">>}, + {<<"aBc123">>, <<"aBc123">>}, + {<<"!$&'()*+,:;=@-._~">>, <<"!$&'()*+,:;=@-._~">>} + ], + [{V, fun() -> E = urlencode(V) end} || {V, E} <- Tests]. 
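+
+%% For illustration, a round trip through urlencode/1 and urldecode/1
+%% above (a sketch; the expected values follow from the clauses in this
+%% module): spaces and slashes are outside the pass-through set, so they
+%% are percent-encoded with uppercase hexadecimal digits.
+%%
+%%   <<"a%20b%2Fc">> = urlencode(<<"a b/c">>),
+%%   <<"a b/c">> = urldecode(<<"a%20b%2Fc">>).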
+
+urlencode_identity_test_() ->
+ Tests = [
+ <<"+">>,
+ <<"nothingnothingnothingnothing">>,
+ <<"Small fast modular HTTP server">>,
+ <<"Small, fast, modular HTTP server.">>,
+ <<227,131,132,227,130,164,227,131,179,227,130,189,227,
+ 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227,
+ 129,153,227,130,139,230,151,139,229,190,139,227,128,156>>
+ ],
+ [{V, fun() -> V = urldecode(urlencode(V)) end} || V <- Tests].
+
+horse_urlencode() ->
+ horse:repeat(100000,
+ urlencode(<<"nothingnothingnothingnothing">>)
+ ).
+
+horse_urlencode_spaces() ->
+ horse:repeat(100000,
+ urlencode(<<"Small fast modular HTTP server">>)
+ ).
+
+horse_urlencode_jp() ->
+ horse:repeat(100000,
+ urlencode(<<227,131,132,227,130,164,227,131,179,227,130,189,227,
+ 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227,
+ 129,153,227,130,139,230,151,139,229,190,139,227,128,156>>)
+ ).
+
+horse_urlencode_mix() ->
+ horse:repeat(100000,
+ urlencode(<<"Small, fast, modular HTTP server.">>)
+ ).
+-endif.
diff --git a/src/wsLib/cow_uri_template.erl b/src/wsLib/cow_uri_template.erl
new file mode 100644
index 0000000..eac784f
--- /dev/null
+++ b/src/wsLib/cow_uri_template.erl
@@ -0,0 +1,356 @@
+%% Copyright (c) 2019, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+%% This is a full level 4 implementation of URI Templates
+%% as defined by RFC6570.
+
+-module(cow_uri_template).
+
+-export([parse/1]).
+-export([expand/2]).
+
+-type op() :: simple_string_expansion
+ | reserved_expansion
+ | fragment_expansion
+ | label_expansion_with_dot_prefix
+ | path_segment_expansion
+ | path_style_parameter_expansion
+ | form_style_query_expansion
+ | form_style_query_continuation.
+
+-type var_list() :: [
+ {no_modifier, binary()}
+ | {{prefix_modifier, pos_integer()}, binary()}
+ | {explode_modifier, binary()}
+].
+
+-type uri_template() :: [
+ binary() | {expr, op(), var_list()}
+].
+-export_type([uri_template/0]).
+
+-type variables() :: #{
+ binary() => binary()
+ | integer()
+ | float()
+ | [binary()]
+ | #{binary() => binary()}
+}.
+
+-include("cow_inline.hrl").
+-include("cow_parse.hrl").
+
+%% Parse a URI template.
+
+-spec parse(binary()) -> uri_template().
+parse(URITemplate) ->
+ parse(URITemplate, <<>>).
+
+parse(<<>>, <<>>) ->
+ [];
+parse(<<>>, Acc) ->
+ [Acc];
+parse(<<${,R/bits>>, <<>>) ->
+ parse_expr(R);
+parse(<<${,R/bits>>, Acc) ->
+ [Acc|parse_expr(R)];
+%% @todo Probably should reject unallowed characters so that
+%% we don't produce invalid URIs.
+parse(<<C,R/bits>>, Acc) when C =/= $} ->
+ parse(R, <<Acc/binary, C>>).
+
+parse_expr(<<$+,R/bits>>) ->
+ parse_var_list(R, reserved_expansion, []);
+parse_expr(<<$#,R/bits>>) ->
+ parse_var_list(R, fragment_expansion, []);
+parse_expr(<<$.,R/bits>>) ->
+ parse_var_list(R, label_expansion_with_dot_prefix, []);
+parse_expr(<<$/,R/bits>>) ->
+ parse_var_list(R, path_segment_expansion, []);
+parse_expr(<<$;,R/bits>>) ->
+ parse_var_list(R, path_style_parameter_expansion, []);
+parse_expr(<<$?,R/bits>>) ->
+ parse_var_list(R, form_style_query_expansion, []);
+parse_expr(<<$&,R/bits>>) ->
+ parse_var_list(R, form_style_query_continuation, []);
+parse_expr(R) ->
+ parse_var_list(R, simple_string_expansion, []).
+
+parse_var_list(<<C,R/bits>>, Op, List)
+ when ?IS_ALPHANUM(C) or (C =:= $_) ->
+ parse_varname(R, Op, List, <<C>>).
+
+parse_varname(<<C,R/bits>>, Op, List, Name)
+ when ?IS_ALPHANUM(C) or (C =:= $_) or (C =:= $.) or (C =:= $%) ->
+ parse_varname(R, Op, List, <<Name/binary, C>>);
+parse_varname(<<$:,C,R/bits>>, Op, List, Name)
+ when (C =:= $1) or (C =:= $2) or (C =:= $3) or (C =:= $4) or (C =:= $5)
+ or (C =:= $6) or (C =:= $7) or (C =:= $8) or (C =:= $9) ->
+ parse_prefix_modifier(R, Op, List, Name, <<C>>);
+parse_varname(<<$*,$,,R/bits>>, Op, List, Name) ->
+ parse_var_list(R, Op, [{explode_modifier, Name}|List]);
+parse_varname(<<$*,$},R/bits>>, Op, List, Name) ->
+ [{expr, Op, lists:reverse([{explode_modifier, Name}|List])}|parse(R, <<>>)];
+parse_varname(<<$,,R/bits>>, Op, List, Name) ->
+ parse_var_list(R, Op, [{no_modifier, Name}|List]);
+parse_varname(<<$},R/bits>>, Op, List, Name) ->
+ [{expr, Op, lists:reverse([{no_modifier, Name}|List])}|parse(R, <<>>)].
+
+parse_prefix_modifier(<<C,R/bits>>, Op, List, Name, Acc)
+ when ?IS_DIGIT(C), byte_size(Acc) < 4 ->
+ parse_prefix_modifier(R, Op, List, Name, <<Acc/binary, C>>);
+parse_prefix_modifier(<<$,,R/bits>>, Op, List, Name, Acc) ->
+ parse_var_list(R, Op, [{{prefix_modifier, binary_to_integer(Acc)}, Name}|List]);
+parse_prefix_modifier(<<$},R/bits>>, Op, List, Name, Acc) ->
+ [{expr, Op, lists:reverse([{{prefix_modifier, binary_to_integer(Acc)}, Name}|List])}|parse(R, <<>>)].
+
+%% Expand a URI template (after parsing it if necessary).
+
+-spec expand(binary() | uri_template(), variables()) -> iodata().
+expand(URITemplate, Vars) when is_binary(URITemplate) ->
+ expand(parse(URITemplate), Vars);
+expand(URITemplate, Vars) ->
+ expand1(URITemplate, Vars).
+
+expand1([], _) ->
+ [];
+expand1([Literal|Tail], Vars) when is_binary(Literal) ->
+ [Literal|expand1(Tail, Vars)];
+expand1([{expr, simple_string_expansion, VarList}|Tail], Vars) ->
+ [simple_string_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, reserved_expansion, VarList}|Tail], Vars) ->
+ [reserved_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, fragment_expansion, VarList}|Tail], Vars) ->
+ [fragment_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, label_expansion_with_dot_prefix, VarList}|Tail], Vars) ->
+ [label_expansion_with_dot_prefix(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, path_segment_expansion, VarList}|Tail], Vars) ->
+ [path_segment_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, path_style_parameter_expansion, VarList}|Tail], Vars) ->
+ [path_style_parameter_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, form_style_query_expansion, VarList}|Tail], Vars) ->
+ [form_style_query_expansion(VarList, Vars)|expand1(Tail, Vars)];
+expand1([{expr, form_style_query_continuation, VarList}|Tail], Vars) ->
+ [form_style_query_continuation(VarList, Vars)|expand1(Tail, Vars)].
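+
+%% For reference, a sketch of the terms parse/1 produces and expand1/2
+%% consumes, for a template using two of the expansion operators above:
+%%
+%%   parse(<<"/users/{id}{?fields*}">>) =:=
+%%       [<<"/users/">>,
+%%        {expr, simple_string_expansion, [{no_modifier, <<"id">>}]},
+%%        {expr, form_style_query_expansion, [{explode_modifier, <<"fields">>}]}]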
+ +simple_string_expansion(VarList, Vars) -> + lists:join($,, [ + apply_modifier(Modifier, unreserved, $,, Value) + || {Modifier, _Name, Value} <- lookup_variables(VarList, Vars)]). + +reserved_expansion(VarList, Vars) -> + lists:join($,, [ + apply_modifier(Modifier, reserved, $,, Value) + || {Modifier, _Name, Value} <- lookup_variables(VarList, Vars)]). + +fragment_expansion(VarList, Vars) -> + case reserved_expansion(VarList, Vars) of + [] -> []; + Expanded -> [$#, Expanded] + end. + +label_expansion_with_dot_prefix(VarList, Vars) -> + segment_expansion(VarList, Vars, $.). + +path_segment_expansion(VarList, Vars) -> + segment_expansion(VarList, Vars, $/). + +segment_expansion(VarList, Vars, Sep) -> + Expanded = lists:join(Sep, [ + apply_modifier(Modifier, unreserved, Sep, Value) + || {Modifier, _Name, Value} <- lookup_variables(VarList, Vars)]), + case Expanded of + [] -> []; + [[]] -> []; + _ -> [Sep, Expanded] + end. + +path_style_parameter_expansion(VarList, Vars) -> + parameter_expansion(VarList, Vars, $;, $;, trim). + +form_style_query_expansion(VarList, Vars) -> + parameter_expansion(VarList, Vars, $?, $&, no_trim). + +form_style_query_continuation(VarList, Vars) -> + parameter_expansion(VarList, Vars, $&, $&, no_trim). + +parameter_expansion(VarList, Vars, LeadingSep, Sep, Trim) -> + Expanded = lists:join(Sep, [ + apply_parameter_modifier(Modifier, unreserved, Sep, Trim, Name, Value) + || {Modifier, Name, Value} <- lookup_variables(VarList, Vars)]), + case Expanded of + [] -> []; + [[]] -> []; + _ -> [LeadingSep, Expanded] + end. + +lookup_variables([], _) -> + []; +lookup_variables([{Modifier, Name}|Tail], Vars) -> + case Vars of + #{Name := Value} -> [{Modifier, Name, Value}|lookup_variables(Tail, Vars)]; + _ -> lookup_variables(Tail, Vars) + end. + +apply_modifier(no_modifier, AllowedChars, _, List) when is_list(List) -> + lists:join($,, [urlencode(Value, AllowedChars) || Value <- List]); +apply_modifier(explode_modifier, AllowedChars, ExplodeSep, List) when is_list(List) -> + lists:join(ExplodeSep, [urlencode(Value, AllowedChars) || Value <- List]); +apply_modifier(Modifier, AllowedChars, ExplodeSep, Map) when is_map(Map) -> + {JoinSep, KVSep} = case Modifier of + no_modifier -> {$,, $,}; + explode_modifier -> {ExplodeSep, $=} + end, + lists:reverse(lists:join(JoinSep, + maps:fold(fun(Key, Value, Acc) -> + [[ + urlencode(Key, AllowedChars), + KVSep, + urlencode(Value, AllowedChars) + ]|Acc] + end, [], Map) + )); +apply_modifier({prefix_modifier, MaxLen}, AllowedChars, _, Value) -> + urlencode(string:slice(binarize(Value), 0, MaxLen), AllowedChars); +apply_modifier(_, AllowedChars, _, Value) -> + urlencode(binarize(Value), AllowedChars). 
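+
+%% A sketch of the prefix modifier handled by apply_modifier/4 above:
+%% {var:3} keeps at most the first 3 characters of the value before
+%% percent-encoding, so
+%%
+%%   iolist_to_binary(expand(<<"{var:3}">>, #{<<"var">> => <<"value">>}))
+%%
+%% evaluates to <<"val">>.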
+
+apply_parameter_modifier(_, _, _, _, _, []) ->
+ [];
+apply_parameter_modifier(_, _, _, _, _, Map) when Map =:= #{} ->
+ [];
+apply_parameter_modifier(no_modifier, AllowedChars, _, _, Name, List) when is_list(List) ->
+ [
+ Name,
+ $=,
+ lists:join($,, [urlencode(Value, AllowedChars) || Value <- List])
+ ];
+apply_parameter_modifier(explode_modifier, AllowedChars, ExplodeSep, _, Name, List) when is_list(List) ->
+ lists:join(ExplodeSep, [[
+ Name,
+ $=,
+ urlencode(Value, AllowedChars)
+ ] || Value <- List]);
+apply_parameter_modifier(Modifier, AllowedChars, ExplodeSep, _, Name, Map) when is_map(Map) ->
+ {JoinSep, KVSep} = case Modifier of
+ no_modifier -> {$,, $,};
+ explode_modifier -> {ExplodeSep, $=}
+ end,
+ [
+ case Modifier of
+ no_modifier ->
+ [
+ Name,
+ $=
+ ];
+ explode_modifier ->
+ []
+ end,
+ lists:reverse(lists:join(JoinSep,
+ maps:fold(fun(Key, Value, Acc) ->
+ [[
+ urlencode(Key, AllowedChars),
+ KVSep,
+ urlencode(Value, AllowedChars)
+ ]|Acc]
+ end, [], Map)
+ ))
+ ];
+apply_parameter_modifier(Modifier, AllowedChars, _, Trim, Name, Value0) ->
+ Value1 = binarize(Value0),
+ Value = case Modifier of
+ {prefix_modifier, MaxLen} ->
+ string:slice(Value1, 0, MaxLen);
+ no_modifier ->
+ Value1
+ end,
+ [
+ Name,
+ case Value of
+ <<>> when Trim =:= trim ->
+ [];
+ <<>> when Trim =:= no_trim ->
+ $=;
+ _ ->
+ [
+ $=,
+ urlencode(Value, AllowedChars)
+ ]
+ end
+ ].
+
+binarize(Value) when is_integer(Value) ->
+ integer_to_binary(Value);
+binarize(Value) when is_float(Value) ->
+ float_to_binary(Value, [{decimals, 10}, compact]);
+binarize(Value) ->
+ Value.
+
+urlencode(Value, unreserved) ->
+ urlencode_unreserved(Value, <<>>);
+urlencode(Value, reserved) ->
+ urlencode_reserved(Value, <<>>).
+
+urlencode_unreserved(<<C,R/bits>>, Acc)
+ when ?IS_URI_UNRESERVED(C) ->
+ urlencode_unreserved(R, <<Acc/binary, C>>);
+urlencode_unreserved(<<C,R/bits>>, Acc) ->
+ H = hex(C bsr 4),
+ L = hex(C band 16#0f),
+ urlencode_unreserved(R, <<Acc/binary, $%, H, L>>);
+urlencode_unreserved(<<>>, Acc) ->
+ Acc.
+
+urlencode_reserved(<<C,R/bits>>, Acc)
+ when ?IS_URI_UNRESERVED(C) or ?IS_URI_GEN_DELIMS(C) or ?IS_URI_SUB_DELIMS(C) ->
+ urlencode_reserved(R, <<Acc/binary, C>>);
+urlencode_reserved(<<C,R/bits>>, Acc) ->
+ H = hex(C bsr 4),
+ L = hex(C band 16#0f),
+ urlencode_reserved(R, <<Acc/binary, $%, H, L>>);
+urlencode_reserved(<<>>, Acc) ->
+ Acc.
+
+%% Percent-encoding uses uppercase hexadecimal digits,
+%% as required for URI template expansion (RFC6570).
+hex(X) when X < 10 -> $0 + X;
+hex(X) -> $A + X - 10.
+
+-ifdef(TEST).
+expand_uritemplate_test_() ->
+ Files = filelib:wildcard("deps/uritemplate-tests/*.json"),
+ lists:flatten([begin
+ {ok, JSON} = file:read_file(File),
+ Tests = jsx:decode(JSON, [return_maps]),
+ [begin
+ %% Erlang doesn't have a NULL value.
+ Vars = maps:remove(<<"undef">>, Vars0),
+ [
+ {iolist_to_binary(io_lib:format("~s - ~s: ~s => ~s",
+ [filename:basename(File), Section, URITemplate,
+ if
+ is_list(Expected) -> lists:join(<<" OR ">>, Expected);
+ true -> Expected
+ end
+ ])),
+ fun() ->
+ case Expected of
+ false ->
+ {'EXIT', _} = (catch expand(URITemplate, Vars));
+ [_|_] ->
+ Result = iolist_to_binary(expand(URITemplate, Vars)),
+ io:format("~p", [Result]),
+ true = lists:member(Result, Expected);
+ _ ->
+ Expected = iolist_to_binary(expand(URITemplate, Vars))
+ end
+ end}
+ || [URITemplate, Expected] <- Cases]
+ end || {Section, #{
+ <<"variables">> := Vars0,
+ <<"testcases">> := Cases
+ }} <- maps:to_list(Tests)]
+ end || File <- Files]).
+-endif.
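+
+%% End-to-end sketch of expand/2 for the template shown in the parse/1
+%% sketch above, combining simple string expansion with an exploded
+%% form-style query expansion:
+%%
+%%   Template = <<"/users/{id}{?fields*}">>,
+%%   Vars = #{<<"id">> => <<"42">>, <<"fields">> => [<<"name">>, <<"email">>]},
+%%   <<"/users/42?fields=name&fields=email">> =
+%%       iolist_to_binary(expand(Template, Vars)).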
diff --git a/src/wsLib/cow_ws.erl b/src/wsLib/cow_ws.erl new file mode 100644 index 0000000..3bb46c5 --- /dev/null +++ b/src/wsLib/cow_ws.erl @@ -0,0 +1,741 @@ +%% Copyright (c) 2015-2018, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_ws). + +-export([key/0]). +-export([encode_key/1]). + +-export([negotiate_permessage_deflate/3]). +-export([negotiate_x_webkit_deflate_frame/3]). + +-export([validate_permessage_deflate/3]). + +-export([parse_header/3]). +-export([parse_payload/9]). +-export([make_frame/4]). + +-export([frame/2]). +-export([masked_frame/2]). + +-type close_code() :: 1000..1003 | 1006..1011 | 3000..4999. +-export_type([close_code/0]). + +-type extensions() :: map(). +-export_type([extensions/0]). + +-type deflate_opts() :: #{ + %% Compression parameters. + level => zlib:zlevel(), + mem_level => zlib:zmemlevel(), + strategy => zlib:zstrategy(), + + %% Whether the compression context will carry over between frames. + server_context_takeover => takeover | no_takeover, + client_context_takeover => takeover | no_takeover, + + %% LZ77 sliding window size limits. + server_max_window_bits => 8..15, + client_max_window_bits => 8..15 +}. +-export_type([deflate_opts/0]). + +-type frag_state() :: undefined | {fin | nofin, text | binary, rsv()}. +-export_type([frag_state/0]). + +-type frame() :: close | ping | pong + | {text | binary | close | ping | pong, iodata()} + | {close, close_code(), iodata()} + | {fragment, fin | nofin, text | binary | continuation, iodata()}. +-export_type([frame/0]). + +-type frame_type() :: fragment | text | binary | close | ping | pong. +-export_type([frame_type/0]). + +-type mask_key() :: undefined | 0..16#ffffffff. +-export_type([mask_key/0]). + +-type rsv() :: <<_:3>>. +-export_type([rsv/0]). + +-type utf8_state() :: 0..8 | undefined. +-export_type([utf8_state/0]). + +%% @doc Generate a key for the Websocket handshake request. + +-spec key() -> binary(). +key() -> + base64:encode(crypto:strong_rand_bytes(16)). + +%% @doc Encode the key into the accept value for the Websocket handshake response. + +-spec encode_key(binary()) -> binary(). +encode_key(Key) -> + base64:encode(crypto:hash(sha, [Key, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"])). + +%% @doc Negotiate the permessage-deflate extension. + +-spec negotiate_permessage_deflate( + [binary() | {binary(), binary()}], Exts, deflate_opts()) + -> ignore | {ok, iolist(), Exts} when Exts::extensions(). +%% Ignore if deflate already negotiated. +negotiate_permessage_deflate(_, #{deflate := _}, _) -> + ignore; +negotiate_permessage_deflate(Params, Extensions, Opts) -> + case lists:usort(Params) of + %% Ignore if multiple parameters with the same name. + Params2 when length(Params) =/= length(Params2) -> + ignore; + Params2 -> + negotiate_permessage_deflate1(Params2, Extensions, Opts) + end. 
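+
+%% For reference, key/0 and encode_key/1 above implement the handshake
+%% nonce and the Sec-WebSocket-Accept digest from RFC 6455. A sketch
+%% using the example key given in the RFC:
+%%
+%%   <<"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=">> =
+%%       encode_key(<<"dGhlIHNhbXBsZSBub25jZQ==">>).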
+ +negotiate_permessage_deflate1(Params, Extensions, Opts) -> + %% We are allowed to send back no_takeover even if the client + %% accepts takeover. Therefore we use no_takeover if any of + %% the inputs have it. + ServerTakeover = maps:get(server_context_takeover, Opts, takeover), + ClientTakeover = maps:get(client_context_takeover, Opts, takeover), + %% We can send back window bits smaller than or equal to what + %% the client sends us. + ServerMaxWindowBits = maps:get(server_max_window_bits, Opts, 15), + ClientMaxWindowBits = maps:get(client_max_window_bits, Opts, 15), + %% We may need to send back no_context_takeover depending on configuration. + RespParams0 = case ServerTakeover of + takeover -> []; + no_takeover -> [<<"; server_no_context_takeover">>] + end, + RespParams1 = case ClientTakeover of + takeover -> RespParams0; + no_takeover -> [<<"; client_no_context_takeover">>|RespParams0] + end, + Negotiated0 = #{ + server_context_takeover => ServerTakeover, + client_context_takeover => ClientTakeover, + server_max_window_bits => ServerMaxWindowBits, + client_max_window_bits => ClientMaxWindowBits + }, + case negotiate_params(Params, Negotiated0, RespParams1) of + ignore -> + ignore; + {#{server_max_window_bits := SB}, _} when SB > ServerMaxWindowBits -> + ignore; + {#{client_max_window_bits := CB}, _} when CB > ClientMaxWindowBits -> + ignore; + {Negotiated, RespParams2} -> + %% We add the configured max window bits if necessary. + RespParams = case Negotiated of + #{server_max_window_bits_set := true} -> RespParams2; + _ when ServerMaxWindowBits =:= 15 -> RespParams2; + _ -> [<<"; server_max_window_bits=">>, + integer_to_binary(ServerMaxWindowBits)|RespParams2] + end, + {Inflate, Deflate} = init_permessage_deflate( + maps:get(client_max_window_bits, Negotiated), + maps:get(server_max_window_bits, Negotiated), Opts), + {ok, [<<"permessage-deflate">>, RespParams], Extensions#{ + deflate => Deflate, + deflate_takeover => maps:get(server_context_takeover, Negotiated), + inflate => Inflate, + inflate_takeover => maps:get(client_context_takeover, Negotiated)}} + end. + +negotiate_params([], Negotiated, RespParams) -> + {Negotiated, RespParams}; +%% We must only send the client_max_window_bits parameter if the +%% request explicitly indicated the client supports it. +negotiate_params([<<"client_max_window_bits">>|Tail], Negotiated, RespParams) -> + CB = maps:get(client_max_window_bits, Negotiated), + negotiate_params(Tail, Negotiated#{client_max_window_bits_set => true}, + [<<"; client_max_window_bits=">>, integer_to_binary(CB)|RespParams]); +negotiate_params([{<<"client_max_window_bits">>, Max}|Tail], Negotiated, RespParams) -> + CB0 = maps:get(client_max_window_bits, Negotiated, undefined), + case parse_max_window_bits(Max) of + error -> + ignore; + CB when CB =< CB0 -> + negotiate_params(Tail, Negotiated#{client_max_window_bits => CB}, + [<<"; client_max_window_bits=">>, Max|RespParams]); + %% When the client sends window bits larger than the server wants + %% to use, we use what the server defined. 
+ _ -> + negotiate_params(Tail, Negotiated, + [<<"; client_max_window_bits=">>, integer_to_binary(CB0)|RespParams]) + end; +negotiate_params([{<<"server_max_window_bits">>, Max}|Tail], Negotiated, RespParams) -> + SB0 = maps:get(server_max_window_bits, Negotiated, undefined), + case parse_max_window_bits(Max) of + error -> + ignore; + SB when SB =< SB0 -> + negotiate_params(Tail, Negotiated#{ + server_max_window_bits => SB, + server_max_window_bits_set => true}, + [<<"; server_max_window_bits=">>, Max|RespParams]); + %% When the client sends window bits larger than the server wants + %% to use, we use what the server defined. The parameter will be + %% set only when this function returns. + _ -> + negotiate_params(Tail, Negotiated, RespParams) + end; +%% We only need to send the no_context_takeover parameter back +%% here if we didn't already define it via configuration. +negotiate_params([<<"client_no_context_takeover">>|Tail], Negotiated, RespParams) -> + case maps:get(client_context_takeover, Negotiated) of + no_takeover -> + negotiate_params(Tail, Negotiated, RespParams); + takeover -> + negotiate_params(Tail, Negotiated#{client_context_takeover => no_takeover}, + [<<"; client_no_context_takeover">>|RespParams]) + end; +negotiate_params([<<"server_no_context_takeover">>|Tail], Negotiated, RespParams) -> + case maps:get(server_context_takeover, Negotiated) of + no_takeover -> + negotiate_params(Tail, Negotiated, RespParams); + takeover -> + negotiate_params(Tail, Negotiated#{server_context_takeover => no_takeover}, + [<<"; server_no_context_takeover">>|RespParams]) + end; +%% Ignore if unknown parameter; ignore if parameter with invalid or missing value. +negotiate_params(_, _, _) -> + ignore. + +parse_max_window_bits(<<"8">>) -> 8; +parse_max_window_bits(<<"9">>) -> 9; +parse_max_window_bits(<<"10">>) -> 10; +parse_max_window_bits(<<"11">>) -> 11; +parse_max_window_bits(<<"12">>) -> 12; +parse_max_window_bits(<<"13">>) -> 13; +parse_max_window_bits(<<"14">>) -> 14; +parse_max_window_bits(<<"15">>) -> 15; +parse_max_window_bits(_) -> error. + +%% A negative WindowBits value indicates that zlib headers are not used. +init_permessage_deflate(InflateWindowBits, DeflateWindowBits, Opts) -> + Inflate = zlib:open(), + ok = zlib:inflateInit(Inflate, -InflateWindowBits), + Deflate = zlib:open(), + %% zlib 1.2.11+ now rejects -8. It used to transform it to -9. + %% We need to use 9 when 8 is requested for interoperability. + DeflateWindowBits2 = case DeflateWindowBits of + 8 -> 9; + _ -> DeflateWindowBits + end, + ok = zlib:deflateInit(Deflate, + maps:get(level, Opts, best_compression), + deflated, + -DeflateWindowBits2, + maps:get(mem_level, Opts, 8), + maps:get(strategy, Opts, default)), + %% Set the owner pid of the zlib contexts if requested. + case Opts of + #{owner := Pid} -> set_owner(Pid, Inflate, Deflate); + _ -> ok + end, + {Inflate, Deflate}. + +-ifdef(OTP_RELEASE). +%% Using is_port/1 on a zlib context results in a Dialyzer warning in OTP 21. +%% This function helps silence that warning while staying compatible +%% with all supported versions. + +set_owner(Pid, Inflate, Deflate) -> + zlib:set_controlling_process(Inflate, Pid), + zlib:set_controlling_process(Deflate, Pid). +-else. +%% The zlib port became a reference in OTP 20.1+. There +%% was however no way to change the controlling process +%% until the OTP 20.1.3 patch version. Since we can't +%% enable compression for 20.1, 20.1.1 and 20.1.2 we +%% explicitly crash. The caller should ignore this extension. 
+ +set_owner(Pid, Inflate, Deflate) when is_port(Inflate) -> + true = erlang:port_connect(Inflate, Pid), + true = unlink(Inflate), + true = erlang:port_connect(Deflate, Pid), + true = unlink(Deflate), + ok; +set_owner(Pid, Inflate, Deflate) -> + case erlang:function_exported(zlib, set_controlling_process, 2) of + true -> + zlib:set_controlling_process(Inflate, Pid), + zlib:set_controlling_process(Deflate, Pid); + false -> + exit({error, incompatible_zlib_version, + 'OTP 20.1, 20.1.1 and 20.1.2 are missing required functionality.'}) + end. +-endif. + +%% @doc Negotiate the x-webkit-deflate-frame extension. +%% +%% The implementation is very basic and none of the parameters +%% are currently supported. + +-spec negotiate_x_webkit_deflate_frame( + [binary() | {binary(), binary()}], Exts, deflate_opts()) + -> ignore | {ok, binary(), Exts} when Exts::extensions(). +negotiate_x_webkit_deflate_frame(_, #{deflate := _}, _) -> + ignore; +negotiate_x_webkit_deflate_frame(_Params, Extensions, Opts) -> + % Since we are negotiating an unconstrained deflate-frame + % then we must be willing to accept frames using the + % maximum window size which is 2^15. + {Inflate, Deflate} = init_permessage_deflate(15, 15, Opts), + {ok, <<"x-webkit-deflate-frame">>, + Extensions#{ + deflate => Deflate, + deflate_takeover => takeover, + inflate => Inflate, + inflate_takeover => takeover}}. + +%% @doc Validate the negotiated permessage-deflate extension. + +%% Error when more than one deflate extension was negotiated. +validate_permessage_deflate(_, #{deflate := _}, _) -> + error; +validate_permessage_deflate(Params, Extensions, Opts) -> + case lists:usort(Params) of + %% Error if multiple parameters with the same name. + Params2 when length(Params) =/= length(Params2) -> + error; + Params2 -> + case parse_response_permessage_deflate_params(Params2, 15, takeover, 15, takeover) of + error -> + error; + {ClientWindowBits, ClientTakeOver, ServerWindowBits, ServerTakeOver} -> + {Inflate, Deflate} = init_permessage_deflate(ServerWindowBits, ClientWindowBits, Opts), + {ok, Extensions#{ + deflate => Deflate, + deflate_takeover => ClientTakeOver, + inflate => Inflate, + inflate_takeover => ServerTakeOver}} + end + end. + +parse_response_permessage_deflate_params([], CB, CTO, SB, STO) -> + {CB, CTO, SB, STO}; +parse_response_permessage_deflate_params([{<<"client_max_window_bits">>, Max}|Tail], _, CTO, SB, STO) -> + case parse_max_window_bits(Max) of + error -> error; + CB -> parse_response_permessage_deflate_params(Tail, CB, CTO, SB, STO) + end; +parse_response_permessage_deflate_params([<<"client_no_context_takeover">>|Tail], CB, _, SB, STO) -> + parse_response_permessage_deflate_params(Tail, CB, no_takeover, SB, STO); +parse_response_permessage_deflate_params([{<<"server_max_window_bits">>, Max}|Tail], CB, CTO, _, STO) -> + case parse_max_window_bits(Max) of + error -> error; + SB -> parse_response_permessage_deflate_params(Tail, CB, CTO, SB, STO) + end; +parse_response_permessage_deflate_params([<<"server_no_context_takeover">>|Tail], CB, CTO, SB, _) -> + parse_response_permessage_deflate_params(Tail, CB, CTO, SB, no_takeover); +%% Error if unknown parameter; error if parameter with invalid or missing value. +parse_response_permessage_deflate_params(_, _, _, _, _) -> + error. + +%% @doc Parse and validate the Websocket frame header. +%% +%% This function also updates the fragmentation state according to +%% information found in the frame's header. 
+ +-spec parse_header(binary(), extensions(), frag_state()) + -> error | more | {frame_type(), frag_state(), rsv(), non_neg_integer(), mask_key(), binary()}. +%% RSV bits MUST be 0 unless an extension is negotiated +%% that defines meanings for non-zero values. +parse_header(<< _:1, Rsv:3, _/bits >>, Extensions, _) when Extensions =:= #{}, Rsv =/= 0 -> error; +%% Last 2 RSV bits MUST be 0 if deflate-frame extension is used. +parse_header(<< _:2, 1:1, _/bits >>, #{deflate := _}, _) -> error; +parse_header(<< _:3, 1:1, _/bits >>, #{deflate := _}, _) -> error; +%% Invalid opcode. Note that these opcodes may be used by extensions. +parse_header(<< _:4, 3:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 4:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 5:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 6:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 7:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 11:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 12:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 13:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 14:4, _/bits >>, _, _) -> error; +parse_header(<< _:4, 15:4, _/bits >>, _, _) -> error; +%% Control frames MUST NOT be fragmented. +parse_header(<< 0:1, _:3, Opcode:4, _/bits >>, _, _) when Opcode >= 8 -> error; +%% A frame MUST NOT use the zero opcode unless fragmentation was initiated. +parse_header(<< _:4, 0:4, _/bits >>, _, undefined) -> error; +%% Non-control opcode when expecting control message or next fragment. +parse_header(<< _:4, 1:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 2:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 3:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 4:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 5:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 6:4, _/bits >>, _, {_, _, _}) -> error; +parse_header(<< _:4, 7:4, _/bits >>, _, {_, _, _}) -> error; +%% Close control frame length MUST be 0 or >= 2. +parse_header(<< _:4, 8:4, _:1, 1:7, _/bits >>, _, _) -> error; +%% Close control frame with incomplete close code. Need more data. +parse_header(Data = << _:4, 8:4, 0:1, Len:7, _/bits >>, _, _) when Len > 1, byte_size(Data) < 4 -> more; +parse_header(Data = << _:4, 8:4, 1:1, Len:7, _/bits >>, _, _) when Len > 1, byte_size(Data) < 8 -> more; +%% 7 bits payload length. +parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 0:1, Len:7, Rest/bits >>, _, FragState) when Len < 126 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, undefined, Rest); +parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 1:1, Len:7, MaskKey:32, Rest/bits >>, _, FragState) when Len < 126 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, MaskKey, Rest); +%% 16 bits payload length. +parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 0:1, 126:7, Len:16, Rest/bits >>, _, FragState) when Len > 125, Opcode < 8 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, undefined, Rest); +parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 1:1, 126:7, Len:16, MaskKey:32, Rest/bits >>, _, FragState) when Len > 125, Opcode < 8 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, MaskKey, Rest); +%% 63 bits payload length. 
+parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 0:1, 127:7, 0:1, Len:63, Rest/bits >>, _, FragState) when Len > 16#ffff, Opcode < 8 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, undefined, Rest); +parse_header(<< Fin:1, Rsv:3/bits, Opcode:4, 1:1, 127:7, 0:1, Len:63, MaskKey:32, Rest/bits >>, _, FragState) when Len > 16#ffff, Opcode < 8 -> + parse_header(Opcode, Fin, FragState, Rsv, Len, MaskKey, Rest); +%% When payload length is over 63 bits, the most significant bit MUST be 0. +parse_header(<< _:9, 127:7, 1:1, _/bits >>, _, _) -> error; +%% For the next two clauses, it can be one of the following: +%% +%% * The minimal number of bytes MUST be used to encode the length +%% * All control frames MUST have a payload length of 125 bytes or less +parse_header(<< _:8, 0:1, 126:7, _:16, _/bits >>, _, _) -> error; +parse_header(<< _:8, 1:1, 126:7, _:48, _/bits >>, _, _) -> error; +parse_header(<< _:8, 0:1, 127:7, _:64, _/bits >>, _, _) -> error; +parse_header(<< _:8, 1:1, 127:7, _:96, _/bits >>, _, _) -> error; +%% Need more data. +parse_header(_, _, _) -> more. + +parse_header(Opcode, Fin, FragState, Rsv, Len, MaskKey, Rest) -> + Type = opcode_to_frame_type(Opcode), + Type2 = case Fin of + 0 -> fragment; + 1 -> Type + end, + {Type2, frag_state(Type, Fin, Rsv, FragState), Rsv, Len, MaskKey, Rest}. + +opcode_to_frame_type(0) -> fragment; +opcode_to_frame_type(1) -> text; +opcode_to_frame_type(2) -> binary; +opcode_to_frame_type(8) -> close; +opcode_to_frame_type(9) -> ping; +opcode_to_frame_type(10) -> pong. + +frag_state(Type, 0, Rsv, undefined) -> {nofin, Type, Rsv}; +frag_state(fragment, 0, _, FragState = {nofin, _, _}) -> FragState; +frag_state(fragment, 1, _, {nofin, Type, Rsv}) -> {fin, Type, Rsv}; +frag_state(_, 1, _, FragState) -> FragState. + +%% @doc Parse and validate the frame's payload. +%% +%% Validation is only required for text and close frames which feature +%% a UTF-8 payload. + +-spec parse_payload(binary(), mask_key(), utf8_state(), non_neg_integer(), + frame_type(), non_neg_integer(), frag_state(), extensions(), rsv()) + -> {ok, binary(), utf8_state(), binary()} + | {ok, close_code(), binary(), utf8_state(), binary()} + | {more, binary(), utf8_state()} + | {more, close_code(), binary(), utf8_state()} + | {error, badframe | badencoding}. +%% Empty last frame of compressed message. +parse_payload(Data, _, Utf8State, _, _, 0, {fin, _, << 1:1, 0:2 >>}, + #{inflate := Inflate, inflate_takeover := TakeOver}, _) -> + _ = zlib:inflate(Inflate, << 0, 0, 255, 255 >>), + case TakeOver of + no_takeover -> zlib:inflateReset(Inflate); + takeover -> ok + end, + {ok, <<>>, Utf8State, Data}; +%% Compressed fragmented frame. +parse_payload(Data, MaskKey, Utf8State, ParsedLen, Type, Len, FragState = {_, _, << 1:1, 0:2 >>}, + #{inflate := Inflate, inflate_takeover := TakeOver}, _) -> + {Data2, Rest, Eof} = split_payload(Data, Len), + Payload = inflate_frame(unmask(Data2, MaskKey, ParsedLen), Inflate, TakeOver, FragState, Eof), + validate_payload(Payload, Rest, Utf8State, ParsedLen, Type, FragState, Eof); +%% Compressed frame. +parse_payload(Data, MaskKey, Utf8State, ParsedLen, Type, Len, FragState, + #{inflate := Inflate, inflate_takeover := TakeOver}, << 1:1, 0:2 >>) when Type =:= text; Type =:= binary -> + {Data2, Rest, Eof} = split_payload(Data, Len), + Payload = inflate_frame(unmask(Data2, MaskKey, ParsedLen), Inflate, TakeOver, FragState, Eof), + validate_payload(Payload, Rest, Utf8State, ParsedLen, Type, FragState, Eof); +%% Empty frame. 
+parse_payload(Data, _, Utf8State, 0, _, 0, _, _, _) + when Utf8State =:= 0; Utf8State =:= undefined -> + {ok, <<>>, Utf8State, Data}; +%% Start of close frame. +parse_payload(Data, MaskKey, Utf8State, 0, Type = close, Len, FragState, _, << 0:3 >>) -> + {<< MaskedCode:2/binary, Data2/bits >>, Rest, Eof} = split_payload(Data, Len), + << CloseCode:16 >> = unmask(MaskedCode, MaskKey, 0), + case validate_close_code(CloseCode) of + ok -> + Payload = unmask(Data2, MaskKey, 2), + case validate_payload(Payload, Rest, Utf8State, 2, Type, FragState, Eof) of + {ok, _, Utf8State2, _} -> {ok, CloseCode, Payload, Utf8State2, Rest}; + {more, _, Utf8State2} -> {more, CloseCode, Payload, Utf8State2}; + Error -> Error + end; + error -> + {error, badframe} + end; +%% Normal frame. +parse_payload(Data, MaskKey, Utf8State, ParsedLen, Type, Len, FragState, _, << 0:3 >>) -> + {Data2, Rest, Eof} = split_payload(Data, Len), + Payload = unmask(Data2, MaskKey, ParsedLen), + validate_payload(Payload, Rest, Utf8State, ParsedLen, Type, FragState, Eof). + +split_payload(Data, Len) -> + case byte_size(Data) of + Len -> + {Data, <<>>, true}; + DataLen when DataLen < Len -> + {Data, <<>>, false}; + _ -> + << Data2:Len/binary, Rest/bits >> = Data, + {Data2, Rest, true} + end. + +validate_close_code(Code) -> + if + Code < 1000 -> error; + Code =:= 1004 -> error; + Code =:= 1005 -> error; + Code =:= 1006 -> error; + Code > 1011, Code < 3000 -> error; + Code > 4999 -> error; + true -> ok + end. + +unmask(Data, undefined, _) -> + Data; +unmask(Data, MaskKey, 0) -> + mask(Data, MaskKey, <<>>); +%% We unmask on the fly so we need to continue from the right mask byte. +unmask(Data, MaskKey, UnmaskedLen) -> + Left = UnmaskedLen rem 4, + Right = 4 - Left, + MaskKey2 = (MaskKey bsl (Left * 8)) + (MaskKey bsr (Right * 8)), + mask(Data, MaskKey2, <<>>). + +mask(<<>>, _, Unmasked) -> + Unmasked; +mask(<< O:32, Rest/bits >>, MaskKey, Acc) -> + T = O bxor MaskKey, + mask(Rest, MaskKey, << Acc/binary, T:32 >>); +mask(<< O:24 >>, MaskKey, Acc) -> + << MaskKey2:24, _:8 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:24 >>; +mask(<< O:16 >>, MaskKey, Acc) -> + << MaskKey2:16, _:16 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:16 >>; +mask(<< O:8 >>, MaskKey, Acc) -> + << MaskKey2:8, _:24 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:8 >>. + +inflate_frame(Data, Inflate, TakeOver, FragState, true) + when FragState =:= undefined; element(1, FragState) =:= fin -> + Data2 = zlib:inflate(Inflate, << Data/binary, 0, 0, 255, 255 >>), + case TakeOver of + no_takeover -> zlib:inflateReset(Inflate); + takeover -> ok + end, + iolist_to_binary(Data2); +inflate_frame(Data, Inflate, _T, _F, _E) -> + iolist_to_binary(zlib:inflate(Inflate, Data)). + +%% The Utf8State variable can be set to 'undefined' to disable the validation. +validate_payload(Payload, _, undefined, _, _, _, false) -> + {more, Payload, undefined}; +validate_payload(Payload, Rest, undefined, _, _, _, true) -> + {ok, Payload, undefined, Rest}; +%% Text frames and close control frames MUST have a payload that is valid UTF-8. 
+validate_payload(Payload, Rest, Utf8State, _, Type, _, Eof) when Type =:= text; Type =:= close -> + case validate_utf8(Payload, Utf8State) of + 1 -> {error, badencoding}; + Utf8State2 when not Eof -> {more, Payload, Utf8State2}; + 0 when Eof -> {ok, Payload, 0, Rest}; + _ -> {error, badencoding} + end; +validate_payload(Payload, Rest, Utf8State, _, fragment, {Fin, text, _}, Eof) -> + case validate_utf8(Payload, Utf8State) of + 1 -> {error, badencoding}; + 0 when Eof -> {ok, Payload, 0, Rest}; + Utf8State2 when Eof, Fin =:= nofin -> {ok, Payload, Utf8State2, Rest}; + Utf8State2 when not Eof -> {more, Payload, Utf8State2}; + _ -> {error, badencoding} + end; +validate_payload(Payload, _, Utf8State, _, _, _, false) -> + {more, Payload, Utf8State}; +validate_payload(Payload, Rest, Utf8State, _, _, _, true) -> + {ok, Payload, Utf8State, Rest}. + +%% Based on the Flexible and Economical UTF-8 Decoder algorithm by +%% Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). +%% +%% The original algorithm has been unrolled into all combinations of values for C and State +%% each with a clause. The common clauses were then grouped together. +%% +%% This function returns 0 on success, 1 on error, and 2..8 on incomplete data. +validate_utf8(<<>>, State) -> State; +validate_utf8(<< C, Rest/bits >>, 0) when C < 128 -> validate_utf8(Rest, 0); +validate_utf8(<< C, Rest/bits >>, 2) when C >= 128, C < 144 -> validate_utf8(Rest, 0); +validate_utf8(<< C, Rest/bits >>, 3) when C >= 128, C < 144 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 5) when C >= 128, C < 144 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 7) when C >= 128, C < 144 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 8) when C >= 128, C < 144 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 2) when C >= 144, C < 160 -> validate_utf8(Rest, 0); +validate_utf8(<< C, Rest/bits >>, 3) when C >= 144, C < 160 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 5) when C >= 144, C < 160 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 6) when C >= 144, C < 160 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 7) when C >= 144, C < 160 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 2) when C >= 160, C < 192 -> validate_utf8(Rest, 0); +validate_utf8(<< C, Rest/bits >>, 3) when C >= 160, C < 192 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 4) when C >= 160, C < 192 -> validate_utf8(Rest, 2); +validate_utf8(<< C, Rest/bits >>, 6) when C >= 160, C < 192 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 7) when C >= 160, C < 192 -> validate_utf8(Rest, 3); +validate_utf8(<< C, Rest/bits >>, 0) when C >= 194, C < 224 -> validate_utf8(Rest, 2); +validate_utf8(<< 224, Rest/bits >>, 0) -> validate_utf8(Rest, 4); +validate_utf8(<< C, Rest/bits >>, 0) when C >= 225, C < 237 -> validate_utf8(Rest, 3); +validate_utf8(<< 237, Rest/bits >>, 0) -> validate_utf8(Rest, 5); +validate_utf8(<< C, Rest/bits >>, 0) when C =:= 238; C =:= 239 -> validate_utf8(Rest, 3); +validate_utf8(<< 240, Rest/bits >>, 0) -> validate_utf8(Rest, 6); +validate_utf8(<< C, Rest/bits >>, 0) when C =:= 241; C =:= 242; C =:= 243 -> validate_utf8(Rest, 7); +validate_utf8(<< 244, Rest/bits >>, 0) -> validate_utf8(Rest, 8); +validate_utf8(_, _) -> 1. + +%% @doc Return a frame tuple from parsed state and data. + +-spec make_frame(frame_type(), binary(), close_code(), frag_state()) -> frame(). +%% Fragmented frame. 
+make_frame(fragment, Payload, _, {Fin, Type, _}) -> {fragment, Fin, Type, Payload}; +make_frame(text, Payload, _, _) -> {text, Payload}; +make_frame(binary, Payload, _, _) -> {binary, Payload}; +make_frame(close, <<>>, undefined, _) -> close; +make_frame(close, Payload, CloseCode, _) -> {close, CloseCode, Payload}; +make_frame(ping, <<>>, _, _) -> ping; +make_frame(ping, Payload, _, _) -> {ping, Payload}; +make_frame(pong, <<>>, _, _) -> pong; +make_frame(pong, Payload, _, _) -> {pong, Payload}. + +%% @doc Construct an unmasked Websocket frame. + +-spec frame(frame(), extensions()) -> iodata(). +%% Control frames. Control packets must not be > 125 in length. +frame(close, _) -> + << 1:1, 0:3, 8:4, 0:8 >>; +frame(ping, _) -> + << 1:1, 0:3, 9:4, 0:8 >>; +frame(pong, _) -> + << 1:1, 0:3, 10:4, 0:8 >>; +frame({close, Payload}, Extensions) -> + frame({close, 1000, Payload}, Extensions); +frame({close, StatusCode, Payload}, _) -> + Len = 2 + iolist_size(Payload), + true = Len =< 125, + [<< 1:1, 0:3, 8:4, 0:1, Len:7, StatusCode:16 >>, Payload]; +frame({ping, Payload}, _) -> + Len = iolist_size(Payload), + true = Len =< 125, + [<< 1:1, 0:3, 9:4, 0:1, Len:7 >>, Payload]; +frame({pong, Payload}, _) -> + Len = iolist_size(Payload), + true = Len =< 125, + [<< 1:1, 0:3, 10:4, 0:1, Len:7 >>, Payload]; +%% Data frames, deflate-frame extension. +frame({text, Payload}, #{deflate := Deflate, deflate_takeover := TakeOver}) + when Deflate =/= false -> + Payload2 = deflate_frame(Payload, Deflate, TakeOver), + Len = payload_length(Payload2), + [<< 1:1, 1:1, 0:2, 1:4, 0:1, Len/bits >>, Payload2]; +frame({binary, Payload}, #{deflate := Deflate, deflate_takeover := TakeOver}) + when Deflate =/= false -> + Payload2 = deflate_frame(Payload, Deflate, TakeOver), + Len = payload_length(Payload2), + [<< 1:1, 1:1, 0:2, 2:4, 0:1, Len/bits >>, Payload2]; +%% Data frames. +frame({text, Payload}, _) -> + Len = payload_length(Payload), + [<< 1:1, 0:3, 1:4, 0:1, Len/bits >>, Payload]; +frame({binary, Payload}, _) -> + Len = payload_length(Payload), + [<< 1:1, 0:3, 2:4, 0:1, Len/bits >>, Payload]. + +%% @doc Construct a masked Websocket frame. +%% +%% We use a mask key of 0 if there is no payload for close, ping and pong frames. + +-spec masked_frame(frame(), extensions()) -> iodata(). +%% Control frames. Control packets must not be > 125 in length. +masked_frame(close, _) -> + << 1:1, 0:3, 8:4, 1:1, 0:39 >>; +masked_frame(ping, _) -> + << 1:1, 0:3, 9:4, 1:1, 0:39 >>; +masked_frame(pong, _) -> + << 1:1, 0:3, 10:4, 1:1, 0:39 >>; +masked_frame({close, Payload}, Extensions) -> + frame({close, 1000, Payload}, Extensions); +masked_frame({close, StatusCode, Payload}, _) -> + Len = 2 + iolist_size(Payload), + true = Len =< 125, + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + [<< 1:1, 0:3, 8:4, 1:1, Len:7 >>, MaskKeyBin, mask(iolist_to_binary([<< StatusCode:16 >>, Payload]), MaskKey, <<>>)]; +masked_frame({ping, Payload}, _) -> + Len = iolist_size(Payload), + true = Len =< 125, + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + [<< 1:1, 0:3, 9:4, 1:1, Len:7 >>, MaskKeyBin, mask(iolist_to_binary(Payload), MaskKey, <<>>)]; +masked_frame({pong, Payload}, _) -> + Len = iolist_size(Payload), + true = Len =< 125, + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + [<< 1:1, 0:3, 10:4, 1:1, Len:7 >>, MaskKeyBin, mask(iolist_to_binary(Payload), MaskKey, <<>>)]; +%% Data frames, deflate-frame extension. 
+masked_frame({text, Payload}, #{deflate := Deflate, deflate_takeover := TakeOver}) + when Deflate =/= false -> + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + Payload2 = mask(deflate_frame(Payload, Deflate, TakeOver), MaskKey, <<>>), + Len = payload_length(Payload2), + [<< 1:1, 1:1, 0:2, 1:4, 1:1, Len/bits >>, MaskKeyBin, Payload2]; +masked_frame({binary, Payload}, #{deflate := Deflate, deflate_takeover := TakeOver}) + when Deflate =/= false -> + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + Payload2 = mask(deflate_frame(Payload, Deflate, TakeOver), MaskKey, <<>>), + Len = payload_length(Payload2), + [<< 1:1, 1:1, 0:2, 2:4, 1:1, Len/bits >>, MaskKeyBin, Payload2]; +%% Data frames. +masked_frame({text, Payload}, _) -> + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + Len = payload_length(Payload), + [<< 1:1, 0:3, 1:4, 1:1, Len/bits >>, MaskKeyBin, mask(iolist_to_binary(Payload), MaskKey, <<>>)]; +masked_frame({binary, Payload}, _) -> + MaskKeyBin = << MaskKey:32 >> = crypto:strong_rand_bytes(4), + Len = payload_length(Payload), + [<< 1:1, 0:3, 2:4, 1:1, Len/bits >>, MaskKeyBin, mask(iolist_to_binary(Payload), MaskKey, <<>>)]. + +payload_length(Payload) -> + case iolist_size(Payload) of + N when N =< 125 -> << N:7 >>; + N when N =< 16#ffff -> << 126:7, N:16 >>; + N when N =< 16#7fffffffffffffff -> << 127:7, N:64 >> + end. + +deflate_frame(Payload, Deflate, TakeOver) -> + Deflated = iolist_to_binary(zlib:deflate(Deflate, Payload, sync)), + case TakeOver of + no_takeover -> zlib:deflateReset(Deflate); + takeover -> ok + end, + Len = byte_size(Deflated) - 4, + case Deflated of + << Body:Len/binary, 0:8, 0:8, 255:8, 255:8 >> -> Body; + _ -> Deflated + end. diff --git a/src/wsNet/ranch.erl b/src/wsNet/ranch.erl new file mode 100644 index 0000000..c9cc035 --- /dev/null +++ b/src/wsNet/ranch.erl @@ -0,0 +1,625 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% Copyright (c) 2021, Maria Scott +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch). + +-export([start_listener/5]). +-export([normalize_opts/1]). +-export([stop_listener/1]). +-export([suspend_listener/1]). +-export([resume_listener/1]). +-export([stop_all_acceptors/0]). +-export([restart_all_acceptors/0]). +-export([child_spec/5]). +-export([handshake/1]). +-export([handshake/2]). +-export([handshake_continue/1]). +-export([handshake_continue/2]). +-export([handshake_cancel/1]). +-export([recv_proxy_header/2]). +-export([remove_connection/1]). +-export([get_status/1]). +-export([get_addr/1]). +-export([get_port/1]). +-export([get_max_connections/1]). +-export([set_max_connections/2]). +-export([get_transport_options/1]). +-export([set_transport_options/2]). +-export([get_protocol_options/1]). +-export([set_protocol_options/2]). +-export([info/0]). 
+-export([info/1]). +-export([procs/2]). +-export([wait_for_connections/3]). +-export([wait_for_connections/4]). +-export([filter_options/4]). +-export([set_option_default/3]). +-export([require/1]). +-export([log/4]). + +-type max_conns() :: non_neg_integer() | infinity. +-export_type([max_conns/0]). + +-type opts() :: any() | transport_opts(any()). +-export_type([opts/0]). + +-type alarm(Type, Callback) :: #{ + type := Type, + callback := Callback, + treshold := non_neg_integer(), + cooldown => non_neg_integer() +}. + +-type alarm_num_connections() :: alarm(num_connections, fun((ref(), term(), pid(), [pid()]) -> any())). + +-type transport_opts(SocketOpts) :: #{ + alarms => #{term() => alarm_num_connections()}, + connection_type => worker | supervisor, + handshake_timeout => timeout(), + logger => module(), + max_connections => max_conns(), + num_acceptors => pos_integer(), + num_conns_sups => pos_integer(), + num_listen_sockets => pos_integer(), + post_listen_callback => fun((term()) -> ok | {error, term()}), + shutdown => timeout() | brutal_kill, + socket_opts => SocketOpts +}. +-export_type([transport_opts/1]). + +-type ref() :: any(). +-export_type([ref/0]). + +-spec start_listener(ref(), module(), opts(), module(), any()) + -> supervisor:startchild_ret(). +start_listener(Ref, Transport, TransOpts0, Protocol, ProtoOpts) + when is_atom(Transport), is_atom(Protocol) -> + TransOpts = normalize_opts(TransOpts0), + _ = code:ensure_loaded(Transport), + case {erlang:function_exported(Transport, name, 0), validate_transport_opts(TransOpts)} of + {true, ok} -> + ChildSpec = #{id => {ranch_listener_sup, Ref}, start => {ranch_listener_sup, start_link, [ + Ref, Transport, TransOpts, Protocol, ProtoOpts + ]}, type => supervisor}, + maybe_started(supervisor:start_child(ranch_sup, ChildSpec)); + {false, _} -> + {error, {bad_transport, Transport}}; + {_, TransOptsError} -> + TransOptsError + end. + +-spec normalize_opts(opts()) -> transport_opts(any()). +normalize_opts(Map) when is_map(Map) -> + Map; +normalize_opts(Any) -> + #{socket_opts => Any}. + +-spec validate_transport_opts(transport_opts(any())) -> ok | {error, any()}. +validate_transport_opts(Opts) -> + maps:fold(fun + (Key, Value, ok) -> + case validate_transport_opt(Key, Value, Opts) of + true -> + ok; + false -> + {error, {bad_option, Key}} + end; + (_, _, Acc) -> + Acc + end, ok, Opts). + +-spec validate_transport_opt(any(), any(), transport_opts(any())) -> boolean(). 
+validate_transport_opt(connection_type, worker, _) -> + true; +validate_transport_opt(connection_type, supervisor, _) -> + true; +validate_transport_opt(handshake_timeout, infinity, _) -> + true; +validate_transport_opt(handshake_timeout, Value, _) -> + is_integer(Value) andalso Value >= 0; +validate_transport_opt(max_connections, infinity, _) -> + true; +validate_transport_opt(max_connections, Value, _) -> + is_integer(Value) andalso Value >= 0; +validate_transport_opt(alarms, Alarms, _) -> + maps:fold( + fun + (_, Opts, true) -> + validate_alarm(Opts); + (_, _, false) -> + false + end, + true, + Alarms); +validate_transport_opt(logger, Value, _) -> + is_atom(Value); +validate_transport_opt(num_acceptors, Value, _) -> + is_integer(Value) andalso Value > 0; +validate_transport_opt(num_conns_sups, Value, _) -> + is_integer(Value) andalso Value > 0; +validate_transport_opt(num_listen_sockets, Value, Opts) -> + is_integer(Value) andalso Value > 0 + andalso Value =< maps:get(num_acceptors, Opts, 10); +validate_transport_opt(post_listen_callback, Value, _) -> + is_function(Value, 1); +validate_transport_opt(shutdown, brutal_kill, _) -> + true; +validate_transport_opt(shutdown, infinity, _) -> + true; +validate_transport_opt(shutdown, Value, _) -> + is_integer(Value) andalso Value >= 0; +validate_transport_opt(socket_opts, _, _) -> + true; +validate_transport_opt(_, _, _) -> + false. + +validate_alarm(Alarm = #{type := num_connections, treshold := Treshold, + callback := Callback}) -> + is_integer(Treshold) andalso Treshold >= 0 + andalso is_function(Callback, 4) + andalso case Alarm of + #{cooldown := Cooldown} -> + is_integer(Cooldown) andalso Cooldown >= 0; + _ -> + true + end; +validate_alarm(_) -> + false. + +maybe_started({error, {{shutdown, + {failed_to_start_child, ranch_acceptors_sup, + {listen_error, _, Reason}}}, _}} = Error) -> + start_error(Reason, Error); +maybe_started(Res) -> + Res. + +start_error(E=eaddrinuse, _) -> {error, E}; +start_error(E=eacces, _) -> {error, E}; +start_error(E=no_cert, _) -> {error, E}; +start_error(_, Error) -> Error. + +-spec stop_listener(ref()) -> ok | {error, not_found}. +stop_listener(Ref) -> + [_, Transport, _, _, _] = ranch_server:get_listener_start_args(Ref), + TransOpts = get_transport_options(Ref), + case supervisor:terminate_child(ranch_sup, {ranch_listener_sup, Ref}) of + ok -> + _ = supervisor:delete_child(ranch_sup, {ranch_listener_sup, Ref}), + ranch_server:cleanup_listener_opts(Ref), + Transport:cleanup(TransOpts); + {error, Reason} -> + {error, Reason} + end. + +-spec suspend_listener(ref()) -> ok | {error, any()}. +suspend_listener(Ref) -> + case get_status(Ref) of + running -> + ListenerSup = ranch_server:get_listener_sup(Ref), + ok = ranch_server:set_addr(Ref, {undefined, undefined}), + supervisor:terminate_child(ListenerSup, ranch_acceptors_sup); + suspended -> + ok + end. + +-spec resume_listener(ref()) -> ok | {error, any()}. +resume_listener(Ref) -> + case get_status(Ref) of + running -> + ok; + suspended -> + ListenerSup = ranch_server:get_listener_sup(Ref), + Res = supervisor:restart_child(ListenerSup, ranch_acceptors_sup), + maybe_resumed(Res) + end. + +maybe_resumed(Error={error, {listen_error, _, Reason}}) -> + start_error(Reason, Error); +maybe_resumed({ok, _}) -> + ok; +maybe_resumed({ok, _, _}) -> + ok; +maybe_resumed(Res) -> + Res. + +-spec stop_all_acceptors() -> ok. +stop_all_acceptors() -> + _ = [ok = do_acceptors(Pid, terminate_child) + || {_, Pid} <- ranch_server:get_listener_sups()], + ok. 
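Editorial note, not part of the patch: the suspend/resume API defined above can be exercised roughly as in the following sketch. The listener ref my_listener is hypothetical; pattern matches crash if a call returns an error.

    %% Minimal sketch: stop accepting new connections (established ones keep running),
    %% check the listener status, then resume accepting.
    ok = ranch:suspend_listener(my_listener),
    suspended = ranch:get_status(my_listener),
    ok = ranch:resume_listener(my_listener),
    running = ranch:get_status(my_listener).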
+ +-spec restart_all_acceptors() -> ok. +restart_all_acceptors() -> + _ = [ok = do_acceptors(Pid, restart_child) + || {_, Pid} <- ranch_server:get_listener_sups()], + ok. + +do_acceptors(ListenerSup, F) -> + ListenerChildren = supervisor:which_children(ListenerSup), + case lists:keyfind(ranch_acceptors_sup, 1, ListenerChildren) of + {_, AcceptorsSup, _, _} when is_pid(AcceptorsSup) -> + AcceptorChildren = supervisor:which_children(AcceptorsSup), + %% @todo What about errors? + _ = [supervisor:F(AcceptorsSup, AcceptorId) + || {AcceptorId, _, _, _} <- AcceptorChildren], + ok; + {_, Atom, _, _} -> + {error, Atom} + end. + +-spec child_spec(ref(), module(), opts(), module(), any()) + -> supervisor:child_spec(). +child_spec(Ref, Transport, TransOpts0, Protocol, ProtoOpts) -> + TransOpts = normalize_opts(TransOpts0), + #{id => {ranch_embedded_sup, Ref}, start => {ranch_embedded_sup, start_link, [ + Ref, Transport, TransOpts, Protocol, ProtoOpts + ]}, type => supervisor}. + +-spec handshake(ref()) -> {ok, ranch_transport:socket()} | {continue, any()}. +handshake(Ref) -> + handshake1(Ref, undefined). + +-spec handshake(ref(), any()) -> {ok, ranch_transport:socket()} | {continue, any()}. +handshake(Ref, Opts) -> + handshake1(Ref, {opts, Opts}). + +handshake1(Ref, Opts) -> + receive {handshake, Ref, Transport, CSocket, Timeout} -> + Handshake = handshake_transport(Transport, handshake, CSocket, Opts, Timeout), + handshake_result(Handshake, Ref, Transport, CSocket, Timeout) + end. + +-spec handshake_continue(ref()) -> {ok, ranch_transport:socket()}. +handshake_continue(Ref) -> + handshake_continue1(Ref, undefined). + +-spec handshake_continue(ref(), any()) -> {ok, ranch_transport:socket()}. +handshake_continue(Ref, Opts) -> + handshake_continue1(Ref, {opts, Opts}). + +handshake_continue1(Ref, Opts) -> + receive {handshake_continue, Ref, Transport, CSocket, Timeout} -> + Handshake = handshake_transport(Transport, handshake_continue, CSocket, Opts, Timeout), + handshake_result(Handshake, Ref, Transport, CSocket, Timeout) + end. + +handshake_transport(Transport, Fun, CSocket, undefined, Timeout) -> + Transport:Fun(CSocket, Timeout); +handshake_transport(Transport, Fun, CSocket, {opts, Opts}, Timeout) -> + Transport:Fun(CSocket, Opts, Timeout). + +handshake_result(Result, Ref, Transport, CSocket, Timeout) -> + case Result of + OK = {ok, _} -> + OK; + {ok, CSocket2, Info} -> + self() ! {handshake_continue, Ref, Transport, CSocket2, Timeout}, + {continue, Info}; + {error, {tls_alert, _}} -> + ok = Transport:close(CSocket), + exit(normal); + {error, Reason} when Reason =:= timeout; Reason =:= closed -> + ok = Transport:close(CSocket), + exit(normal); + {error, Reason} -> + ok = Transport:close(CSocket), + error(Reason) + end. + +-spec handshake_cancel(ref()) -> ok. +handshake_cancel(Ref) -> + receive {handshake_continue, Ref, Transport, CSocket, _} -> + Transport:handshake_cancel(CSocket) + end. + +%% Unlike handshake/2 this function always return errors because +%% the communication between the proxy and the server are expected +%% to be reliable. If there is a problem while receiving the proxy +%% header, we probably want to know about it. +-spec recv_proxy_header(ref(), timeout()) + -> {ok, ranch_proxy_header:proxy_info()} + | {error, closed | atom()} + | {error, protocol_error, atom()}. +recv_proxy_header(Ref, Timeout) -> + receive HandshakeState={handshake, Ref, Transport, CSocket, _} -> + self() ! HandshakeState, + Transport:recv_proxy_header(CSocket, Timeout) + end. 
+ +-spec remove_connection(ref()) -> ok. +remove_connection(Ref) -> + ListenerSup = ranch_server:get_listener_sup(Ref), + {_, ConnsSupSup, _, _} = lists:keyfind(ranch_conns_sup_sup, 1, + supervisor:which_children(ListenerSup)), + _ = [ConnsSup ! {remove_connection, Ref, self()} || + {_, ConnsSup, _, _} <- supervisor:which_children(ConnsSupSup)], + ok. + +-spec get_status(ref()) -> running | suspended. +get_status(Ref) -> + ListenerSup = ranch_server:get_listener_sup(Ref), + Children = supervisor:which_children(ListenerSup), + case lists:keyfind(ranch_acceptors_sup, 1, Children) of + {_, undefined, _, _} -> + suspended; + _ -> + running + end. + +-spec get_addr(ref()) -> {inet:ip_address(), inet:port_number()} | + {local, binary()} | {undefined, undefined}. +get_addr(Ref) -> + ranch_server:get_addr(Ref). + +-spec get_port(ref()) -> inet:port_number() | undefined. +get_port(Ref) -> + case get_addr(Ref) of + {local, _} -> + undefined; + {_, Port} -> + Port + end. + +-spec get_connections(ref(), active|all) -> non_neg_integer(). +get_connections(Ref, active) -> + SupCounts = [ranch_conns_sup:active_connections(ConnsSup) || + {_, ConnsSup} <- ranch_server:get_connections_sups(Ref)], + lists:sum(SupCounts); +get_connections(Ref, all) -> + SupCounts = [proplists:get_value(active, supervisor:count_children(ConnsSup)) || + {_, ConnsSup} <- ranch_server:get_connections_sups(Ref)], + lists:sum(SupCounts). + +-spec get_max_connections(ref()) -> max_conns(). +get_max_connections(Ref) -> + ranch_server:get_max_connections(Ref). + +-spec set_max_connections(ref(), max_conns()) -> ok. +set_max_connections(Ref, MaxConnections) -> + ranch_server:set_max_connections(Ref, MaxConnections). + +-spec get_transport_options(ref()) -> transport_opts(any()). +get_transport_options(Ref) -> + ranch_server:get_transport_options(Ref). + +-spec set_transport_options(ref(), opts()) -> ok | {error, term()}. +set_transport_options(Ref, TransOpts0) -> + TransOpts = normalize_opts(TransOpts0), + case validate_transport_opts(TransOpts) of + ok -> + ok = ranch_server:set_transport_options(Ref, TransOpts), + ok = apply_transport_options(Ref, TransOpts); + TransOptsError -> + TransOptsError + end. + +apply_transport_options(Ref, TransOpts) -> + _ = [ConnsSup ! {set_transport_options, TransOpts} + || {_, ConnsSup} <- ranch_server:get_connections_sups(Ref)], + ok. + +-spec get_protocol_options(ref()) -> any(). +get_protocol_options(Ref) -> + ranch_server:get_protocol_options(Ref). + +-spec set_protocol_options(ref(), any()) -> ok. +set_protocol_options(Ref, Opts) -> + ranch_server:set_protocol_options(Ref, Opts). + +-spec info() -> #{ref() := #{atom() := term()}}. +info() -> + lists:foldl( + fun ({Ref, Pid}, Acc) -> + Acc#{Ref => listener_info(Ref, Pid)} + end, + #{}, + ranch_server:get_listener_sups() + ). + +-spec info(ref()) -> #{atom() := term()}. +info(Ref) -> + Pid = ranch_server:get_listener_sup(Ref), + listener_info(Ref, Pid). 
+ +listener_info(Ref, Pid) -> + [_, Transport, _, Protocol, _] = ranch_server:get_listener_start_args(Ref), + Status = get_status(Ref), + {IP, Port} = case get_addr(Ref) of + Addr = {local, _} -> + {Addr, undefined}; + Addr -> + Addr + end, + MaxConns = get_max_connections(Ref), + TransOpts = ranch_server:get_transport_options(Ref), + ProtoOpts = get_protocol_options(Ref), + #{ + pid => Pid, + status => Status, + ip => IP, + port => Port, + max_connections => MaxConns, + active_connections => get_connections(Ref, active), + all_connections => get_connections(Ref, all), + transport => Transport, + transport_options => TransOpts, + protocol => Protocol, + protocol_options => ProtoOpts, + metrics => metrics(Ref) + }. + +-spec procs(ref(), acceptors | connections) -> [pid()]. +procs(Ref, Type) -> + ListenerSup = ranch_server:get_listener_sup(Ref), + procs1(ListenerSup, Type). + +procs1(ListenerSup, acceptors) -> + {_, SupPid, _, _} = lists:keyfind(ranch_acceptors_sup, 1, + supervisor:which_children(ListenerSup)), + try + [Pid || {_, Pid, _, _} <- supervisor:which_children(SupPid)] + catch exit:{noproc, _} -> + [] + end; +procs1(ListenerSup, connections) -> + {_, SupSupPid, _, _} = lists:keyfind(ranch_conns_sup_sup, 1, + supervisor:which_children(ListenerSup)), + Conns= + lists:map(fun ({_, SupPid, _, _}) -> + [Pid || {_, Pid, _, _} <- supervisor:which_children(SupPid)] + end, + supervisor:which_children(SupSupPid) + ), + lists:flatten(Conns). + +-spec metrics(ref()) -> #{}. +metrics(Ref) -> + Counters = ranch_server:get_stats_counters(Ref), + CounterInfo = counters:info(Counters), + NumCounters = maps:get(size, CounterInfo), + NumConnsSups = NumCounters div 2, + lists:foldl( + fun (Id, Acc) -> + Acc#{ + {conns_sup, Id, accept} => counters:get(Counters, 2*Id-1), + {conns_sup, Id, terminate} => counters:get(Counters, 2*Id) + } + end, + #{}, + lists:seq(1, NumConnsSups) + ). + +-spec wait_for_connections + (ref(), '>' | '>=' | '==' | '=<', non_neg_integer()) -> ok; + (ref(), '<', pos_integer()) -> ok. +wait_for_connections(Ref, Op, NumConns) -> + wait_for_connections(Ref, Op, NumConns, 1000). + +-spec wait_for_connections + (ref(), '>' | '>=' | '==' | '=<', non_neg_integer(), non_neg_integer()) -> ok; + (ref(), '<', pos_integer(), non_neg_integer()) -> ok. +wait_for_connections(Ref, Op, NumConns, Interval) -> + validate_op(Op, NumConns), + validate_num_conns(NumConns), + validate_interval(Interval), + wait_for_connections_loop(Ref, Op, NumConns, Interval). + +validate_op('>', _) -> ok; +validate_op('>=', _) -> ok; +validate_op('==', _) -> ok; +validate_op('=<', _) -> ok; +validate_op('<', NumConns) when NumConns > 0 -> ok; +validate_op(_, _) -> error(badarg). + +validate_num_conns(NumConns) when is_integer(NumConns), NumConns >= 0 -> ok; +validate_num_conns(_) -> error(badarg). + +validate_interval(Interval) when is_integer(Interval), Interval >= 0 -> ok; +validate_interval(_) -> error(badarg). + +wait_for_connections_loop(Ref, Op, NumConns, Interval) -> + CurConns = try + get_connections(Ref, all) + catch _:_ -> + 0 + end, + case erlang:Op(CurConns, NumConns) of + true -> + ok; + false when Interval =:= 0 -> + wait_for_connections_loop(Ref, Op, NumConns, Interval); + false -> + timer:sleep(Interval), + wait_for_connections_loop(Ref, Op, NumConns, Interval) + end. + +-spec filter_options([inet | inet6 | {atom(), any()} | {raw, any(), any(), any()}], + [atom()], Acc, module()) -> Acc when Acc :: [any()]. 
+filter_options(UserOptions, DisallowedKeys, DefaultOptions, Logger) -> + AllowedOptions = filter_user_options(UserOptions, DisallowedKeys, Logger), + lists:foldl(fun merge_options/2, DefaultOptions, AllowedOptions). + +%% 2-tuple options. +filter_user_options([Opt = {Key, _}|Tail], DisallowedKeys, Logger) -> + case lists:member(Key, DisallowedKeys) of + false -> + [Opt|filter_user_options(Tail, DisallowedKeys, Logger)]; + true -> + filter_options_warning(Opt, Logger), + filter_user_options(Tail, DisallowedKeys, Logger) + end; +%% Special option forms. +filter_user_options([inet|Tail], DisallowedKeys, Logger) -> + [inet|filter_user_options(Tail, DisallowedKeys, Logger)]; +filter_user_options([inet6|Tail], DisallowedKeys, Logger) -> + [inet6|filter_user_options(Tail, DisallowedKeys, Logger)]; +filter_user_options([Opt = {raw, _, _, _}|Tail], DisallowedKeys, Logger) -> + [Opt|filter_user_options(Tail, DisallowedKeys, Logger)]; +filter_user_options([Opt|Tail], DisallowedKeys, Logger) -> + filter_options_warning(Opt, Logger), + filter_user_options(Tail, DisallowedKeys, Logger); +filter_user_options([], _, _) -> + []. + +filter_options_warning(Opt, Logger) -> + log(warning, + "Transport option ~p unknown or invalid.~n", + [Opt], Logger). + +merge_options({Key, _} = Option, OptionList) -> + lists:keystore(Key, 1, OptionList, Option); +merge_options(Option, OptionList) -> + [Option|OptionList]. + +-spec set_option_default(Opts, atom(), any()) + -> Opts when Opts :: [{atom(), any()}]. +set_option_default(Opts, Key, Value) -> + case lists:keymember(Key, 1, Opts) of + true -> Opts; + false -> [{Key, Value}|Opts] + end. + +-spec require([atom()]) -> ok. +require([]) -> + ok; +require([App|Tail]) -> + case application:start(App) of + ok -> ok; + {error, {already_started, App}} -> ok + end, + require(Tail). + +-spec log(logger:level(), io:format(), list(), module() | #{logger => module()}) -> ok. +log(Level, Format, Args, Logger) when is_atom(Logger) -> + log(Level, Format, Args, #{logger => Logger}); +log(Level, Format, Args, #{logger := Logger}) + when Logger =/= error_logger -> + _ = Logger:Level(Format, Args), + ok; +%% Because error_logger does not have all the levels +%% we accept we have to do some mapping to error_logger functions. +log(Level, Format, Args, _) -> + Function = case Level of + emergency -> error_msg; + alert -> error_msg; + critical -> error_msg; + error -> error_msg; + warning -> warning_msg; + notice -> warning_msg; + info -> info_msg; + debug -> info_msg + end, + error_logger:Function(Format, Args). diff --git a/src/wsNet/ranch_acceptor.erl b/src/wsNet/ranch_acceptor.erl new file mode 100644 index 0000000..7d684cd --- /dev/null +++ b/src/wsNet/ranch_acceptor.erl @@ -0,0 +1,72 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_acceptor). 
+ +-export([start_link/5]). +-export([init/4]). +-export([loop/5]). + +-spec start_link(ranch:ref(), pos_integer(), inet:socket(), module(), module()) + -> {ok, pid()}. +start_link(Ref, AcceptorId, LSocket, Transport, Logger) -> + ConnsSup = ranch_server:get_connections_sup(Ref, AcceptorId), + Pid = spawn_link(?MODULE, init, [LSocket, Transport, Logger, ConnsSup]), + {ok, Pid}. + +-spec init(inet:socket(), module(), module(), pid()) -> no_return(). +init(LSocket, Transport, Logger, ConnsSup) -> + MonitorRef = monitor(process, ConnsSup), + loop(LSocket, Transport, Logger, ConnsSup, MonitorRef). + +-spec loop(inet:socket(), module(), module(), pid(), reference()) -> no_return(). +loop(LSocket, Transport, Logger, ConnsSup, MonitorRef) -> + _ = case Transport:accept(LSocket, infinity) of + {ok, CSocket} -> + case Transport:controlling_process(CSocket, ConnsSup) of + ok -> + %% This call will not return until process has been started + %% AND we are below the maximum number of connections. + ranch_conns_sup:start_protocol(ConnsSup, MonitorRef, + CSocket); + {error, _} -> + Transport:close(CSocket) + end; + %% Reduce the accept rate if we run out of file descriptors. + %% We can't accept anymore anyway, so we might as well wait + %% a little for the situation to resolve itself. + {error, emfile} -> + ranch:log(warning, + "Ranch acceptor reducing accept rate: out of file descriptors~n", + [], Logger), + receive after 100 -> ok end; + %% Exit if the listening socket got closed. + {error, closed} -> + exit(closed); + %% Continue otherwise. + {error, _} -> + ok + end, + flush(Logger), + ?MODULE:loop(LSocket, Transport, Logger, ConnsSup, MonitorRef). + +flush(Logger) -> + receive Msg -> + ranch:log(warning, + "Ranch acceptor received unexpected message: ~p~n", + [Msg], Logger), + flush(Logger) + after 0 -> + ok + end. diff --git a/src/wsNet/ranch_acceptors_sup.erl b/src/wsNet/ranch_acceptors_sup.erl new file mode 100644 index 0000000..52c68dc --- /dev/null +++ b/src/wsNet/ranch_acceptors_sup.erl @@ -0,0 +1,103 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_acceptors_sup). +-behaviour(supervisor). + +-export([start_link/3]). +-export([init/1]). + +-spec start_link(ranch:ref(), module(), module()) + -> {ok, pid()}. +start_link(Ref, Transport, Logger) -> + supervisor:start_link(?MODULE, [Ref, Transport, Logger]). + +-spec init([term()]) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}. 
+init([Ref, Transport, Logger]) -> + TransOpts = ranch_server:get_transport_options(Ref), + NumAcceptors = maps:get(num_acceptors, TransOpts, 10), + NumListenSockets = maps:get(num_listen_sockets, TransOpts, 1), + LSockets = case get(lsockets) of + undefined -> + LSockets1 = start_listen_sockets(Ref, NumListenSockets, Transport, TransOpts, Logger), + put(lsockets, LSockets1), + LSockets1; + LSockets1 -> + LSockets1 + end, + Procs = [begin + LSocketId = (AcceptorId rem NumListenSockets) + 1, + {_, LSocket} = lists:keyfind(LSocketId, 1, LSockets), + #{ + id => {acceptor, self(), AcceptorId}, + start => {ranch_acceptor, start_link, [Ref, AcceptorId, LSocket, Transport, Logger]}, + shutdown => brutal_kill + } + end || AcceptorId <- lists:seq(1, NumAcceptors)], + {ok, {#{intensity => 1 + ceil(math:log2(NumAcceptors))}, Procs}}. + +-spec start_listen_sockets(any(), pos_integer(), module(), map(), module()) + -> [{pos_integer(), inet:socket()}]. +start_listen_sockets(Ref, NumListenSockets, Transport, TransOpts0, Logger) when NumListenSockets > 0 -> + BaseSocket = start_listen_socket(Ref, Transport, TransOpts0, Logger), + {ok, Addr} = Transport:sockname(BaseSocket), + ExtraSockets = case Addr of + {local, _} when NumListenSockets > 1 -> + listen_error(Ref, Transport, TransOpts0, reuseport_local, Logger); + {local, _} -> + []; + {_, Port} -> + SocketOpts = maps:get(socket_opts, TransOpts0, []), + SocketOpts1 = lists:keystore(port, 1, SocketOpts, {port, Port}), + TransOpts1 = TransOpts0#{socket_opts => SocketOpts1}, + [{N, start_listen_socket(Ref, Transport, TransOpts1, Logger)} + || N <- lists:seq(2, NumListenSockets)] + end, + ranch_server:set_addr(Ref, Addr), + [{1, BaseSocket}|ExtraSockets]. + +-spec start_listen_socket(any(), module(), map(), module()) -> inet:socket(). +start_listen_socket(Ref, Transport, TransOpts, Logger) -> + case Transport:listen(TransOpts) of + {ok, Socket} -> + PostListenCb = maps:get(post_listen_callback, TransOpts, fun (_) -> ok end), + case PostListenCb(Socket) of + ok -> + Socket; + {error, Reason} -> + listen_error(Ref, Transport, TransOpts, Reason, Logger) + end; + {error, Reason} -> + listen_error(Ref, Transport, TransOpts, Reason, Logger) + end. + +-spec listen_error(any(), module(), any(), atom(), module()) -> no_return(). +listen_error(Ref, Transport, TransOpts0, Reason, Logger) -> + SocketOpts0 = maps:get(socket_opts, TransOpts0, []), + SocketOpts1 = [{cert, '...'}|proplists:delete(cert, SocketOpts0)], + SocketOpts2 = [{key, '...'}|proplists:delete(key, SocketOpts1)], + SocketOpts = [{cacerts, '...'}|proplists:delete(cacerts, SocketOpts2)], + TransOpts = TransOpts0#{socket_opts => SocketOpts}, + ranch:log(error, + "Failed to start Ranch listener ~p in ~p:listen(~999999p) for reason ~p (~s)~n", + [Ref, Transport, TransOpts, Reason, format_error(Reason)], Logger), + exit({listen_error, Ref, Reason}). + +format_error(no_cert) -> + "no certificate provided; see cert, certfile, sni_fun or sni_hosts options"; +format_error(reuseport_local) -> + "num_listen_sockets must be set to 1 for local sockets"; +format_error(Reason) -> + inet:format_error(Reason). 
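Editorial note, not part of the patch: the num_acceptors and num_listen_sockets options consumed by this supervisor come from the transport options given at listener start. A minimal sketch, assuming a hypothetical ref echo_listener and protocol module my_echo_protocol (ranch_tcp is the TCP transport included in this commit):

    %% Start a TCP listener with 20 acceptors sharing a single listen socket.
    {ok, _} = ranch:start_listener(echo_listener, ranch_tcp,
        #{socket_opts => [{port, 5555}],
          num_acceptors => 20,
          num_listen_sockets => 1,
          max_connections => 1024},
        my_echo_protocol, []).

Each acceptor is mapped onto a listen socket by (AcceptorId rem NumListenSockets) + 1, so with more than one listen socket the acceptors are spread round-robin across the sockets.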
diff --git a/src/wsNet/ranch_app.erl b/src/wsNet/ranch_app.erl new file mode 100644 index 0000000..f6aeb26 --- /dev/null +++ b/src/wsNet/ranch_app.erl @@ -0,0 +1,48 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). +-export([profile_output/0]). + +-spec start(application:start_type(), term()) -> {ok, pid()} | {error, term()}. +start(_, _) -> + _ = consider_profiling(), + ranch_server = ets:new(ranch_server, [ + ordered_set, public, named_table]), + ranch_sup:start_link(). + +-spec stop(term()) -> ok. +stop(_) -> + ok. + +-spec profile_output() -> ok. +profile_output() -> + eprof:stop_profiling(), + eprof:log("procs.profile"), + eprof:analyze(procs), + eprof:log("total.profile"), + eprof:analyze(total). + +consider_profiling() -> + case application:get_env(profile) of + {ok, true} -> + {ok, _Pid} = eprof:start(), + eprof:start_profiling([self()]); + _ -> + not_profiling + end. diff --git a/src/wsNet/ranch_conns_sup.erl b/src/wsNet/ranch_conns_sup.erl new file mode 100644 index 0000000..649f856 --- /dev/null +++ b/src/wsNet/ranch_conns_sup.erl @@ -0,0 +1,508 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2021, Maria Scott +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Make sure to never reload this module outside a release upgrade, +%% as calling l(ranch_conns_sup) twice will kill the process and all +%% the currently open connections. +-module(ranch_conns_sup). + +%% API. +-export([start_link/6]). +-export([start_protocol/3]). +-export([active_connections/1]). + +%% Supervisor internals. +-export([init/7]). +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-type conn_type() :: worker | supervisor. +-type shutdown() :: brutal_kill | timeout(). 
+ +-record(state, { + parent = undefined :: pid(), + ref :: ranch:ref(), + id :: pos_integer(), + conn_type :: conn_type(), + shutdown :: shutdown(), + transport = undefined :: module(), + protocol = undefined :: module(), + opts :: any(), + handshake_timeout :: timeout(), + max_conns = undefined :: ranch:max_conns(), + stats_counters_ref :: counters:counters_ref(), + alarms = #{} :: #{term() => {map(), undefined | reference()}}, + logger = undefined :: module() +}). + +%% API. + +-spec start_link(ranch:ref(), pos_integer(), module(), any(), module(), module()) -> {ok, pid()}. +start_link(Ref, Id, Transport, TransOpts, Protocol, Logger) -> + proc_lib:start_link(?MODULE, init, + [self(), Ref, Id, Transport, TransOpts, Protocol, Logger]). + +%% We can safely assume we are on the same node as the supervisor. +%% +%% We can also safely avoid having a monitor and a timeout here +%% because only three things can happen: +%% * The supervisor died; rest_for_one strategy killed all acceptors +%% so this very calling process is going to di-- +%% * There's too many connections, the supervisor will resume the +%% acceptor only when we get below the limit again. +%% * The supervisor is overloaded, there's either too many acceptors +%% or the max_connections limit is too large. It's better if we +%% don't keep accepting connections because this leaves +%% more room for the situation to be resolved. +%% +%% We do not need the reply, we only need the ok from the supervisor +%% to continue. The supervisor sends its own pid when the acceptor can +%% continue. +-spec start_protocol(pid(), reference(), inet:socket()) -> ok. +start_protocol(SupPid, MonitorRef, Socket) -> + SupPid ! {?MODULE, start_protocol, self(), Socket}, + receive + SupPid -> + ok; + {'DOWN', MonitorRef, process, SupPid, Reason} -> + error(Reason) + end. + +%% We can't make the above assumptions here. This function might be +%% called from anywhere. +-spec active_connections(pid()) -> non_neg_integer(). +active_connections(SupPid) -> + Tag = erlang:monitor(process, SupPid), + catch erlang:send(SupPid, {?MODULE, active_connections, self(), Tag}, + [noconnect]), + receive + {Tag, Ret} -> + erlang:demonitor(Tag, [flush]), + Ret; + {'DOWN', Tag, _, _, noconnection} -> + exit({nodedown, node(SupPid)}); + {'DOWN', Tag, _, _, Reason} -> + exit(Reason) + after 5000 -> + erlang:demonitor(Tag, [flush]), + exit(timeout) + end. + +%% Supervisor internals. + +-spec init(pid(), ranch:ref(), pos_integer(), module(), any(), module(), module()) -> no_return(). +init(Parent, Ref, Id, Transport, TransOpts, Protocol, Logger) -> + process_flag(trap_exit, true), + ok = ranch_server:set_connections_sup(Ref, Id, self()), + MaxConns = ranch_server:get_max_connections(Ref), + Alarms = get_alarms(TransOpts), + ConnType = maps:get(connection_type, TransOpts, worker), + Shutdown = maps:get(shutdown, TransOpts, 5000), + HandshakeTimeout = maps:get(handshake_timeout, TransOpts, 5000), + ProtoOpts = ranch_server:get_protocol_options(Ref), + StatsCounters = ranch_server:get_stats_counters(Ref), + ok = proc_lib:init_ack(Parent, {ok, self()}), + loop(#state{parent=Parent, ref=Ref, id=Id, conn_type=ConnType, + shutdown=Shutdown, transport=Transport, protocol=Protocol, + opts=ProtoOpts, stats_counters_ref=StatsCounters, + handshake_timeout=HandshakeTimeout, + max_conns=MaxConns, alarms=Alarms, + logger=Logger}, 0, 0, []). 
+ +loop(State=#state{parent=Parent, ref=Ref, id=Id, conn_type=ConnType, + transport=Transport, protocol=Protocol, opts=Opts, stats_counters_ref=StatsCounters, + alarms=Alarms, max_conns=MaxConns, logger=Logger}, CurConns, NbChildren, Sleepers) -> + receive + {?MODULE, start_protocol, To, Socket} -> + try Protocol:start_link(Ref, Transport, Opts) of + {ok, Pid} -> + inc_accept(StatsCounters, Id, 1), + handshake(State, CurConns, NbChildren, Sleepers, To, Socket, Pid, Pid); + {ok, SupPid, ProtocolPid} when ConnType =:= supervisor -> + inc_accept(StatsCounters, Id, 1), + handshake(State, CurConns, NbChildren, Sleepers, To, Socket, SupPid, ProtocolPid); + Ret -> + To ! self(), + ranch:log(error, + "Ranch listener ~p connection process start failure; " + "~p:start_link/3 returned: ~999999p~n", + [Ref, Protocol, Ret], Logger), + Transport:close(Socket), + loop(State, CurConns, NbChildren, Sleepers) + catch Class:Reason -> + To ! self(), + ranch:log(error, + "Ranch listener ~p connection process start failure; " + "~p:start_link/3 crashed with reason: ~p:~999999p~n", + [Ref, Protocol, Class, Reason], Logger), + Transport:close(Socket), + loop(State, CurConns, NbChildren, Sleepers) + end; + {?MODULE, active_connections, To, Tag} -> + To ! {Tag, CurConns}, + loop(State, CurConns, NbChildren, Sleepers); + %% Remove a connection from the count of connections. + {remove_connection, Ref, Pid} -> + case put(Pid, removed) of + active when Sleepers =:= [] -> + loop(State, CurConns - 1, NbChildren, Sleepers); + active -> + [To|Sleepers2] = Sleepers, + To ! self(), + loop(State, CurConns - 1, NbChildren, Sleepers2); + removed -> + loop(State, CurConns, NbChildren, Sleepers); + undefined -> + _ = erase(Pid), + loop(State, CurConns, NbChildren, Sleepers) + end; + %% Upgrade the max number of connections allowed concurrently. + %% We resume all sleeping acceptors if this number increases. + {set_max_conns, MaxConns2} when MaxConns2 > MaxConns -> + _ = [To ! self() || To <- Sleepers], + loop(State#state{max_conns=MaxConns2}, + CurConns, NbChildren, []); + {set_max_conns, MaxConns2} -> + loop(State#state{max_conns=MaxConns2}, + CurConns, NbChildren, Sleepers); + %% Upgrade the transport options. + {set_transport_options, TransOpts} -> + set_transport_options(State, CurConns, NbChildren, Sleepers, TransOpts); + %% Upgrade the protocol options. + {set_protocol_options, Opts2} -> + loop(State#state{opts=Opts2}, + CurConns, NbChildren, Sleepers); + {timeout, _, {activate_alarm, AlarmName}} when is_map_key(AlarmName, Alarms) -> + {AlarmOpts, _} = maps:get(AlarmName, Alarms), + NewAlarm = trigger_alarm(Ref, AlarmName, {AlarmOpts, undefined}, CurConns), + loop(State#state{alarms=Alarms#{AlarmName => NewAlarm}}, CurConns, NbChildren, Sleepers); + {timeout, _, {activate_alarm, _}} -> + loop(State, CurConns, NbChildren, Sleepers); + {'EXIT', Parent, Reason} -> + terminate(State, Reason, NbChildren); + {'EXIT', Pid, Reason} when Sleepers =:= [] -> + case erase(Pid) of + active -> + inc_terminate(StatsCounters, Id, 1), + report_error(Logger, Ref, Protocol, Pid, Reason), + loop(State, CurConns - 1, NbChildren - 1, Sleepers); + removed -> + inc_terminate(StatsCounters, Id, 1), + report_error(Logger, Ref, Protocol, Pid, Reason), + loop(State, CurConns, NbChildren - 1, Sleepers); + undefined -> + loop(State, CurConns, NbChildren, Sleepers) + end; + %% Resume a sleeping acceptor if needed. 
+ {'EXIT', Pid, Reason} -> + case erase(Pid) of + active when CurConns > MaxConns -> + inc_terminate(StatsCounters, Id, 1), + report_error(Logger, Ref, Protocol, Pid, Reason), + loop(State, CurConns - 1, NbChildren - 1, Sleepers); + active -> + inc_terminate(StatsCounters, Id, 1), + report_error(Logger, Ref, Protocol, Pid, Reason), + [To|Sleepers2] = Sleepers, + To ! self(), + loop(State, CurConns - 1, NbChildren - 1, Sleepers2); + removed -> + inc_terminate(StatsCounters, Id, 1), + report_error(Logger, Ref, Protocol, Pid, Reason), + loop(State, CurConns, NbChildren - 1, Sleepers); + undefined -> + loop(State, CurConns, NbChildren, Sleepers) + end; + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], + {State, CurConns, NbChildren, Sleepers}); + %% Calls from the supervisor module. + {'$gen_call', {To, Tag}, which_children} -> + Children = [{Protocol, Pid, ConnType, [Protocol]} + || {Pid, Type} <- get(), + Type =:= active orelse Type =:= removed], + To ! {Tag, Children}, + loop(State, CurConns, NbChildren, Sleepers); + {'$gen_call', {To, Tag}, count_children} -> + Counts = case ConnType of + worker -> [{supervisors, 0}, {workers, NbChildren}]; + supervisor -> [{supervisors, NbChildren}, {workers, 0}] + end, + Counts2 = [{specs, 1}, {active, NbChildren}|Counts], + To ! {Tag, Counts2}, + loop(State, CurConns, NbChildren, Sleepers); + {'$gen_call', {To, Tag}, _} -> + To ! {Tag, {error, ?MODULE}}, + loop(State, CurConns, NbChildren, Sleepers); + Msg -> + ranch:log(error, + "Ranch listener ~p received unexpected message ~p~n", + [Ref, Msg], Logger), + loop(State, CurConns, NbChildren, Sleepers) + end. + +handshake(State=#state{ref=Ref, transport=Transport, handshake_timeout=HandshakeTimeout, + max_conns=MaxConns, alarms=Alarms0}, CurConns, NbChildren, Sleepers, To, Socket, SupPid, ProtocolPid) -> + case Transport:controlling_process(Socket, ProtocolPid) of + ok -> + ProtocolPid ! {handshake, Ref, Transport, Socket, HandshakeTimeout}, + put(SupPid, active), + CurConns2 = CurConns + 1, + Sleepers2 = if CurConns2 < MaxConns -> + To ! self(), + Sleepers; + true -> + [To|Sleepers] + end, + Alarms1 = trigger_alarms(Ref, Alarms0, CurConns2), + loop(State#state{alarms=Alarms1}, CurConns2, NbChildren + 1, Sleepers2); + {error, _} -> + Transport:close(Socket), + %% Only kill the supervised pid, because the connection's pid, + %% when different, is supposed to be sitting under it and linked. + exit(SupPid, kill), + To ! self(), + loop(State, CurConns, NbChildren, Sleepers) + end. + +trigger_alarms(Ref, Alarms, CurConns) -> + maps:map( + fun + (AlarmName, Alarm) -> + trigger_alarm(Ref, AlarmName, Alarm, CurConns) + end, + Alarms + ). + +trigger_alarm(Ref, AlarmName, {Opts=#{treshold := Treshold, callback := Callback}, undefined}, CurConns) when CurConns >= Treshold -> + ActiveConns = [Pid || {Pid, active} <- get()], + case Callback of + {Mod, Fun} -> + spawn(Mod, Fun, [Ref, AlarmName, self(), ActiveConns]); + _ -> + Self = self(), + spawn(fun () -> Callback(Ref, AlarmName, Self, ActiveConns) end) + end, + {Opts, schedule_activate_alarm(AlarmName, Opts)}; +trigger_alarm(_, _, Alarm, _) -> + Alarm. + +schedule_activate_alarm(AlarmName, #{cooldown := Cooldown}) when Cooldown > 0 -> + erlang:start_timer(Cooldown, self(), {activate_alarm, AlarmName}); +schedule_activate_alarm(_, _) -> + undefined. 
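Editorial note, not part of the patch: a sketch of how a num_connections alarm handled by trigger_alarm/4 above might be declared in the transport options. The alarm name and threshold are illustrative; note that the option key is spelled treshold in this code, and the callback must be a fun of arity 4 or an {Module, Function} pair.

    %% Fire my_alarm once 100 connections are reached, then stay quiet
    %% for the 5000 ms cooldown before it may trigger again.
    TransOpts = #{
        socket_opts => [{port, 5555}],
        alarms => #{my_alarm => #{
            type => num_connections,
            treshold => 100,
            cooldown => 5000,
            callback => fun(Ref, AlarmName, ConnsSupPid, ActivePids) ->
                logger:warning("~p ~p: ~b active connections (conns sup ~p)",
                    [Ref, AlarmName, length(ActivePids), ConnsSupPid])
            end
        }}
    }.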
+ +get_alarms(#{alarms := Alarms}) when is_map(Alarms) -> + maps:fold( + fun + (Name, Opts = #{type := num_connections, cooldown := _}, Acc) -> + Acc#{Name => {Opts, undefined}}; + (Name, Opts = #{type := num_connections}, Acc) -> + Acc#{Name => {Opts#{cooldown => 5000}, undefined}}; + (_, _, Acc) -> Acc + end, + #{}, + Alarms + ); +get_alarms(_) -> + #{}. + +set_transport_options(State=#state{max_conns=MaxConns0}, CurConns, NbChildren, Sleepers0, TransOpts) -> + MaxConns1 = maps:get(max_connections, TransOpts, 1024), + HandshakeTimeout = maps:get(handshake_timeout, TransOpts, 5000), + Shutdown = maps:get(shutdown, TransOpts, 5000), + Sleepers1 = case MaxConns1 > MaxConns0 of + true -> + _ = [To ! self() || To <- Sleepers0], + []; + false -> + Sleepers0 + end, + State1=set_alarm_option(State, TransOpts, CurConns), + loop(State1#state{max_conns=MaxConns1, handshake_timeout=HandshakeTimeout, shutdown=Shutdown}, + CurConns, NbChildren, Sleepers1). + +set_alarm_option(State=#state{ref=Ref, alarms=OldAlarms}, TransOpts, CurConns) -> + NewAlarms0 = get_alarms(TransOpts), + NewAlarms1 = merge_alarms(OldAlarms, NewAlarms0), + NewAlarms2 = trigger_alarms(Ref, NewAlarms1, CurConns), + State#state{alarms=NewAlarms2}. + +merge_alarms(Old, New) -> + OldList = lists:sort(maps:to_list(Old)), + NewList = lists:sort(maps:to_list(New)), + Merged = merge_alarms(OldList, NewList, []), + maps:from_list(Merged). + +merge_alarms([], News, Acc) -> + News ++ Acc; +merge_alarms([{_, {_, undefined}}|Olds], [], Acc) -> + merge_alarms(Olds, [], Acc); +merge_alarms([{_, {_, Timer}}|Olds], [], Acc) -> + _ = cancel_alarm_reactivation_timer(Timer), + merge_alarms(Olds, [], Acc); +merge_alarms([{Name, {OldOpts, Timer}}|Olds], [{Name, {NewOpts, _}}|News], Acc) -> + merge_alarms(Olds, News, [{Name, {NewOpts, adapt_alarm_timer(Name, Timer, OldOpts, NewOpts)}}|Acc]); +merge_alarms([{OldName, {_, Timer}}|Olds], News=[{NewName, _}|_], Acc) when OldName < NewName -> + _ = cancel_alarm_reactivation_timer(Timer), + merge_alarms(Olds, News, Acc); +merge_alarms(Olds, [New|News], Acc) -> + merge_alarms(Olds, News, [New|Acc]). + +%% Not in cooldown. +adapt_alarm_timer(_, undefined, _, _) -> + undefined; +%% Cooldown unchanged. +adapt_alarm_timer(_, Timer, #{cooldown := Cooldown}, #{cooldown := Cooldown}) -> + Timer; +%% Cooldown changed to no cooldown, cancel cooldown timer. +adapt_alarm_timer(_, Timer, _, #{cooldown := 0}) -> + _ = cancel_alarm_reactivation_timer(Timer), + undefined; +%% Cooldown changed, cancel current and start new timer taking the already elapsed time into account. +adapt_alarm_timer(Name, Timer, #{cooldown := OldCooldown}, #{cooldown := NewCooldown}) -> + OldTimeLeft = cancel_alarm_reactivation_timer(Timer), + case NewCooldown-OldCooldown+OldTimeLeft of + NewTimeLeft when NewTimeLeft>0 -> + erlang:start_timer(NewTimeLeft, self(), {activate_alarm, Name}); + _ -> + undefined + end. + +cancel_alarm_reactivation_timer(Timer) -> + case erlang:cancel_timer(Timer) of + %% Timer had already expired when we tried to cancel it, so we flush the + %% reactivation message it sent and return 0 as remaining time. + false -> + ok = receive {timeout, Timer, {activate_alarm, _}} -> ok after 0 -> ok end, + 0; + %% Timer has not yet expired, we return the amount of time that was remaining. + TimeLeft -> + TimeLeft + end. + +-spec terminate(#state{}, any(), non_neg_integer()) -> no_return(). 
+terminate(#state{shutdown=brutal_kill, id=Id, + stats_counters_ref=StatsCounters}, Reason, NbChildren) -> + kill_children(get_keys(active)), + kill_children(get_keys(removed)), + inc_terminate(StatsCounters, Id, NbChildren), + exit(Reason); +%% Attempt to gracefully shutdown all children. +terminate(#state{shutdown=Shutdown, id=Id, + stats_counters_ref=StatsCounters}, Reason, NbChildren) -> + shutdown_children(get_keys(active)), + shutdown_children(get_keys(removed)), + _ = if + Shutdown =:= infinity -> + ok; + true -> + erlang:send_after(Shutdown, self(), kill) + end, + wait_children(NbChildren), + inc_terminate(StatsCounters, Id, NbChildren), + exit(Reason). + +inc_accept(StatsCounters, Id, N) -> + %% Accepts are counted in the odd indexes. + counters:add(StatsCounters, 2*Id-1, N). + +inc_terminate(StatsCounters, Id, N) -> + %% Terminates are counted in the even indexes. + counters:add(StatsCounters, 2*Id, N). + +%% Kill all children and then exit. We unlink first to avoid +%% getting a message for each child getting killed. +kill_children(Pids) -> + _ = [begin + unlink(P), + exit(P, kill) + end || P <- Pids], + ok. + +%% Monitor processes so we can know which ones have shutdown +%% before the timeout. Unlink so we avoid receiving an extra +%% message. Then send a shutdown exit signal. +shutdown_children(Pids) -> + _ = [begin + monitor(process, P), + unlink(P), + exit(P, shutdown) + end || P <- Pids], + ok. + +wait_children(0) -> + ok; +wait_children(NbChildren) -> + receive + {'DOWN', _, process, Pid, _} -> + case erase(Pid) of + active -> wait_children(NbChildren - 1); + removed -> wait_children(NbChildren - 1); + _ -> wait_children(NbChildren) + end; + kill -> + Active = get_keys(active), + _ = [exit(P, kill) || P <- Active], + Removed = get_keys(removed), + _ = [exit(P, kill) || P <- Removed], + ok + end. + +-spec system_continue(_, _, any()) -> no_return(). +system_continue(_, _, {State, CurConns, NbChildren, Sleepers}) -> + loop(State, CurConns, NbChildren, Sleepers). + +-spec system_terminate(any(), _, _, _) -> no_return(). +system_terminate(Reason, _, _, {State, _, NbChildren, _}) -> + terminate(State, Reason, NbChildren). + +-spec system_code_change(any(), _, _, _) -> {ok, any()}. +system_code_change({#state{parent=Parent, ref=Ref, conn_type=ConnType, + shutdown=Shutdown, transport=Transport, protocol=Protocol, + opts=Opts, handshake_timeout=HandshakeTimeout, + max_conns=MaxConns, logger=Logger}, CurConns, NbChildren, + Sleepers}, _, {down, _}, _) -> + {ok, {{state, Parent, Ref, ConnType, Shutdown, Transport, Protocol, + Opts, HandshakeTimeout, MaxConns, Logger}, CurConns, NbChildren, + Sleepers}}; +system_code_change({{state, Parent, Ref, ConnType, Shutdown, Transport, Protocol, + Opts, HandshakeTimeout, MaxConns, Logger}, CurConns, NbChildren, + Sleepers}, _, _, _) -> + Self = self(), + [Id] = [Id || {Id, Pid} <- ranch_server:get_connections_sups(Ref), Pid=:=Self], + StatsCounters = ranch_server:get_stats_counters(Ref), + {ok, {#state{parent=Parent, ref=Ref, id=Id, conn_type=ConnType, shutdown=Shutdown, + transport=Transport, protocol=Protocol, opts=Opts, + handshake_timeout=HandshakeTimeout, max_conns=MaxConns, + stats_counters_ref=StatsCounters, + logger=Logger}, CurConns, NbChildren, Sleepers}}; +system_code_change(Misc, _, _, _) -> + {ok, Misc}. + +%% We use ~999999p here instead of ~w because the latter doesn't +%% support printable strings. 
+report_error(_, _, _, _, normal) -> + ok; +report_error(_, _, _, _, shutdown) -> + ok; +report_error(_, _, _, _, {shutdown, _}) -> + ok; +report_error(Logger, Ref, Protocol, Pid, Reason) -> + ranch:log(error, + "Ranch listener ~p had connection process started with " + "~p:start_link/3 at ~p exit with reason: ~999999p~n", + [Ref, Protocol, Pid, Reason], Logger). diff --git a/src/wsNet/ranch_conns_sup_sup.erl b/src/wsNet/ranch_conns_sup_sup.erl new file mode 100644 index 0000000..c532cec --- /dev/null +++ b/src/wsNet/ranch_conns_sup_sup.erl @@ -0,0 +1,42 @@ +%% Copyright (c) 2019-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_conns_sup_sup). + +-behaviour(supervisor). + +-export([start_link/4]). +-export([init/1]). + +-spec start_link(ranch:ref(), module(), module(), module()) -> {ok, pid()}. +start_link(Ref, Transport, Protocol, Logger) -> + ok = ranch_server:cleanup_connections_sups(Ref), + supervisor:start_link(?MODULE, { + Ref, Transport, Protocol, Logger + }). + +-spec init({ranch:ref(), module(), module(), module()}) + -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}. +init({Ref, Transport, Protocol, Logger}) -> + TransOpts = ranch_server:get_transport_options(Ref), + NumAcceptors = maps:get(num_acceptors, TransOpts, 10), + NumConnsSups = maps:get(num_conns_sups, TransOpts, NumAcceptors), + StatsCounters = counters:new(2*NumConnsSups, []), + ok = ranch_server:set_stats_counters(Ref, StatsCounters), + ChildSpecs = [#{ + id => {ranch_conns_sup, N}, + start => {ranch_conns_sup, start_link, [Ref, N, Transport, TransOpts, Protocol, Logger]}, + type => supervisor + } || N <- lists:seq(1, NumConnsSups)], + {ok, {#{intensity => 1 + ceil(math:log2(NumConnsSups))}, ChildSpecs}}. diff --git a/src/wsNet/ranch_crc32c.erl b/src/wsNet/ranch_crc32c.erl new file mode 100644 index 0000000..9512b87 --- /dev/null +++ b/src/wsNet/ranch_crc32c.erl @@ -0,0 +1,115 @@ +%% Copyright (c) 2018-2021, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_crc32c). + +-export([crc32c/1]). +-export([crc32c/2]). 
+ +-define(CRC32C_TABLE, { + 16#00000000, 16#F26B8303, 16#E13B70F7, 16#1350F3F4, + 16#C79A971F, 16#35F1141C, 16#26A1E7E8, 16#D4CA64EB, + 16#8AD958CF, 16#78B2DBCC, 16#6BE22838, 16#9989AB3B, + 16#4D43CFD0, 16#BF284CD3, 16#AC78BF27, 16#5E133C24, + 16#105EC76F, 16#E235446C, 16#F165B798, 16#030E349B, + 16#D7C45070, 16#25AFD373, 16#36FF2087, 16#C494A384, + 16#9A879FA0, 16#68EC1CA3, 16#7BBCEF57, 16#89D76C54, + 16#5D1D08BF, 16#AF768BBC, 16#BC267848, 16#4E4DFB4B, + 16#20BD8EDE, 16#D2D60DDD, 16#C186FE29, 16#33ED7D2A, + 16#E72719C1, 16#154C9AC2, 16#061C6936, 16#F477EA35, + 16#AA64D611, 16#580F5512, 16#4B5FA6E6, 16#B93425E5, + 16#6DFE410E, 16#9F95C20D, 16#8CC531F9, 16#7EAEB2FA, + 16#30E349B1, 16#C288CAB2, 16#D1D83946, 16#23B3BA45, + 16#F779DEAE, 16#05125DAD, 16#1642AE59, 16#E4292D5A, + 16#BA3A117E, 16#4851927D, 16#5B016189, 16#A96AE28A, + 16#7DA08661, 16#8FCB0562, 16#9C9BF696, 16#6EF07595, + 16#417B1DBC, 16#B3109EBF, 16#A0406D4B, 16#522BEE48, + 16#86E18AA3, 16#748A09A0, 16#67DAFA54, 16#95B17957, + 16#CBA24573, 16#39C9C670, 16#2A993584, 16#D8F2B687, + 16#0C38D26C, 16#FE53516F, 16#ED03A29B, 16#1F682198, + 16#5125DAD3, 16#A34E59D0, 16#B01EAA24, 16#42752927, + 16#96BF4DCC, 16#64D4CECF, 16#77843D3B, 16#85EFBE38, + 16#DBFC821C, 16#2997011F, 16#3AC7F2EB, 16#C8AC71E8, + 16#1C661503, 16#EE0D9600, 16#FD5D65F4, 16#0F36E6F7, + 16#61C69362, 16#93AD1061, 16#80FDE395, 16#72966096, + 16#A65C047D, 16#5437877E, 16#4767748A, 16#B50CF789, + 16#EB1FCBAD, 16#197448AE, 16#0A24BB5A, 16#F84F3859, + 16#2C855CB2, 16#DEEEDFB1, 16#CDBE2C45, 16#3FD5AF46, + 16#7198540D, 16#83F3D70E, 16#90A324FA, 16#62C8A7F9, + 16#B602C312, 16#44694011, 16#5739B3E5, 16#A55230E6, + 16#FB410CC2, 16#092A8FC1, 16#1A7A7C35, 16#E811FF36, + 16#3CDB9BDD, 16#CEB018DE, 16#DDE0EB2A, 16#2F8B6829, + 16#82F63B78, 16#709DB87B, 16#63CD4B8F, 16#91A6C88C, + 16#456CAC67, 16#B7072F64, 16#A457DC90, 16#563C5F93, + 16#082F63B7, 16#FA44E0B4, 16#E9141340, 16#1B7F9043, + 16#CFB5F4A8, 16#3DDE77AB, 16#2E8E845F, 16#DCE5075C, + 16#92A8FC17, 16#60C37F14, 16#73938CE0, 16#81F80FE3, + 16#55326B08, 16#A759E80B, 16#B4091BFF, 16#466298FC, + 16#1871A4D8, 16#EA1A27DB, 16#F94AD42F, 16#0B21572C, + 16#DFEB33C7, 16#2D80B0C4, 16#3ED04330, 16#CCBBC033, + 16#A24BB5A6, 16#502036A5, 16#4370C551, 16#B11B4652, + 16#65D122B9, 16#97BAA1BA, 16#84EA524E, 16#7681D14D, + 16#2892ED69, 16#DAF96E6A, 16#C9A99D9E, 16#3BC21E9D, + 16#EF087A76, 16#1D63F975, 16#0E330A81, 16#FC588982, + 16#B21572C9, 16#407EF1CA, 16#532E023E, 16#A145813D, + 16#758FE5D6, 16#87E466D5, 16#94B49521, 16#66DF1622, + 16#38CC2A06, 16#CAA7A905, 16#D9F75AF1, 16#2B9CD9F2, + 16#FF56BD19, 16#0D3D3E1A, 16#1E6DCDEE, 16#EC064EED, + 16#C38D26C4, 16#31E6A5C7, 16#22B65633, 16#D0DDD530, + 16#0417B1DB, 16#F67C32D8, 16#E52CC12C, 16#1747422F, + 16#49547E0B, 16#BB3FFD08, 16#A86F0EFC, 16#5A048DFF, + 16#8ECEE914, 16#7CA56A17, 16#6FF599E3, 16#9D9E1AE0, + 16#D3D3E1AB, 16#21B862A8, 16#32E8915C, 16#C083125F, + 16#144976B4, 16#E622F5B7, 16#F5720643, 16#07198540, + 16#590AB964, 16#AB613A67, 16#B831C993, 16#4A5A4A90, + 16#9E902E7B, 16#6CFBAD78, 16#7FAB5E8C, 16#8DC0DD8F, + 16#E330A81A, 16#115B2B19, 16#020BD8ED, 16#F0605BEE, + 16#24AA3F05, 16#D6C1BC06, 16#C5914FF2, 16#37FACCF1, + 16#69E9F0D5, 16#9B8273D6, 16#88D28022, 16#7AB90321, + 16#AE7367CA, 16#5C18E4C9, 16#4F48173D, 16#BD23943E, + 16#F36E6F75, 16#0105EC76, 16#12551F82, 16#E03E9C81, + 16#34F4F86A, 16#C69F7B69, 16#D5CF889D, 16#27A40B9E, + 16#79B737BA, 16#8BDCB4B9, 16#988C474D, 16#6AE7C44E, + 16#BE2DA0A5, 16#4C4623A6, 16#5F16D052, 16#AD7D5351 +}). + +%% The interface mirrors erlang:crc32/1,2. 
+-spec crc32c(iodata()) -> non_neg_integer().
+crc32c(Data) ->
+	do_crc32c(16#ffffffff, iolist_to_binary(Data)).
+
+-spec crc32c(CRC, iodata()) -> CRC when CRC::non_neg_integer().
+crc32c(OldCrc, Data) ->
+	do_crc32c(OldCrc bxor 16#ffffffff, iolist_to_binary(Data)).
+
+do_crc32c(OldCrc, <<C, Rest/bits>>) ->
+	do_crc32c((OldCrc bsr 8) bxor element(1 + ((OldCrc bxor C) band 16#ff), ?CRC32C_TABLE),
+		Rest);
+do_crc32c(OldCrc, <<>>) ->
+	OldCrc bxor 16#ffffffff.
+
+-ifdef(TEST).
+crc32c_test_() ->
+	Tests = [
+		%% Tests from RFC3720 B.4.
+		{<<0:32/unit:8>>, 16#8a9136aa},
+		{iolist_to_binary([16#ff || _ <- lists:seq(1, 32)]), 16#62a8ab43},
+		{iolist_to_binary([N || N <- lists:seq(0, 16#1f)]), 16#46dd794e},
+		{iolist_to_binary([N || N <- lists:seq(16#1f, 0, -1)]), 16#113fdb5c},
+		{<<16#01c00000:32, 0:32, 0:32, 0:32, 16#14000000:32, 16#00000400:32, 16#00000014:32,
+			16#00000018:32, 16#28000000:32, 0:32, 16#02000000:32, 0:32>>, 16#d9963a56}
+	],
+	[{iolist_to_binary(io_lib:format("16#~8.16.0b", [R])),
+		fun() -> R = crc32c(V) end} || {V, R} <- Tests].
+-endif.
diff --git a/src/wsNet/ranch_embedded_sup.erl b/src/wsNet/ranch_embedded_sup.erl
new file mode 100644
index 0000000..f9f8d61
--- /dev/null
+++ b/src/wsNet/ranch_embedded_sup.erl
@@ -0,0 +1,36 @@
+%% Copyright (c) 2019-2021, Jan Uhlig
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(ranch_embedded_sup).
+
+-behavior(supervisor).
+
+-export([start_link/5]).
+-export([init/1]).
+
+-spec start_link(ranch:ref(), module(), any(), module(), any())
+	-> {ok, pid()}.
+start_link(Ref, Transport, TransOpts, Protocol, ProtoOpts) ->
+	supervisor:start_link(?MODULE, {Ref, Transport, TransOpts, Protocol, ProtoOpts}).
+
+-spec init({ranch:ref(), module(), any(), module(), any()})
+	-> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}.
+init({Ref, Transport, TransOpts, Protocol, ProtoOpts}) ->
+	Proxy = #{id => ranch_server_proxy,
+		start => {ranch_server_proxy, start_link, []},
+		shutdown => brutal_kill},
+	Listener = #{id => {ranch_listener_sup, Ref},
+		start => {ranch_listener_sup, start_link, [Ref, Transport, TransOpts, Protocol, ProtoOpts]},
+		type => supervisor},
+	{ok, {#{strategy => rest_for_one}, [Proxy, Listener]}}.
diff --git a/src/wsNet/ranch_listener_sup.erl b/src/wsNet/ranch_listener_sup.erl
new file mode 100644
index 0000000..b33e42d
--- /dev/null
+++ b/src/wsNet/ranch_listener_sup.erl
@@ -0,0 +1,48 @@
+%% Copyright (c) 2011-2021, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_listener_sup). +-behaviour(supervisor). + +-export([start_link/5]). +-export([init/1]). + +-spec start_link(ranch:ref(), module(), any(), module(), any()) + -> {ok, pid()}. +start_link(Ref, Transport, TransOpts, Protocol, ProtoOpts) -> + MaxConns = maps:get(max_connections, TransOpts, 1024), + Logger = maps:get(logger, TransOpts, logger), + ranch_server:set_new_listener_opts(Ref, MaxConns, TransOpts, ProtoOpts, + [Ref, Transport, TransOpts, Protocol, ProtoOpts]), + supervisor:start_link(?MODULE, { + Ref, Transport, Protocol, Logger + }). + +-spec init({ranch:ref(), module(), module(), module()}) + -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}. +init({Ref, Transport, Protocol, Logger}) -> + ok = ranch_server:set_listener_sup(Ref, self()), + ChildSpecs = [ + #{ + id => ranch_conns_sup_sup, + start => {ranch_conns_sup_sup, start_link, [Ref, Transport, Protocol, Logger]}, + type => supervisor + }, + #{ + id => ranch_acceptors_sup, + start => {ranch_acceptors_sup, start_link, [Ref, Transport, Logger]}, + type => supervisor + } + ], + {ok, {#{strategy => rest_for_one}, ChildSpecs}}. diff --git a/src/wsNet/ranch_protocol.erl b/src/wsNet/ranch_protocol.erl new file mode 100644 index 0000000..4562348 --- /dev/null +++ b/src/wsNet/ranch_protocol.erl @@ -0,0 +1,23 @@ +%% Copyright (c) 2012-2021, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_protocol). + +%% Start a new connection process for the given socket. +-callback start_link( + Ref::ranch:ref(), + Transport::module(), + ProtocolOptions::any()) + -> {ok, ConnectionPid::pid()} + | {ok, SupPid::pid(), ConnectionPid::pid()}. diff --git a/src/wsNet/ranch_proxy_header.erl b/src/wsNet/ranch_proxy_header.erl new file mode 100644 index 0000000..8f73dba --- /dev/null +++ b/src/wsNet/ranch_proxy_header.erl @@ -0,0 +1,1007 @@ +%% Copyright (c) 2018-2021, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_proxy_header). + +-export([parse/1]). +-export([header/1]). +-export([header/2]). +-export([to_connection_info/1]). + +-type proxy_info() :: #{ + %% Mandatory part. + version := 1 | 2, + command := local | proxy, + transport_family => undefined | ipv4 | ipv6 | unix, + transport_protocol => undefined | stream | dgram, + %% Addresses. + src_address => inet:ip_address() | binary(), + src_port => inet:port_number(), + dest_address => inet:ip_address() | binary(), + dest_port => inet:port_number(), + %% Extra TLV-encoded data. + alpn => binary(), %% US-ASCII. + authority => binary(), %% UTF-8. + ssl => #{ + client := [ssl | cert_conn | cert_sess], + verified := boolean(), + version => binary(), %% US-ASCII. + cipher => binary(), %% US-ASCII. + sig_alg => binary(), %% US-ASCII. + key_alg => binary(), %% US-ASCII. + cn => binary() %% UTF-8. + }, + netns => binary(), %% US-ASCII. + %% Unknown TLVs can't be parsed so the raw data is given. + raw_tlvs => [{0..255, binary()}] +}. +-export_type([proxy_info/0]). + +-type build_opts() :: #{ + checksum => crc32c, + padding => pos_integer() %% >= 3 +}. + +%% Parsing. + +-spec parse(Data) -> {ok, proxy_info(), Data} | {error, atom()} when Data::binary(). +parse(<<"\r\n\r\n\0\r\nQUIT\n", Rest/bits>>) -> + parse_v2(Rest); +parse(<<"PROXY ", Rest/bits>>) -> + parse_v1(Rest); +parse(_) -> + {error, 'The PROXY protocol header signature was not recognized. (PP 2.1, PP 2.2)'}. + +-ifdef(TEST). +parse_unrecognized_header_test() -> + {error, _} = parse(<<"GET / HTTP/1.1\r\n">>), + ok. +-endif. + +%% Human-readable header format (Version 1). +parse_v1(<<"TCP4 ", Rest/bits>>) -> + parse_v1(Rest, ipv4); +parse_v1(<<"TCP6 ", Rest/bits>>) -> + parse_v1(Rest, ipv6); +parse_v1(<<"UNKNOWN\r\n", Rest/bits>>) -> + {ok, #{ + version => 1, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }, Rest}; +parse_v1(<<"UNKNOWN ", Rest0/bits>>) -> + case binary:split(Rest0, <<"\r\n">>) of + [_, Rest] -> + {ok, #{ + version => 1, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }, Rest}; + [_] -> + {error, 'Malformed or incomplete PROXY protocol header line. (PP 2.1)'} + end; +parse_v1(_) -> + {error, 'The INET protocol and family string was not recognized. (PP 2.1)'}. + +parse_v1(Rest0, Family) -> + try + {ok, SrcAddr, Rest1} = parse_ip(Rest0, Family), + {ok, DestAddr, Rest2} = parse_ip(Rest1, Family), + {ok, SrcPort, Rest3} = parse_port(Rest2, $\s), + {ok, DestPort, Rest4} = parse_port(Rest3, $\r), + <<"\n", Rest/bits>> = Rest4, + {ok, #{ + version => 1, + command => proxy, + transport_family => Family, + transport_protocol => stream, + src_address => SrcAddr, + src_port => SrcPort, + dest_address => DestAddr, + dest_port => DestPort + }, Rest} + catch + throw:parse_ipv4_error -> + {error, 'Failed to parse an IPv4 address in the PROXY protocol header line. (PP 2.1)'}; + throw:parse_ipv6_error -> + {error, 'Failed to parse an IPv6 address in the PROXY protocol header line. (PP 2.1)'}; + throw:parse_port_error -> + {error, 'Failed to parse a port number in the PROXY protocol header line. 
(PP 2.1)'}; + _:_ -> + {error, 'Malformed or incomplete PROXY protocol header line. (PP 2.1)'} + end. + +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(<>, ipv4) -> parse_ipv4(Addr, Rest); +parse_ip(Data, ipv6) -> + [Addr, Rest] = binary:split(Data, <<$\s>>), + parse_ipv6(Addr, Rest). + +parse_ipv4(Addr0, Rest) -> + case inet:parse_ipv4strict_address(binary_to_list(Addr0)) of + {ok, Addr} -> {ok, Addr, Rest}; + {error, einval} -> throw(parse_ipv4_error) + end. + +parse_ipv6(Addr0, Rest) -> + case inet:parse_ipv6strict_address(binary_to_list(Addr0)) of + {ok, Addr} -> {ok, Addr, Rest}; + {error, einval} -> throw(parse_ipv6_error) + end. + +parse_port(<>, C) -> parse_port(Port, Rest); +parse_port(<>, C) -> parse_port(Port, Rest); +parse_port(<>, C) -> parse_port(Port, Rest); +parse_port(<>, C) -> parse_port(Port, Rest); +parse_port(<>, C) -> parse_port(Port, Rest); + +parse_port(Port0, Rest) -> + try binary_to_integer(Port0) of + Port when Port > 0, Port =< 65535 -> + {ok, Port, Rest}; + _ -> + throw(parse_port_error) + catch _:_ -> + throw(parse_port_error) + end. + +-ifdef(TEST). +parse_v1_test() -> + %% Examples taken from the PROXY protocol header specification. + {ok, #{ + version := 1, + command := proxy, + transport_family := ipv4, + transport_protocol := stream, + src_address := {255, 255, 255, 255}, + src_port := 65535, + dest_address := {255, 255, 255, 255}, + dest_port := 65535 + }, <<>>} = parse(<<"PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535\r\n">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := ipv6, + transport_protocol := stream, + src_address := {65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535}, + src_port := 65535, + dest_address := {65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535}, + dest_port := 65535 + }, <<>>} = parse(<<"PROXY TCP6 " + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff " + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\n">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := undefined, + transport_protocol := undefined + }, <<>>} = parse(<<"PROXY UNKNOWN\r\n">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := undefined, + transport_protocol := undefined + }, <<>>} = parse(<<"PROXY UNKNOWN " + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff " + "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\n">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := ipv4, + transport_protocol := stream, + src_address := {192, 168, 0, 1}, + src_port := 56324, + dest_address := {192, 168, 0, 11}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r\nHost: 192.168.0.11\r\n\r\n">>} = parse(<< + "PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\n" + "GET / HTTP/1.1\r\n" + "Host: 192.168.0.11\r\n" + "\r\n">>), + %% Test cases taken from tomciopp/proxy_protocol. 
+ {ok, #{ + version := 1, + command := proxy, + transport_family := ipv4, + transport_protocol := stream, + src_address := {192, 168, 0, 1}, + src_port := 56324, + dest_address := {192, 168, 0, 11}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r">>} = parse(<< + "PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\nGET / HTTP/1.1\r">>), + {error, _} = parse(<<"PROXY TCP4 192.1638.0.1 192.168.0.11 56324 443\r\nGET / HTTP/1.1\r">>), + {error, _} = parse(<<"PROXY TCP4 192.168.0.1 192.168.0.11 1111111 443\r\nGET / HTTP/1.1\r">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := ipv6, + transport_protocol := stream, + src_address := {8193, 3512, 0, 66, 0, 35374, 880, 29492}, + src_port := 4124, + dest_address := {8193, 3512, 0, 66, 0, 35374, 880, 29493}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r">>} = parse(<<"PROXY TCP6 " + "2001:0db8:0000:0042:0000:8a2e:0370:7334 " + "2001:0db8:0000:0042:0000:8a2e:0370:7335 4124 443\r\nGET / HTTP/1.1\r">>), + {error, _} = parse(<<"PROXY TCP6 " + "2001:0db8:0000:0042:0000:8a2e:0370:7334 " + "2001:0db8:00;0:0042:0000:8a2e:0370:7335 4124 443\r\nGET / HTTP/1.1\r">>), + {error, _} = parse(<<"PROXY TCP6 " + "2001:0db8:0000:0042:0000:8a2e:0370:7334 " + "2001:0db8:0000:0042:0000:8a2e:0370:7335 4124 foo\r\nGET / HTTP/1.1\r">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := undefined, + transport_protocol := undefined + }, <<"GET / HTTP/1.1\r">>} = parse(<<"PROXY UNKNOWN 4124 443\r\nGET / HTTP/1.1\r">>), + {ok, #{ + version := 1, + command := proxy, + transport_family := undefined, + transport_protocol := undefined + }, <<"GET / HTTP/1.1\r">>} = parse(<<"PROXY UNKNOWN\r\nGET / HTTP/1.1\r">>), + ok. +-endif. + +%% Binary header format (version 2). + +%% LOCAL. +parse_v2(<<2:4, 0:4, _:8, Len:16, Rest0/bits>>) -> + case Rest0 of + <<_:Len/binary, Rest/bits>> -> + {ok, #{ + version => 2, + command => local + }, Rest}; + _ -> + {error, 'Missing data in the PROXY protocol binary header. (PP 2.2)'} + end; +%% PROXY. +parse_v2(<<2:4, 1:4, Family:4, Protocol:4, Len:16, Rest/bits>>) + when Family =< 3, Protocol =< 2 -> + case Rest of + <> -> + parse_v2(Rest, Len, parse_family(Family), parse_protocol(Protocol), + <>); + _ -> + {error, 'Missing data in the PROXY protocol binary header. (PP 2.2)'} + end; +%% Errors. +parse_v2(<>) when Version =/= 2 -> + {error, 'Invalid version in the PROXY protocol binary header. (PP 2.2)'}; +parse_v2(<<_:4, Command:4, _/bits>>) when Command > 1 -> + {error, 'Invalid command in the PROXY protocol binary header. (PP 2.2)'}; +parse_v2(<<_:8, Family:4, _/bits>>) when Family > 3 -> + {error, 'Invalid address family in the PROXY protocol binary header. (PP 2.2)'}; +parse_v2(<<_:12, Protocol:4, _/bits>>) when Protocol > 2 -> + {error, 'Invalid transport protocol in the PROXY protocol binary header. (PP 2.2)'}. + +parse_family(0) -> undefined; +parse_family(1) -> ipv4; +parse_family(2) -> ipv6; +parse_family(3) -> unix. + +parse_protocol(0) -> undefined; +parse_protocol(1) -> stream; +parse_protocol(2) -> dgram. + +parse_v2(Data, Len, Family, Protocol, _) + when Family =:= undefined; Protocol =:= undefined -> + <<_:Len/binary, Rest/bits>> = Data, + {ok, #{ + version => 2, + command => proxy, + %% In case only one value was undefined, we set both explicitly. + %% It doesn't make sense to have only one known value. 
+ transport_family => undefined, + transport_protocol => undefined + }, Rest}; +parse_v2(<< + S1, S2, S3, S4, + D1, D2, D3, D4, + SrcPort:16, DestPort:16, Rest/bits>>, Len, Family=ipv4, Protocol, Header) + when Len >= 12 -> + parse_tlv(Rest, Len - 12, #{ + version => 2, + command => proxy, + transport_family => Family, + transport_protocol => Protocol, + src_address => {S1, S2, S3, S4}, + src_port => SrcPort, + dest_address => {D1, D2, D3, D4}, + dest_port => DestPort + }, Header); +parse_v2(<< + S1:16, S2:16, S3:16, S4:16, S5:16, S6:16, S7:16, S8:16, + D1:16, D2:16, D3:16, D4:16, D5:16, D6:16, D7:16, D8:16, + SrcPort:16, DestPort:16, Rest/bits>>, Len, Family=ipv6, Protocol, Header) + when Len >= 36 -> + parse_tlv(Rest, Len - 36, #{ + version => 2, + command => proxy, + transport_family => Family, + transport_protocol => Protocol, + src_address => {S1, S2, S3, S4, S5, S6, S7, S8}, + src_port => SrcPort, + dest_address => {D1, D2, D3, D4, D5, D6, D7, D8}, + dest_port => DestPort + }, Header); +parse_v2(<>, + Len, Family=unix, Protocol, Header) + when Len >= 216 -> + try + [SrcAddr, _] = binary:split(SrcAddr0, <<0>>), + true = byte_size(SrcAddr) > 0, + [DestAddr, _] = binary:split(DestAddr0, <<0>>), + true = byte_size(DestAddr) > 0, + parse_tlv(Rest, Len - 216, #{ + version => 2, + command => proxy, + transport_family => Family, + transport_protocol => Protocol, + src_address => SrcAddr, + dest_address => DestAddr + }, Header) + catch _:_ -> + {error, 'Invalid UNIX address in PROXY protocol binary header. (PP 2.2)'} + end; +parse_v2(_, _, _, _, _) -> + {error, 'Invalid length in the PROXY protocol binary header. (PP 2.2)'}. + +-ifdef(TEST). +parse_v2_test() -> + %% Test cases taken from tomciopp/proxy_protocol. + {ok, #{ + version := 2, + command := proxy, + transport_family := ipv4, + transport_protocol := stream, + src_address := {127, 0, 0, 1}, + src_port := 444, + dest_address := {192, 168, 0, 1}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, %% Signature. + 33, %% Version and command. + 17, %% Family and protocol. + 0, 12, %% Length. + 127, 0, 0, 1, %% Source address. + 192, 168, 0, 1, %% Destination address. + 1, 188, %% Source port. + 1, 187, %% Destination port. + "GET / HTTP/1.1\r\n">>), + {ok, #{ + version := 2, + command := proxy, + transport_family := ipv4, + transport_protocol := dgram, + src_address := {127, 0, 0, 1}, + src_port := 444, + dest_address := {192, 168, 0, 1}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, %% Signature. + 33, %% Version and command. + 18, %% Family and protocol. + 0, 12, %% Length. + 127, 0, 0, 1, %% Source address. + 192, 168, 0, 1, %% Destination address. + 1, 188, %% Source port. + 1, 187, %% Destination port. + "GET / HTTP/1.1\r\n">>), + {ok, #{ + version := 2, + command := proxy, + transport_family := ipv6, + transport_protocol := stream, + src_address := {5532, 4240, 1, 0, 0, 0, 0, 0}, + src_port := 444, + dest_address := {8193, 3512, 1, 0, 0, 0, 0, 0}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, %% Signature. + 33, %% Version and command. + 33, %% Family and protocol. + 0, 36, %% Length. + 21, 156, 16, 144, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, %% Source address. + 32, 1, 13, 184, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, %% Destination address. + 1, 188, %% Source port. + 1, 187, %% Destination port. 
+ "GET / HTTP/1.1\r\n">>), + {ok, #{ + version := 2, + command := proxy, + transport_family := ipv6, + transport_protocol := dgram, + src_address := {5532, 4240, 1, 0, 0, 0, 0, 0}, + src_port := 444, + dest_address := {8193, 3512, 1, 0, 0, 0, 0, 0}, + dest_port := 443 + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, %% Signature. + 33, %% Version and command. + 34, %% Family and protocol. + 0, 36, %% Length. + 21, 156, 16, 144, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, %% Source address. + 32, 1, 13, 184, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, %% Destination address. + 1, 188, %% Source port. + 1, 187, %% Destination port. + "GET / HTTP/1.1\r\n">>), + Path = <<"/var/pgsql_sock">>, + Len = byte_size(Path), + Padding = 8 * (108 - Len), + {ok, #{ + version := 2, + command := proxy, + transport_family := unix, + transport_protocol := stream, + src_address := Path, + dest_address := Path + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, + 33, + 49, + 0, 216, + Path/binary, 0:Padding, + Path/binary, 0:Padding, + "GET / HTTP/1.1\r\n">>), + {ok, #{ + version := 2, + command := proxy, + transport_family := unix, + transport_protocol := dgram, + src_address := Path, + dest_address := Path + }, <<"GET / HTTP/1.1\r\n">>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, + 33, + 50, + 0, 216, + Path/binary, 0:Padding, + Path/binary, 0:Padding, + "GET / HTTP/1.1\r\n">>), + ok. + +parse_v2_regression_test() -> + %% Real packet received from AWS. We confirm that the CRC32C + %% check succeeds only (in other words that ok is returned). + {ok, _, <<>>} = parse(<< + 13, 10, 13, 10, 0, 13, 10, 81, 85, 73, 84, 10, 33, 17, 0, 84, + 172, 31, 7, 113, 172, 31, 10, 31, 200, 242, 0, 80, 3, 0, 4, + 232, 214, 137, 45, 234, 0, 23, 1, 118, 112, 99, 101, 45, 48, + 56, 100, 50, 98, 102, 49, 53, 102, 97, 99, 53, 48, 48, 49, 99, + 57, 4, 0, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>), + ok. +-endif. + +parse_tlv(Rest, 0, Info, _) -> + {ok, Info, Rest}; +%% PP2_TYPE_ALPN. +parse_tlv(<<16#1, TLVLen:16, ALPN:TLVLen/binary, Rest/bits>>, Len, Info, Header) -> + parse_tlv(Rest, Len - TLVLen - 3, Info#{alpn => ALPN}, Header); +%% PP2_TYPE_AUTHORITY. +parse_tlv(<<16#2, TLVLen:16, Authority:TLVLen/binary, Rest/bits>>, Len, Info, Header) -> + parse_tlv(Rest, Len - TLVLen - 3, Info#{authority => Authority}, Header); +%% PP2_TYPE_CRC32C. +parse_tlv(<<16#3, TLVLen:16, CRC32C:32, Rest/bits>>, Len0, Info, Header) when TLVLen =:= 4 -> + Len = Len0 - TLVLen - 3, + BeforeLen = byte_size(Header) - Len - TLVLen, + <> = Header, + %% The initial CRC is ranch_crc32c:crc32c(<<"\r\n\r\n\0\r\nQUIT\n", 2:4, 1:4>>). + case ranch_crc32c:crc32c(2900412422, [Before, <<0:32>>, After]) of + CRC32C -> + parse_tlv(Rest, Len, Info, Header); + _ -> + {error, 'Failed CRC32C verification in PROXY protocol binary header. (PP 2.2)'} + end; +%% PP2_TYPE_NOOP. +parse_tlv(<<16#4, TLVLen:16, _:TLVLen/binary, Rest/bits>>, Len, Info, Header) -> + parse_tlv(Rest, Len - TLVLen - 3, Info, Header); +%% PP2_TYPE_SSL. 
+parse_tlv(<<16#20, TLVLen:16, Client, Verify:32, Rest0/bits>>, Len, Info, Header) -> + SubsLen = TLVLen - 5, + case Rest0 of + <> -> + SSL0 = #{ + client => parse_client(<>), + verified => Verify =:= 0 + }, + case parse_ssl_tlv(Subs, SubsLen, SSL0) of + {ok, SSL, <<>>} -> + parse_tlv(Rest, Len - TLVLen - 3, Info#{ssl => SSL}, Header); + Error={error, _} -> + Error + end; + _ -> + {error, 'Invalid TLV length in the PROXY protocol binary header. (PP 2.2)'} + end; +%% PP2_TYPE_NETNS. +parse_tlv(<<16#30, TLVLen:16, NetNS:TLVLen/binary, Rest/bits>>, Len, Info, Header) -> + parse_tlv(Rest, Len - TLVLen - 3, Info#{netns => NetNS}, Header); +%% Unknown TLV. +parse_tlv(<>, Len, Info, Header) -> + RawTLVs = maps:get(raw_tlvs, Info, []), + parse_tlv(Rest, Len - TLVLen - 3, Info#{raw_tlvs => [{TLVType, TLVValue}|RawTLVs]}, Header); +%% Invalid TLV length. +parse_tlv(_, _, _, _) -> + {error, 'Invalid TLV length in the PROXY protocol binary header. (PP 2.2)'}. + +parse_client(<<_:5, ClientCertSess:1, ClientCertConn:1, ClientSSL:1>>) -> + Client0 = case ClientCertSess of + 0 -> []; + 1 -> [cert_sess] + end, + Client1 = case ClientCertConn of + 0 -> Client0; + 1 -> [cert_conn|Client0] + end, + case ClientSSL of + 0 -> Client1; + 1 -> [ssl|Client1] + end. + +parse_ssl_tlv(Rest, 0, Info) -> + {ok, Info, Rest}; +%% Valid TLVs. +parse_ssl_tlv(<>, Len, Info) -> + case ssl_subtype(TLVType) of + undefined -> + {error, 'Invalid TLV subtype for PP2_TYPE_SSL in PROXY protocol binary header. (PP 2.2)'}; + Type -> + parse_ssl_tlv(Rest, Len - TLVLen - 3, Info#{Type => TLVValue}) + end; +%% Invalid TLV length. +parse_ssl_tlv(_, _, _) -> + {error, 'Invalid TLV length in the PROXY protocol binary header. (PP 2.2)'}. + +ssl_subtype(16#21) -> version; +ssl_subtype(16#22) -> cn; +ssl_subtype(16#23) -> cipher; +ssl_subtype(16#24) -> sig_alg; +ssl_subtype(16#25) -> key_alg; +ssl_subtype(_) -> undefined. + +%% Building. + +-spec header(proxy_info()) -> iodata(). +header(ProxyInfo) -> + header(ProxyInfo, #{}). + +-spec header(proxy_info(), build_opts()) -> iodata(). 
+header(#{version := 2, command := local}, _) -> + <<"\r\n\r\n\0\r\nQUIT\n", 2:4, 0:28>>; +header(#{version := 2, command := proxy, + transport_family := Family, + transport_protocol := Protocol}, _) + when Family =:= undefined; Protocol =:= undefined -> + <<"\r\n\r\n\0\r\nQUIT\n", 2:4, 1:4, 0:24>>; +header(ProxyInfo=#{version := 2, command := proxy, + transport_family := Family, + transport_protocol := Protocol}, Opts) -> + Addresses = addresses(ProxyInfo), + TLVs = tlvs(ProxyInfo, Opts), + ExtraLen = case Opts of + #{checksum := crc32c} -> 7; + _ -> 0 + end, + Len = iolist_size(Addresses) + iolist_size(TLVs) + ExtraLen, + Header = [ + <<"\r\n\r\n\0\r\nQUIT\n", 2:4, 1:4>>, + <<(family(Family)):4, (protocol(Protocol)):4>>, + <>, + Addresses, + TLVs + ], + case Opts of + #{checksum := crc32c} -> + CRC32C = ranch_crc32c:crc32c([Header, <<16#3, 4:16, 0:32>>]), + [Header, <<16#3, 4:16, CRC32C:32>>]; + _ -> + Header + end; +header(#{version := 1, command := proxy, + transport_family := undefined, + transport_protocol := undefined}, _) -> + <<"PROXY UNKNOWN\r\n">>; +header(#{version := 1, command := proxy, + transport_family := Family0, + transport_protocol := stream, + src_address := SrcAddress, src_port := SrcPort, + dest_address := DestAddress, dest_port := DestPort}, _) + when SrcPort > 0, SrcPort =< 65535, DestPort > 0, DestPort =< 65535 -> + [ + <<"PROXY ">>, + case Family0 of + ipv4 when tuple_size(SrcAddress) =:= 4, tuple_size(DestAddress) =:= 4 -> + [<<"TCP4 ">>, inet:ntoa(SrcAddress), $\s, inet:ntoa(DestAddress)]; + ipv6 when tuple_size(SrcAddress) =:= 8, tuple_size(DestAddress) =:= 8 -> + [<<"TCP6 ">>, inet:ntoa(SrcAddress), $\s, inet:ntoa(DestAddress)] + end, + $\s, + integer_to_binary(SrcPort), + $\s, + integer_to_binary(DestPort), + $\r, $\n + ]. + +family(ipv4) -> 1; +family(ipv6) -> 2; +family(unix) -> 3. + +protocol(stream) -> 1; +protocol(dgram) -> 2. + +addresses(#{transport_family := ipv4, + src_address := {S1, S2, S3, S4}, src_port := SrcPort, + dest_address := {D1, D2, D3, D4}, dest_port := DestPort}) + when SrcPort > 0, SrcPort =< 65535, DestPort > 0, DestPort =< 65535 -> + <>; +addresses(#{transport_family := ipv6, + src_address := {S1, S2, S3, S4, S5, S6, S7, S8}, src_port := SrcPort, + dest_address := {D1, D2, D3, D4, D5, D6, D7, D8}, dest_port := DestPort}) + when SrcPort > 0, SrcPort =< 65535, DestPort > 0, DestPort =< 65535 -> + << + S1:16, S2:16, S3:16, S4:16, S5:16, S6:16, S7:16, S8:16, + D1:16, D2:16, D3:16, D4:16, D5:16, D6:16, D7:16, D8:16, + SrcPort:16, DestPort:16 + >>; +addresses(#{transport_family := unix, + src_address := SrcAddress, dest_address := DestAddress}) + when byte_size(SrcAddress) =< 108, byte_size(DestAddress) =< 108 -> + SrcPadding = 8 * (108 - byte_size(SrcAddress)), + DestPadding = 8 * (108 - byte_size(DestAddress)), + << + SrcAddress/binary, 0:SrcPadding, + DestAddress/binary, 0:DestPadding + >>. + +tlvs(ProxyInfo, Opts) -> + [ + binary_tlv(ProxyInfo, alpn, 16#1), + binary_tlv(ProxyInfo, authority, 16#2), + ssl_tlv(ProxyInfo), + binary_tlv(ProxyInfo, netns, 16#30), + raw_tlvs(ProxyInfo), + noop_tlv(Opts) + ]. + +binary_tlv(Info, Key, Type) -> + case Info of + #{Key := Bin} -> + Len = byte_size(Bin), + <>; + _ -> + <<>> + end. + +noop_tlv(#{padding := Len0}) when Len0 >= 3 -> + Len = Len0 - 3, + <<16#4, Len:16, 0:Len/unit:8>>; +noop_tlv(_) -> + <<>>. 
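Editor's note: a minimal usage sketch (not part of the patch) showing the builder clauses above together with the optional CRC32C checksum and NOOP padding; it mirrors the round-trip done by the module's own tests. The addresses and ports are arbitrary example values.

example_build_and_parse() ->
    ProxyInfo = #{
        version => 2,
        command => proxy,
        transport_family => ipv4,
        transport_protocol => stream,
        src_address => {203, 0, 113, 7},
        src_port => 40000,
        dest_address => {192, 0, 2, 10},
        dest_port => 443
    },
    %% header/2 returns iodata; parse/1 returns the decoded info plus trailing bytes.
    HeaderIoData = ranch_proxy_header:header(ProxyInfo, #{checksum => crc32c, padding => 32}),
    {ok, Parsed, <<>>} = ranch_proxy_header:parse(iolist_to_binary(HeaderIoData)),
    Parsed.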
+ +ssl_tlv(#{ssl := Info=#{client := Client0, verified := Verify0}}) -> + Client = client(Client0, 0), + Verify = if + Verify0 -> 0; + not Verify0 -> 1 + end, + TLVs = [ + binary_tlv(Info, version, 16#21), + binary_tlv(Info, cn, 16#22), + binary_tlv(Info, cipher, 16#23), + binary_tlv(Info, sig_alg, 16#24), + binary_tlv(Info, key_alg, 16#25) + ], + Len = iolist_size(TLVs) + 5, + [<<16#20, Len:16, Client, Verify:32>>, TLVs]; +ssl_tlv(_) -> + <<>>. + +client([], Client) -> Client; +client([ssl|Tail], Client) -> client(Tail, Client bor 16#1); +client([cert_conn|Tail], Client) -> client(Tail, Client bor 16#2); +client([cert_sess|Tail], Client) -> client(Tail, Client bor 16#4). + +raw_tlvs(Info) -> + [begin + Len = byte_size(Bin), + <> + end || {Type, Bin} <- maps:get(raw_tlvs, Info, [])]. + +-ifdef(TEST). +v1_test() -> + Test1 = #{ + version => 1, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }, + {ok, Test1, <<>>} = parse(iolist_to_binary(header(Test1))), + Test2 = #{ + version => 1, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + {ok, Test2, <<>>} = parse(iolist_to_binary(header(Test2))), + Test3 = #{ + version => 1, + command => proxy, + transport_family => ipv6, + transport_protocol => stream, + src_address => {1, 2, 3, 4, 5, 6, 7, 8}, + src_port => 1234, + dest_address => {65535, 55555, 2222, 333, 1, 9999, 777, 8}, + dest_port => 23456 + }, + {ok, Test3, <<>>} = parse(iolist_to_binary(header(Test3))), + ok. + +v2_test() -> + Test0 = #{ + version => 2, + command => local + }, + {ok, Test0, <<>>} = parse(iolist_to_binary(header(Test0))), + Test1 = #{ + version => 2, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }, + {ok, Test1, <<>>} = parse(iolist_to_binary(header(Test1))), + Test2 = #{ + version => 2, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + {ok, Test2, <<>>} = parse(iolist_to_binary(header(Test2))), + Test3 = #{ + version => 2, + command => proxy, + transport_family => ipv6, + transport_protocol => stream, + src_address => {1, 2, 3, 4, 5, 6, 7, 8}, + src_port => 1234, + dest_address => {65535, 55555, 2222, 333, 1, 9999, 777, 8}, + dest_port => 23456 + }, + {ok, Test3, <<>>} = parse(iolist_to_binary(header(Test3))), + Test4 = #{ + version => 2, + command => proxy, + transport_family => unix, + transport_protocol => dgram, + src_address => <<"/run/source.sock">>, + dest_address => <<"/run/destination.sock">> + }, + {ok, Test4, <<>>} = parse(iolist_to_binary(header(Test4))), + ok. 
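Editor's note: a small illustrative check (a sketch, not part of the patch) of the client-flag bitmask built by client/2 for the PP2_TYPE_SSL TLV above; ssl, cert_conn and cert_sess map to bits 16#1, 16#2 and 16#4 respectively.

client_bitmask_example_test() ->
    %% Flags fold into a single byte carried in the SSL TLV.
    1 = client([ssl], 0),
    3 = client([ssl, cert_conn], 0),
    7 = client([ssl, cert_conn, cert_sess], 0),
    ok.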
+ +v2_tlvs_test() -> + Common = #{ + version => 2, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + Test1 = Common#{alpn => <<"h2">>}, + {ok, Test1, <<>>} = parse(iolist_to_binary(header(Test1))), + Test2 = Common#{authority => <<"internal.example.org">>}, + {ok, Test2, <<>>} = parse(iolist_to_binary(header(Test2))), + Test3 = Common#{netns => <<"/var/run/netns/example">>}, + {ok, Test3, <<>>} = parse(iolist_to_binary(header(Test3))), + Test4 = Common#{ssl => #{ + client => [ssl, cert_conn, cert_sess], + verified => true, + version => <<"TLSv1.3">>, + cipher => <<"ECDHE-RSA-AES128-GCM-SHA256">>, + sig_alg => <<"SHA256">>, + key_alg => <<"RSA2048">>, + cn => <<"example.com">> + }}, + {ok, Test4, <<>>} = parse(iolist_to_binary(header(Test4))), + %% Note that the raw_tlvs order is not relevant and therefore + %% the parser does not reverse the list it builds. + Test5In = Common#{raw_tlvs => RawTLVs=[ + %% The only custom TLV I am aware of is defined at: + %% https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#proxy-protocol + {16#ea, <<16#1, "instance-id">>}, + %% This TLV is entirely fictional. + {16#ff, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>>} + ]}, + Test5Out = Test5In#{raw_tlvs => lists:reverse(RawTLVs)}, + {ok, Test5Out, <<>>} = parse(iolist_to_binary(header(Test5In))), + ok. + +v2_checksum_test() -> + Test = #{ + version => 2, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + {ok, Test, <<>>} = parse(iolist_to_binary(header(Test, #{checksum => crc32c}))), + ok. + +v2_padding_test() -> + Test = #{ + version => 2, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + {ok, Test, <<>>} = parse(iolist_to_binary(header(Test, #{padding => 123}))), + ok. +-endif. + +%% Helper to convert proxy_info() to ssl:connection_info(). +%% +%% Because there isn't a lot of fields common to both types +%% this only ends up returning the keys protocol, selected_cipher_suite +%% and sni_hostname *at most*. + +-spec to_connection_info(proxy_info()) -> ssl:connection_info(). +to_connection_info(ProxyInfo=#{ssl := SSL}) -> + ConnInfo0 = case ProxyInfo of + #{authority := Authority} -> + [{sni_hostname, Authority}]; + _ -> + [] + end, + ConnInfo = case SSL of + #{cipher := Cipher} -> + case ssl:str_to_suite(binary_to_list(Cipher)) of + {error, {not_recognized, _}} -> + ConnInfo0; + CipherInfo -> + [{selected_cipher_suite, CipherInfo}|ConnInfo0] + end; + _ -> + ConnInfo0 + end, + %% https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html + case SSL of + #{version := <<"TLSv1.3">>} -> [{protocol, 'tlsv1.3'}|ConnInfo]; + #{version := <<"TLSv1.2">>} -> [{protocol, 'tlsv1.2'}|ConnInfo]; + #{version := <<"TLSv1.1">>} -> [{protocol, 'tlsv1.1'}|ConnInfo]; + #{version := <<"TLSv1">>} -> [{protocol, tlsv1}|ConnInfo]; + #{version := <<"SSLv3">>} -> [{protocol, sslv3}|ConnInfo]; + #{version := <<"SSLv2">>} -> [{protocol, sslv2}|ConnInfo]; + %% <<"unknown">>, unsupported or missing version. + _ -> ConnInfo + end; +%% No SSL/TLS information available. +to_connection_info(_) -> + []. + +-ifdef(TEST). 
+to_connection_info_test() -> + Common = #{ + version => 2, + command => proxy, + transport_family => ipv4, + transport_protocol => stream, + src_address => {127, 0, 0, 1}, + src_port => 1234, + dest_address => {10, 11, 12, 13}, + dest_port => 23456 + }, + %% Version 1. + [] = to_connection_info(#{ + version => 1, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }), + [] = to_connection_info(Common#{version => 1}), + %% Version 2, no ssl data. + [] = to_connection_info(#{ + version => 2, + command => local + }), + [] = to_connection_info(#{ + version => 2, + command => proxy, + transport_family => undefined, + transport_protocol => undefined + }), + [] = to_connection_info(Common), + [] = to_connection_info(#{ + version => 2, + command => proxy, + transport_family => unix, + transport_protocol => dgram, + src_address => <<"/run/source.sock">>, + dest_address => <<"/run/destination.sock">> + }), + [] = to_connection_info(Common#{netns => <<"/var/run/netns/example">>}), + [] = to_connection_info(Common#{raw_tlvs => [ + {16#ff, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>>} + ]}), + %% Version 2, with ssl-related data. + [] = to_connection_info(Common#{alpn => <<"h2">>}), + %% The authority alone is not enough to deduce that this is SNI. + [] = to_connection_info(Common#{authority => <<"internal.example.org">>}), + [ + {protocol, 'tlsv1.3'}, + {selected_cipher_suite, #{ + cipher := aes_128_gcm, + key_exchange := ecdhe_rsa, + mac := aead, + prf := sha256 + }} + ] = to_connection_info(Common#{ssl => #{ + client => [ssl, cert_conn, cert_sess], + verified => true, + version => <<"TLSv1.3">>, + cipher => <<"ECDHE-RSA-AES128-GCM-SHA256">>, + sig_alg => <<"SHA256">>, + key_alg => <<"RSA2048">>, + cn => <<"example.com">> + }}), + [ + {protocol, 'tlsv1.3'}, + {selected_cipher_suite, #{ + cipher := aes_128_gcm, + key_exchange := ecdhe_rsa, + mac := aead, + prf := sha256 + }}, + {sni_hostname, <<"internal.example.org">>} + ] = to_connection_info(Common#{authority => <<"internal.example.org">>, ssl => #{ + client => [ssl, cert_conn, cert_sess], + verified => true, + version => <<"TLSv1.3">>, + cipher => <<"ECDHE-RSA-AES128-GCM-SHA256">>, + sig_alg => <<"SHA256">>, + key_alg => <<"RSA2048">>, + cn => <<"example.com">> + }}), + ok. +-endif. diff --git a/src/wsNet/ranch_server.erl b/src/wsNet/ranch_server.erl new file mode 100644 index 0000000..3966c1b --- /dev/null +++ b/src/wsNet/ranch_server.erl @@ -0,0 +1,279 @@ +%% Copyright (c) 2012-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_server). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). +-export([set_new_listener_opts/5]). +-export([cleanup_listener_opts/1]). +-export([cleanup_connections_sups/1]). +-export([set_connections_sup/3]). 
+-export([get_connections_sup/2]). +-export([get_connections_sups/1]). +-export([get_connections_sups/0]). +-export([set_listener_sup/2]). +-export([get_listener_sup/1]). +-export([get_listener_sups/0]). +-export([set_addr/2]). +-export([get_addr/1]). +-export([set_max_connections/2]). +-export([get_max_connections/1]). +-export([set_stats_counters/2]). +-export([get_stats_counters/1]). +-export([set_transport_options/2]). +-export([get_transport_options/1]). +-export([set_protocol_options/2]). +-export([get_protocol_options/1]). +-export([get_listener_start_args/1]). +-export([count_connections/1]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-define(TAB, ?MODULE). + +-type monitors() :: [{{reference(), pid()}, any()}]. +-record(state, { + monitors = [] :: monitors() +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +-spec set_new_listener_opts(ranch:ref(), ranch:max_conns(), any(), any(), [any()]) -> ok. +set_new_listener_opts(Ref, MaxConns, TransOpts, ProtoOpts, StartArgs) -> + gen_server:call(?MODULE, {set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}). + +-spec cleanup_listener_opts(ranch:ref()) -> ok. +cleanup_listener_opts(Ref) -> + _ = ets:delete(?TAB, {addr, Ref}), + _ = ets:delete(?TAB, {max_conns, Ref}), + _ = ets:delete(?TAB, {trans_opts, Ref}), + _ = ets:delete(?TAB, {proto_opts, Ref}), + _ = ets:delete(?TAB, {listener_start_args, Ref}), + %% We also remove the pid of the connection supervisors. + %% Depending on the timing, they might already have been deleted + %% when we handled the monitor DOWN message. However, in some + %% cases when calling stop_listener followed by get_connections_sup, + %% we could end up with the pid still being returned, when we + %% expected a crash (because the listener was stopped). + %% Deleting it explicitly here removes any possible confusion. + _ = ets:match_delete(?TAB, {{conns_sup, Ref, '_'}, '_'}), + _ = ets:delete(?TAB, {stats_counters, Ref}), + %% Ditto for the listener supervisor. + _ = ets:delete(?TAB, {listener_sup, Ref}), + ok. + +-spec cleanup_connections_sups(ranch:ref()) -> ok. +cleanup_connections_sups(Ref) -> + _ = ets:match_delete(?TAB, {{conns_sup, Ref, '_'}, '_'}), + _ = ets:delete(?TAB, {stats_counters, Ref}), + ok. + +-spec set_connections_sup(ranch:ref(), non_neg_integer(), pid()) -> ok. +set_connections_sup(Ref, Id, Pid) -> + gen_server:call(?MODULE, {set_connections_sup, Ref, Id, Pid}). + +-spec get_connections_sup(ranch:ref(), pos_integer()) -> pid(). +get_connections_sup(Ref, Id) -> + ConnsSups = get_connections_sups(Ref), + NConnsSups = length(ConnsSups), + {_, Pid} = lists:keyfind((Id rem NConnsSups) + 1, 1, ConnsSups), + Pid. + +-spec get_connections_sups(ranch:ref()) -> [{pos_integer(), pid()}]. +get_connections_sups(Ref) -> + [{Id, Pid} || + [Id, Pid] <- ets:match(?TAB, {{conns_sup, Ref, '$1'}, '$2'})]. + +-spec get_connections_sups() -> [{ranch:ref(), pos_integer(), pid()}]. +get_connections_sups() -> + [{Ref, Id, Pid} || + [Ref, Id, Pid] <- ets:match(?TAB, {{conns_sup, '$1', '$2'}, '$3'})]. + +-spec set_listener_sup(ranch:ref(), pid()) -> ok. +set_listener_sup(Ref, Pid) -> + gen_server:call(?MODULE, {set_listener_sup, Ref, Pid}). + +-spec get_listener_sup(ranch:ref()) -> pid(). +get_listener_sup(Ref) -> + ets:lookup_element(?TAB, {listener_sup, Ref}, 2). 
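Editor's note: a brief usage sketch (not part of the patch) of get_connections_sup/2 above. An acceptor passes its numeric id and ids larger than the number of connection supervisors wrap around through the (Id rem NConnsSups) + 1 expression. The listener reference below is hypothetical.

conns_sup_for_acceptor_example() ->
    %% With e.g. 2 connection supervisors, acceptor ids 1, 2, 3, 4
    %% resolve to supervisors 2, 1, 2, 1 respectively.
    ranch_server:get_connections_sup(example_listener, 3).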
+ +-spec get_listener_sups() -> [{ranch:ref(), pid()}]. +get_listener_sups() -> + [{Ref, Pid} || [Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})]. + +-spec set_addr(ranch:ref(), {inet:ip_address(), inet:port_number()} | + {local, binary()} | {undefined, undefined}) -> ok. +set_addr(Ref, Addr) -> + gen_server:call(?MODULE, {set_addr, Ref, Addr}). + +-spec get_addr(ranch:ref()) -> {inet:ip_address(), inet:port_number()} | + {local, binary()} | {undefined, undefined}. +get_addr(Ref) -> + ets:lookup_element(?TAB, {addr, Ref}, 2). + +-spec set_max_connections(ranch:ref(), ranch:max_conns()) -> ok. +set_max_connections(Ref, MaxConnections) -> + gen_server:call(?MODULE, {set_max_conns, Ref, MaxConnections}). + +-spec get_max_connections(ranch:ref()) -> ranch:max_conns(). +get_max_connections(Ref) -> + ets:lookup_element(?TAB, {max_conns, Ref}, 2). + +-spec set_stats_counters(ranch:ref(), counters:counters_ref()) -> ok. +set_stats_counters(Ref, Counters) -> + gen_server:call(?MODULE, {set_stats_counters, Ref, Counters}). + +-spec get_stats_counters(ranch:ref()) -> counters:counters_ref(). +get_stats_counters(Ref) -> + ets:lookup_element(?TAB, {stats_counters, Ref}, 2). + +-spec set_transport_options(ranch:ref(), any()) -> ok. +set_transport_options(Ref, TransOpts) -> + gen_server:call(?MODULE, {set_trans_opts, Ref, TransOpts}). + +-spec get_transport_options(ranch:ref()) -> any(). +get_transport_options(Ref) -> + ets:lookup_element(?TAB, {trans_opts, Ref}, 2). + +-spec set_protocol_options(ranch:ref(), any()) -> ok. +set_protocol_options(Ref, ProtoOpts) -> + gen_server:call(?MODULE, {set_proto_opts, Ref, ProtoOpts}). + +-spec get_protocol_options(ranch:ref()) -> any(). +get_protocol_options(Ref) -> + ets:lookup_element(?TAB, {proto_opts, Ref}, 2). + +-spec get_listener_start_args(ranch:ref()) -> [any()]. +get_listener_start_args(Ref) -> + ets:lookup_element(?TAB, {listener_start_args, Ref}, 2). + +-spec count_connections(ranch:ref()) -> non_neg_integer(). +count_connections(Ref) -> + lists:foldl( + fun ({_, ConnsSup}, Acc) -> + Acc+ranch_conns_sup:active_connections(ConnsSup) + end, + 0, + get_connections_sups(Ref)). + +%% gen_server. + +-spec init([]) -> {ok, #state{}}. +init([]) -> + ConnMonitors = [{{erlang:monitor(process, Pid), Pid}, {conns_sup, Ref, Id}} || + [Ref, Id, Pid] <- ets:match(?TAB, {{conns_sup, '$1', '$2'}, '$3'})], + ListenerMonitors = [{{erlang:monitor(process, Pid), Pid}, {listener_sup, Ref}} || + [Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})], + {ok, #state{monitors=ConnMonitors++ListenerMonitors}}. + +-spec handle_call(term(), {pid(), reference()}, #state{}) -> {reply, ok | ignore, #state{}}. 
+handle_call({set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}, _, State) -> + ets:insert_new(?TAB, {{max_conns, Ref}, MaxConns}), + ets:insert_new(?TAB, {{trans_opts, Ref}, TransOpts}), + ets:insert_new(?TAB, {{proto_opts, Ref}, ProtoOpts}), + ets:insert_new(?TAB, {{listener_start_args, Ref}, StartArgs}), + {reply, ok, State}; +handle_call({set_connections_sup, Ref, Id, Pid}, _, State0) -> + State = set_monitored_process({conns_sup, Ref, Id}, Pid, State0), + {reply, ok, State}; +handle_call({set_listener_sup, Ref, Pid}, _, State0) -> + State = set_monitored_process({listener_sup, Ref}, Pid, State0), + {reply, ok, State}; +handle_call({set_addr, Ref, Addr}, _, State) -> + true = ets:insert(?TAB, {{addr, Ref}, Addr}), + {reply, ok, State}; +handle_call({set_max_conns, Ref, MaxConns}, _, State) -> + ets:insert(?TAB, {{max_conns, Ref}, MaxConns}), + _ = [ConnsSup ! {set_max_conns, MaxConns} || {_, ConnsSup} <- get_connections_sups(Ref)], + {reply, ok, State}; +handle_call({set_stats_counters, Ref, Counters}, _, State) -> + ets:insert(?TAB, {{stats_counters, Ref}, Counters}), + {reply, ok, State}; +handle_call({set_trans_opts, Ref, Opts}, _, State) -> + ets:insert(?TAB, {{trans_opts, Ref}, Opts}), + {reply, ok, State}; +handle_call({set_proto_opts, Ref, Opts}, _, State) -> + ets:insert(?TAB, {{proto_opts, Ref}, Opts}), + _ = [ConnsSup ! {set_protocol_options, Opts} || {_, ConnsSup} <- get_connections_sups(Ref)], + {reply, ok, State}; +handle_call(_Request, _From, State) -> + {reply, ignore, State}. + +-spec handle_cast(_, #state{}) -> {noreply, #state{}}. +handle_cast(_Request, State) -> + {noreply, State}. + +-spec handle_info(term(), #state{}) -> {noreply, #state{}}. +handle_info({'DOWN', MonitorRef, process, Pid, Reason}, + State=#state{monitors=Monitors}) -> + {_, TypeRef} = lists:keyfind({MonitorRef, Pid}, 1, Monitors), + ok = case {TypeRef, Reason} of + {{listener_sup, Ref}, normal} -> + cleanup_listener_opts(Ref); + {{listener_sup, Ref}, shutdown} -> + cleanup_listener_opts(Ref); + {{listener_sup, Ref}, {shutdown, _}} -> + cleanup_listener_opts(Ref); + _ -> + _ = ets:delete(?TAB, TypeRef), + ok + end, + Monitors2 = lists:keydelete({MonitorRef, Pid}, 1, Monitors), + {noreply, State#state{monitors=Monitors2}}; +handle_info(_Info, State) -> + {noreply, State}. + +-spec terminate(_, #state{}) -> ok. +terminate(_Reason, _State) -> + ok. + +-spec code_change(term() | {down, term()}, #state{}, term()) -> {ok, term()}. +code_change({down, _}, State, _Extra) -> + true = ets:match_delete(?TAB, {{stats_counters, '_'}, '_'}), + {ok, State}; +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +%% Internal. + +set_monitored_process(Key, Pid, State=#state{monitors=Monitors0}) -> + %% First we cleanup the monitor if a residual one exists. + %% This can happen during crashes when the restart is faster + %% than the cleanup. + Monitors = case lists:keytake(Key, 2, Monitors0) of + false -> + Monitors0; + {value, {{OldMonitorRef, _}, _}, Monitors1} -> + true = erlang:demonitor(OldMonitorRef, [flush]), + Monitors1 + end, + %% Then we unconditionally insert in the ets table. + %% If residual data is there, it will be overwritten. + true = ets:insert(?TAB, {Key, Pid}), + %% Finally we start monitoring this new process. + MonitorRef = erlang:monitor(process, Pid), + State#state{monitors=[{{MonitorRef, Pid}, Key}|Monitors]}. 
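Editor's note: a hedged usage sketch (not part of the patch) of the set_max_conns path above. The call updates the ETS entry and the handle_call/3 clause notifies every connection supervisor of the listener; example_listener is a hypothetical listener reference.

raise_connection_limit_example() ->
    ok = ranch_server:set_max_connections(example_listener, 4096),
    4096 = ranch_server:get_max_connections(example_listener),
    ok.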
diff --git a/src/wsNet/ranch_server_proxy.erl b/src/wsNet/ranch_server_proxy.erl new file mode 100644 index 0000000..22f9dc8 --- /dev/null +++ b/src/wsNet/ranch_server_proxy.erl @@ -0,0 +1,67 @@ +%% Copyright (c) 2019-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_server_proxy). + +-behavior(gen_server). + +-export([start_link/0]). +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([code_change/3]). + +-spec start_link() -> {ok, pid()} | {error, term()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +-spec init([]) -> {ok, pid()} | {stop, term()}. +init([]) -> + case wait_ranch_server(50) of + {ok, Monitor} -> + {ok, Monitor, hibernate}; + {error, Reason} -> + {stop, Reason} + end. + +-spec handle_call(_, _, reference()) -> {noreply, reference(), hibernate}. +handle_call(_, _, Monitor) -> + {noreply, Monitor, hibernate}. + +-spec handle_cast(_, reference()) -> {noreply, reference(), hibernate}. +handle_cast(_, Monitor) -> + {noreply, Monitor, hibernate}. + +-spec handle_info(term(), reference()) -> {noreply, reference(), hibernate} | {stop, term(), reference()}. +handle_info({'DOWN', Monitor, process, _, Reason}, Monitor) -> + {stop, Reason, Monitor}; +handle_info(_, Monitor) -> + {noreply, Monitor, hibernate}. + +-spec code_change(term() | {down, term()}, reference(), term()) -> {ok, reference()}. +code_change(_, Monitor, _) -> + {ok, Monitor}. + +wait_ranch_server(N) -> + case whereis(ranch_server) of + undefined when N > 0 -> + receive after 100 -> ok end, + wait_ranch_server(N - 1); + undefined -> + {error, noproc}; + Pid -> + Monitor = monitor(process, Pid), + {ok, Monitor} + end. diff --git a/src/wsNet/ranch_ssl.erl b/src/wsNet/ranch_ssl.erl new file mode 100644 index 0000000..ff5831e --- /dev/null +++ b/src/wsNet/ranch_ssl.erl @@ -0,0 +1,341 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% Copyright (c) 2021, Maria Scott +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_ssl). +-behaviour(ranch_transport). + +-export([name/0]). +-export([secure/0]). 
+-export([messages/0]). +-export([listen/1]). +-export([disallowed_listen_options/0]). +-export([accept/2]). +-export([handshake/2]). +-export([handshake/3]). +-export([handshake_continue/2]). +-export([handshake_continue/3]). +-export([handshake_cancel/1]). +-export([connect/3]). +-export([connect/4]). +-export([recv/3]). +-export([recv_proxy_header/2]). +-export([send/2]). +-export([sendfile/2]). +-export([sendfile/4]). +-export([sendfile/5]). +-export([setopts/2]). +-export([getopts/2]). +-export([getstat/1]). +-export([getstat/2]). +-export([controlling_process/2]). +-export([peername/1]). +-export([sockname/1]). +-export([shutdown/2]). +-export([close/1]). +-export([cleanup/1]). + +-type ssl_opt() :: {alpn_preferred_protocols, [binary()]} + | {anti_replay, '10k' | '100k' | {integer(), integer(), integer()}} + | {beast_mitigation, one_n_minus_one | zero_n | disabled} + | {cacertfile, file:filename()} + | {cacerts, [public_key:der_encoded()]} + | {cert, public_key:der_encoded()} + | {certfile, file:filename()} + | {ciphers, ssl:ciphers()} + | {client_renegotiation, boolean()} + | {crl_cache, [any()]} + | {crl_check, boolean() | peer | best_effort} + | {depth, integer()} + | {dh, binary()} + | {dhfile, file:filename()} + | {eccs, [ssl:named_curve()]} + | {fail_if_no_peer_cert, boolean()} + | {handshake, hello | full} + | {hibernate_after, timeout()} + | {honor_cipher_order, boolean()} + | {honor_ecc_order, boolean()} + | {key, ssl:key()} + | {key_update_at, pos_integer()} + | {keyfile, file:filename()} + | {log_alert, boolean()} + | {log_level, logger:level()} + | {max_handshake_size, integer()} + | {middlebox_comp_mode, boolean()} + | {next_protocols_advertised, [binary()]} + | {padding_check, boolean()} + | {partial_chain, fun()} + | {password, string()} + | {protocol, tls | dtls} + | {psk_identity, string()} + | {reuse_session, fun()} + | {reuse_sessions, boolean()} + | {secure_renegotiate, boolean()} + | {session_tickets, disabled | stateful | stateless} + | {signature_algs, [{ssl:hash(), ssl:sign_algo()}]} + | {signature_algs_cert, [ssl:sign_scheme()]} + | {sni_fun, fun()} + | {sni_hosts, [{string(), ssl_opt()}]} + | {supported_groups, [ssl:group()]} + | {user_lookup_fun, {fun(), any()}} + | {verify, verify_none | verify_peer} + | {verify_fun, {fun(), any()}} + | {versions, [ssl:protocol_version()]}. +-export_type([ssl_opt/0]). + +-type opt() :: ranch_tcp:opt() | ssl_opt(). +-export_type([opt/0]). + +-type opts() :: [opt()]. +-export_type([opts/0]). + +-spec name() -> ssl. +name() -> ssl. + +-spec secure() -> boolean(). +secure() -> + true. + +-spec messages() -> {ssl, ssl_closed, ssl_error, ssl_passive}. +messages() -> {ssl, ssl_closed, ssl_error, ssl_passive}. + +-spec listen(ranch:transport_opts(opts())) -> {ok, ssl:sslsocket()} | {error, atom()}. +listen(TransOpts) -> + ok = cleanup(TransOpts), + SocketOpts = maps:get(socket_opts, TransOpts, []), + case lists:keymember(cert, 1, SocketOpts) + orelse lists:keymember(certfile, 1, SocketOpts) + orelse lists:keymember(sni_fun, 1, SocketOpts) + orelse lists:keymember(sni_hosts, 1, SocketOpts) + orelse lists:keymember(user_lookup_fun, 1, SocketOpts) of + true -> + Logger = maps:get(logger, TransOpts, logger), + do_listen(SocketOpts, Logger); + false -> + {error, no_cert} + end. 
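Editor's note: a usage sketch (not part of the patch) of a transport options map that passes the certificate check in listen/1 above; the certificate and key paths are placeholders. Such a map is normally handed to ranch (e.g. ranch:start_listener/5) rather than passed to listen/1 directly.

example_ssl_transport_opts() ->
    #{socket_opts => [
        {port, 8443},
        %% certfile satisfies the cert/certfile/sni_fun/sni_hosts/user_lookup_fun check.
        {certfile, "/path/to/cert.pem"},
        {keyfile, "/path/to/key.pem"}
    ]}.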
+ +do_listen(SocketOpts0, Logger) -> + SocketOpts1 = ranch:set_option_default(SocketOpts0, backlog, 1024), + SocketOpts2 = ranch:set_option_default(SocketOpts1, nodelay, true), + SocketOpts3 = ranch:set_option_default(SocketOpts2, send_timeout, 30000), + SocketOpts = ranch:set_option_default(SocketOpts3, send_timeout_close, true), + DisallowedOpts0 = disallowed_listen_options(), + DisallowedOpts = unsupported_tls_options(SocketOpts) ++ DisallowedOpts0, + %% We set the port to 0 because it is given in the Opts directly. + %% The port in the options takes precedence over the one in the + %% first argument. + ssl:listen(0, ranch:filter_options(SocketOpts, DisallowedOpts, + [binary, {active, false}, {packet, raw}, {reuseaddr, true}], Logger)). + +%% 'binary' and 'list' are disallowed but they are handled +%% specifically as they do not have 2-tuple equivalents. +-spec disallowed_listen_options() -> [atom()]. +disallowed_listen_options() -> + [alpn_advertised_protocols, client_preferred_next_protocols, + fallback, server_name_indication, srp_identity + |ranch_tcp:disallowed_listen_options()]. + +unsupported_tls_options(SocketOpts) -> + unsupported_tls_version_options(lists:usort(get_tls_versions(SocketOpts))). + +unsupported_tls_version_options([tlsv1|_]) -> + []; +unsupported_tls_version_options(['tlsv1.1'|_]) -> + [beast_mitigation, padding_check]; +unsupported_tls_version_options(['tlsv1.2'|_]) -> + [beast_mitigation, padding_check]; +unsupported_tls_version_options(['tlsv1.3'|_]) -> + [beast_mitigation, client_renegotiation, next_protocols_advertised, + padding_check, psk_identity, reuse_session, reuse_sessions, + secure_renegotiate, user_lookup_fun]; +unsupported_tls_version_options(_) -> + []. + +-spec accept(ssl:sslsocket(), timeout()) + -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}. +accept(LSocket, Timeout) -> + ssl:transport_accept(LSocket, Timeout). + +-spec handshake(inet:socket() | ssl:sslsocket(), timeout()) + -> {ok, ssl:sslsocket()} | {ok, ssl:sslsocket(), ssl:protocol_extensions()} | {error, any()}. +handshake(CSocket, Timeout) -> + handshake(CSocket, [], Timeout). + +-spec handshake(inet:socket() | ssl:sslsocket(), opts(), timeout()) + -> {ok, ssl:sslsocket()} | {ok, ssl:sslsocket(), ssl:protocol_extensions()} | {error, any()}. +handshake(CSocket, Opts, Timeout) -> + case ssl:handshake(CSocket, Opts, Timeout) of + OK = {ok, _} -> + OK; + OK = {ok, _, _} -> + OK; + Error = {error, _} -> + Error + end. + +-spec handshake_continue(ssl:sslsocket(), timeout()) + -> {ok, ssl:sslsocket()} | {error, any()}. +handshake_continue(CSocket, Timeout) -> + handshake_continue(CSocket, [], Timeout). + +-spec handshake_continue(ssl:sslsocket(), [ssl:tls_server_option()], timeout()) + -> {ok, ssl:sslsocket()} | {error, any()}. +handshake_continue(CSocket, Opts, Timeout) -> + case ssl:handshake_continue(CSocket, Opts, Timeout) of + OK = {ok, _} -> + OK; + Error = {error, _} -> + Error + end. + +-spec handshake_cancel(ssl:sslsocket()) -> ok. +handshake_cancel(CSocket) -> + ok = ssl:handshake_cancel(CSocket). + +%% @todo Probably filter Opts? +-spec connect(inet:ip_address() | inet:hostname(), + inet:port_number(), any()) + -> {ok, inet:socket()} | {error, atom()}. +connect(Host, Port, Opts) when is_integer(Port) -> + ssl:connect(Host, Port, + Opts ++ [binary, {active, false}, {packet, raw}]). + +%% @todo Probably filter Opts? +-spec connect(inet:ip_address() | inet:hostname(), + inet:port_number(), any(), timeout()) + -> {ok, inet:socket()} | {error, atom()}. 
+connect(Host, Port, Opts, Timeout) when is_integer(Port) -> + ssl:connect(Host, Port, + Opts ++ [binary, {active, false}, {packet, raw}], + Timeout). + +-spec recv(ssl:sslsocket(), non_neg_integer(), timeout()) + -> {ok, any()} | {error, closed | atom()}. +recv(Socket, Length, Timeout) -> + ssl:recv(Socket, Length, Timeout). + +-spec recv_proxy_header(ssl:sslsocket(), timeout()) + -> {ok, ranch_proxy_header:proxy_info()} + | {error, closed | atom()} + | {error, protocol_error, atom()}. +recv_proxy_header(SSLSocket, Timeout) -> + %% There's currently no documented way to perform a TCP recv + %% on an sslsocket(), even before the TLS handshake. However + %% nothing prevents us from retrieving the TCP socket and using + %% it. Since it's an undocumented interface this may however + %% make forward-compatibility more difficult. + {sslsocket, {gen_tcp, TCPSocket, _, _}, _} = SSLSocket, + ranch_tcp:recv_proxy_header(TCPSocket, Timeout). + +-spec send(ssl:sslsocket(), iodata()) -> ok | {error, atom()}. +send(Socket, Packet) -> + ssl:send(Socket, Packet). + +-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, Filename) -> + sendfile(Socket, Filename, 0, 0, []). + +-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd(), + non_neg_integer(), non_neg_integer()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, File, Offset, Bytes) -> + sendfile(Socket, File, Offset, Bytes, []). + +%% Unlike with TCP, no syscall can be used here, so sending files +%% through SSL will be much slower in comparison. Note that unlike +%% file:sendfile/5 this function accepts either a file or a file name. +-spec sendfile(ssl:sslsocket(), file:name_all() | file:fd(), + non_neg_integer(), non_neg_integer(), ranch_transport:sendfile_opts()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, File, Offset, Bytes, Opts) -> + ranch_transport:sendfile(?MODULE, Socket, File, Offset, Bytes, Opts). + +%% @todo Probably filter Opts? +-spec setopts(ssl:sslsocket(), list()) -> ok | {error, atom()}. +setopts(Socket, Opts) -> + ssl:setopts(Socket, Opts). + +-spec getopts(ssl:sslsocket(), [atom()]) -> {ok, list()} | {error, atom()}. +getopts(Socket, Opts) -> + ssl:getopts(Socket, Opts). + +-spec getstat(ssl:sslsocket()) -> {ok, list()} | {error, atom()}. +getstat(Socket) -> + ssl:getstat(Socket). + +-spec getstat(ssl:sslsocket(), [atom()]) -> {ok, list()} | {error, atom()}. +getstat(Socket, OptionNames) -> + ssl:getstat(Socket, OptionNames). + +-spec controlling_process(ssl:sslsocket(), pid()) + -> ok | {error, closed | not_owner | atom()}. +controlling_process(Socket, Pid) -> + ssl:controlling_process(Socket, Pid). + +-spec peername(ssl:sslsocket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +peername(Socket) -> + ssl:peername(Socket). + +-spec sockname(ssl:sslsocket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +sockname(Socket) -> + ssl:sockname(Socket). + +-spec shutdown(ssl:sslsocket(), read | write | read_write) + -> ok | {error, atom()}. +shutdown(Socket, How) -> + ssl:shutdown(Socket, How). + +-spec close(ssl:sslsocket()) -> ok. +close(Socket) -> + ssl:close(Socket). + +-spec cleanup(ranch:transport_opts(opts())) -> ok. +cleanup(#{socket_opts:=SocketOpts}) -> + case lists:keyfind(ip, 1, lists:reverse(SocketOpts)) of + {ip, {local, SockFile}} -> + _ = file:delete(SockFile), + ok; + _ -> + ok + end; +cleanup(_) -> + ok. 
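Editor's note: an illustrative sketch (not part of the patch) of the case cleanup/1 above handles. When listening on a UNIX domain socket via {ip, {local, Path}}, a stale socket file would otherwise make the bind fail, so it is deleted before listening. The socket path and certificate options are placeholders.

example_local_socket_opts() ->
    #{socket_opts => [
        %% UNIX domain socket; port 0 is used with local addresses.
        {ip, {local, "/tmp/example_wsrv.sock"}},
        {port, 0},
        {certfile, "/path/to/cert.pem"},
        {keyfile, "/path/to/key.pem"}
    ]}.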
+ +get_tls_versions(SocketOpts) -> + %% Socket options need to be reversed for keyfind because later options + %% take precedence when contained multiple times, but keyfind will return + %% the earliest occurence. + case lists:keyfind(versions, 1, lists:reverse(SocketOpts)) of + {versions, Versions} -> + Versions; + false -> + get_tls_versions_env() + end. + +get_tls_versions_env() -> + case application:get_env(ssl, protocol_version) of + {ok, Versions} -> + Versions; + undefined -> + get_tls_versions_app() + end. + +get_tls_versions_app() -> + {supported, Versions} = lists:keyfind(supported, 1, ssl:versions()), + Versions. diff --git a/src/wsNet/ranch_sup.erl b/src/wsNet/ranch_sup.erl new file mode 100644 index 0000000..15b8b81 --- /dev/null +++ b/src/wsNet/ranch_sup.erl @@ -0,0 +1,39 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_sup). +-behaviour(supervisor). + +-export([start_link/0]). +-export([init/1]). + +-spec start_link() -> {ok, pid()}. +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +-spec init([]) -> {ok, {supervisor:sup_flags(), [supervisor:child_spec()]}}. +init([]) -> + Intensity = case application:get_env(ranch_sup_intensity) of + {ok, Value1} -> Value1; + undefined -> 1 + end, + Period = case application:get_env(ranch_sup_period) of + {ok, Value2} -> Value2; + undefined -> 5 + end, + Procs = [ + #{id => ranch_server, start => {ranch_server, start_link, []}} + ], + {ok, {#{intensity => Intensity, period => Period}, Procs}}. diff --git a/src/wsNet/ranch_tcp.erl b/src/wsNet/ranch_tcp.erl new file mode 100644 index 0000000..3541289 --- /dev/null +++ b/src/wsNet/ranch_tcp.erl @@ -0,0 +1,287 @@ +%% Copyright (c) 2011-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_tcp). +-behaviour(ranch_transport). + +-export([name/0]). +-export([secure/0]). +-export([messages/0]). +-export([listen/1]). +-export([disallowed_listen_options/0]). +-export([accept/2]). +-export([handshake/2]). +-export([handshake/3]). 
+-export([handshake_continue/2]). +-export([handshake_continue/3]). +-export([handshake_cancel/1]). +-export([connect/3]). +-export([connect/4]). +-export([recv/3]). +-export([recv_proxy_header/2]). +-export([send/2]). +-export([sendfile/2]). +-export([sendfile/4]). +-export([sendfile/5]). +-export([setopts/2]). +-export([getopts/2]). +-export([getstat/1]). +-export([getstat/2]). +-export([controlling_process/2]). +-export([peername/1]). +-export([sockname/1]). +-export([shutdown/2]). +-export([close/1]). +-export([cleanup/1]). + +-type opt() :: {backlog, non_neg_integer()} + | {buffer, non_neg_integer()} + | {delay_send, boolean()} + | {dontroute, boolean()} + | {exit_on_close, boolean()} + | {fd, non_neg_integer()} + | {high_msgq_watermark, non_neg_integer()} + | {high_watermark, non_neg_integer()} + | inet + | inet6 + | {ip, inet:ip_address() | inet:local_address()} + | {ipv6_v6only, boolean()} + | {keepalive, boolean()} + | {linger, {boolean(), non_neg_integer()}} + | {low_msgq_watermark, non_neg_integer()} + | {low_watermark, non_neg_integer()} + | {nodelay, boolean()} + | {port, inet:port_number()} + | {priority, integer()} + | {raw, non_neg_integer(), non_neg_integer(), binary()} + | {recbuf, non_neg_integer()} + | {send_timeout, timeout()} + | {send_timeout_close, boolean()} + | {sndbuf, non_neg_integer()} + | {tos, integer()}. +-export_type([opt/0]). + +-type opts() :: [opt()]. +-export_type([opts/0]). + +-spec name() -> tcp. +name() -> tcp. + +-spec secure() -> boolean(). +secure() -> + false. + +-spec messages() -> {tcp, tcp_closed, tcp_error, tcp_passive}. +messages() -> {tcp, tcp_closed, tcp_error, tcp_passive}. + +-spec listen(ranch:transport_opts(opts())) -> {ok, inet:socket()} | {error, atom()}. +listen(TransOpts) -> + ok = cleanup(TransOpts), + Logger = maps:get(logger, TransOpts, logger), + SocketOpts = maps:get(socket_opts, TransOpts, []), + %% We set the port to 0 because it is given in the Opts directly. + %% The port in the options takes precedence over the one in the + %% first argument. + gen_tcp:listen(0, prepare_socket_opts(SocketOpts, Logger)). + +prepare_socket_opts([Backend = {inet_backend, _}|SocketOpts], Logger) -> + %% In OTP/23, the inet_backend option may be used to activate the + %% experimental socket backend for inet/gen_tcp. If present, it must + %% be the first option in the list. + [Backend|prepare_socket_opts(SocketOpts, Logger)]; +prepare_socket_opts(SocketOpts0, Logger) -> + SocketOpts1 = ranch:set_option_default(SocketOpts0, backlog, 1024), + SocketOpts2 = ranch:set_option_default(SocketOpts1, nodelay, true), + SocketOpts3 = ranch:set_option_default(SocketOpts2, send_timeout, 30000), + SocketOpts4 = ranch:set_option_default(SocketOpts3, send_timeout_close, true), + ranch:filter_options(SocketOpts4, disallowed_listen_options(), + [binary, {active, false}, {packet, raw}, {reuseaddr, true}], Logger). + +%% 'binary' and 'list' are disallowed but they are handled +%% specifically as they do not have 2-tuple equivalents. +-spec disallowed_listen_options() -> [atom()]. +disallowed_listen_options() -> + [active, header, mode, packet, packet_size, line_delimiter, reuseaddr]. + +-spec accept(inet:socket(), timeout()) + -> {ok, inet:socket()} | {error, closed | timeout | atom()}. +accept(LSocket, Timeout) -> + gen_tcp:accept(LSocket, Timeout). + +-spec handshake(inet:socket(), timeout()) -> {ok, inet:socket()}. +handshake(CSocket, Timeout) -> + handshake(CSocket, [], Timeout). + +-spec handshake(inet:socket(), opts(), timeout()) -> {ok, inet:socket()}. 
+handshake(CSocket, _, _) -> + {ok, CSocket}. + +-spec handshake_continue(inet:socket(), timeout()) -> no_return(). +handshake_continue(CSocket, Timeout) -> + handshake_continue(CSocket, [], Timeout). + +-spec handshake_continue(inet:socket(), opts(), timeout()) -> no_return(). +handshake_continue(_, _, _) -> + error(not_supported). + +-spec handshake_cancel(inet:socket()) -> no_return(). +handshake_cancel(_) -> + error(not_supported). + +%% @todo Probably filter Opts? +-spec connect(inet:ip_address() | inet:hostname(), + inet:port_number(), any()) + -> {ok, inet:socket()} | {error, atom()}. +connect(Host, Port, Opts) when is_integer(Port) -> + gen_tcp:connect(Host, Port, + Opts ++ [binary, {active, false}, {packet, raw}]). + +%% @todo Probably filter Opts? +-spec connect(inet:ip_address() | inet:hostname(), + inet:port_number(), any(), timeout()) + -> {ok, inet:socket()} | {error, atom()}. +connect(Host, Port, Opts, Timeout) when is_integer(Port) -> + gen_tcp:connect(Host, Port, + Opts ++ [binary, {active, false}, {packet, raw}], + Timeout). + +-spec recv(inet:socket(), non_neg_integer(), timeout()) + -> {ok, any()} | {error, closed | atom()}. +recv(Socket, Length, Timeout) -> + gen_tcp:recv(Socket, Length, Timeout). + +-spec recv_proxy_header(inet:socket(), timeout()) + -> {ok, ranch_proxy_header:proxy_info()} + | {error, closed | atom()} + | {error, protocol_error, atom()}. +recv_proxy_header(Socket, Timeout) -> + case recv(Socket, 0, Timeout) of + {ok, Data} -> + case ranch_proxy_header:parse(Data) of + {ok, ProxyInfo, <<>>} -> + {ok, ProxyInfo}; + {ok, ProxyInfo, Rest} -> + case gen_tcp:unrecv(Socket, Rest) of + ok -> + {ok, ProxyInfo}; + Error -> + Error + end; + {error, HumanReadable} -> + {error, protocol_error, HumanReadable} + end; + Error -> + Error + end. + +-spec send(inet:socket(), iodata()) -> ok | {error, atom()}. +send(Socket, Packet) -> + gen_tcp:send(Socket, Packet). + +-spec sendfile(inet:socket(), file:name_all() | file:fd()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, Filename) -> + sendfile(Socket, Filename, 0, 0, []). + +-spec sendfile(inet:socket(), file:name_all() | file:fd(), non_neg_integer(), + non_neg_integer()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, File, Offset, Bytes) -> + sendfile(Socket, File, Offset, Bytes, []). + +-spec sendfile(inet:socket(), file:name_all() | file:fd(), non_neg_integer(), + non_neg_integer(), [{chunk_size, non_neg_integer()}]) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Socket, Filename, Offset, Bytes, Opts) + when is_list(Filename) orelse is_atom(Filename) + orelse is_binary(Filename) -> + case file:open(Filename, [read, raw, binary]) of + {ok, RawFile} -> + try sendfile(Socket, RawFile, Offset, Bytes, Opts) of + Result -> Result + after + ok = file:close(RawFile) + end; + {error, _} = Error -> + Error + end; +sendfile(Socket, RawFile, Offset, Bytes, Opts) -> + Opts2 = case Opts of + [] -> [{chunk_size, 16#1FFF}]; + _ -> Opts + end, + try file:sendfile(RawFile, Socket, Offset, Bytes, Opts2) of + Result -> Result + catch + error:{badmatch, {error, enotconn}} -> + %% file:sendfile/5 might fail by throwing a + %% {badmatch, {error, enotconn}}. This is because its + %% implementation fails with a badmatch in + %% prim_file:sendfile/10 if the socket is not connected. + {error, closed} + end. + +%% @todo Probably filter Opts? +-spec setopts(inet:socket(), list()) -> ok | {error, atom()}. +setopts(Socket, Opts) -> + inet:setopts(Socket, Opts). 
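+
+%% Usage sketch (illustrative comment only): a protocol that expects a PROXY
+%% protocol header can read it before any request data, relying on the
+%% unrecv/2 call above to push back whatever bytes follow the header. The
+%% function name and the timeout value are placeholders.
+%%
+%% read_proxy_header(Socket) ->
+%%	case ranch_tcp:recv_proxy_header(Socket, 5000) of
+%%		{ok, #{command := proxy, src_address := SrcIP, src_port := SrcPort}} ->
+%%			{ok, {SrcIP, SrcPort}};
+%%		{ok, #{command := local}} ->
+%%			%% Health checks typically use the local command.
+%%			{ok, local};
+%%		{error, protocol_error, HumanReadable} ->
+%%			{error, HumanReadable};
+%%		{error, Reason} ->
+%%			{error, Reason}
+%%	end.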
+ +-spec getopts(inet:socket(), [atom()]) -> {ok, list()} | {error, atom()}. +getopts(Socket, Opts) -> + inet:getopts(Socket, Opts). + +-spec getstat(inet:socket()) -> {ok, list()} | {error, atom()}. +getstat(Socket) -> + inet:getstat(Socket). + +-spec getstat(inet:socket(), [atom()]) -> {ok, list()} | {error, atom()}. +getstat(Socket, OptionNames) -> + inet:getstat(Socket, OptionNames). + +-spec controlling_process(inet:socket(), pid()) + -> ok | {error, closed | not_owner | atom()}. +controlling_process(Socket, Pid) -> + gen_tcp:controlling_process(Socket, Pid). + +-spec peername(inet:socket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +peername(Socket) -> + inet:peername(Socket). + +-spec sockname(inet:socket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +sockname(Socket) -> + inet:sockname(Socket). + +-spec shutdown(inet:socket(), read | write | read_write) + -> ok | {error, atom()}. +shutdown(Socket, How) -> + gen_tcp:shutdown(Socket, How). + +-spec close(inet:socket()) -> ok. +close(Socket) -> + gen_tcp:close(Socket). + +-spec cleanup(ranch:transport_opts(opts())) -> ok. +cleanup(#{socket_opts:=SocketOpts}) -> + case lists:keyfind(ip, 1, lists:reverse(SocketOpts)) of + {ip, {local, SockFile}} -> + _ = file:delete(SockFile), + ok; + _ -> + ok + end; +cleanup(_) -> + ok. diff --git a/src/wsNet/ranch_transport.erl b/src/wsNet/ranch_transport.erl new file mode 100644 index 0000000..52eeba3 --- /dev/null +++ b/src/wsNet/ranch_transport.erl @@ -0,0 +1,157 @@ +%% Copyright (c) 2012-2021, Loïc Hoguin +%% Copyright (c) 2020-2021, Jan Uhlig +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ranch_transport). + +-export([sendfile/6]). + +-type socket() :: any(). +-export_type([socket/0]). + +-type opts() :: any(). +-type stats() :: any(). +-type sendfile_opts() :: [{chunk_size, non_neg_integer()}]. +-export_type([sendfile_opts/0]). + +-callback name() -> atom(). +-callback secure() -> boolean(). +-callback messages() -> {OK::atom(), Closed::atom(), Error::atom(), Passive::atom()}. +-callback listen(ranch:transport_opts(any())) -> {ok, socket()} | {error, atom()}. +-callback accept(socket(), timeout()) + -> {ok, socket()} | {error, closed | timeout | atom()}. +-callback handshake(socket(), timeout()) -> {ok, socket()} | {ok, socket(), any()} | {error, any()}. +-callback handshake(socket(), opts(), timeout()) -> {ok, socket()} | {ok, socket(), any()} | {error, any()}. +-callback handshake_continue(socket(), timeout()) -> {ok, socket()} | {error, any()}. +-callback handshake_continue(socket(), opts(), timeout()) -> {ok, socket()} | {error, any()}. +-callback handshake_cancel(socket()) -> ok. +-callback connect(string(), inet:port_number(), opts()) + -> {ok, socket()} | {error, atom()}. 
+-callback connect(string(), inet:port_number(), opts(), timeout()) + -> {ok, socket()} | {error, atom()}. +-callback recv(socket(), non_neg_integer(), timeout()) + -> {ok, any()} | {error, closed | timeout | atom()}. +-callback recv_proxy_header(socket(), timeout()) + -> {ok, ranch_proxy_header:proxy_info()} + | {error, closed | atom()} + | {error, protocol_error, atom()}. +-callback send(socket(), iodata()) -> ok | {error, atom()}. +-callback sendfile(socket(), file:name_all() | file:fd()) + -> {ok, non_neg_integer()} | {error, atom()}. +-callback sendfile(socket(), file:name_all() | file:fd(), non_neg_integer(), + non_neg_integer()) -> {ok, non_neg_integer()} | {error, atom()}. +-callback sendfile(socket(), file:name_all() | file:fd(), non_neg_integer(), + non_neg_integer(), sendfile_opts()) + -> {ok, non_neg_integer()} | {error, atom()}. +-callback setopts(socket(), opts()) -> ok | {error, atom()}. +-callback getopts(socket(), [atom()]) -> {ok, opts()} | {error, atom()}. +-callback getstat(socket()) -> {ok, stats()} | {error, atom()}. +-callback getstat(socket(), [atom()]) -> {ok, stats()} | {error, atom()}. +-callback controlling_process(socket(), pid()) + -> ok | {error, closed | not_owner | atom()}. +-callback peername(socket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +-callback sockname(socket()) + -> {ok, {inet:ip_address(), inet:port_number()} | {local, binary()}} | {error, atom()}. +-callback shutdown(socket(), read | write | read_write) + -> ok | {error, atom()}. +-callback close(socket()) -> ok. +-callback cleanup(ranch:transport_opts(any())) -> ok. + +%% A fallback for transports that don't have a native sendfile implementation. +%% Note that the ordering of arguments is different from file:sendfile/5 and +%% that this function accepts either a raw file or a file name. +-spec sendfile(module(), socket(), file:name_all() | file:fd(), + non_neg_integer(), non_neg_integer(), sendfile_opts()) + -> {ok, non_neg_integer()} | {error, atom()}. +sendfile(Transport, Socket, Filename, Offset, Bytes, Opts) + when is_list(Filename) orelse is_atom(Filename) + orelse is_binary(Filename) -> + ChunkSize = chunk_size(Opts), + case file:open(Filename, [read, raw, binary]) of + {ok, RawFile} -> + _ = case Offset of + 0 -> + ok; + _ -> + {ok, _} = file:position(RawFile, {bof, Offset}) + end, + try + sendfile_loop(Transport, Socket, RawFile, Bytes, 0, ChunkSize) + after + ok = file:close(RawFile) + end; + {error, _Reason} = Error -> + Error + end; +sendfile(Transport, Socket, RawFile, Offset, Bytes, Opts) -> + ChunkSize = chunk_size(Opts), + Initial2 = case file:position(RawFile, {cur, 0}) of + {ok, Offset} -> + Offset; + {ok, Initial} -> + {ok, _} = file:position(RawFile, {bof, Offset}), + Initial + end, + case sendfile_loop(Transport, Socket, RawFile, Bytes, 0, ChunkSize) of + {ok, _Sent} = Result -> + {ok, _} = file:position(RawFile, {bof, Initial2}), + Result; + {error, _Reason} = Error -> + Error + end. + +-spec chunk_size(sendfile_opts()) -> pos_integer(). +chunk_size(Opts) -> + case lists:keyfind(chunk_size, 1, Opts) of + {chunk_size, ChunkSize} + when is_integer(ChunkSize) andalso ChunkSize > 0 -> + ChunkSize; + {chunk_size, 0} -> + 16#1FFF; + false -> + 16#1FFF + end. + +-spec sendfile_loop(module(), socket(), file:fd(), non_neg_integer(), + non_neg_integer(), pos_integer()) + -> {ok, non_neg_integer()} | {error, any()}. 
+sendfile_loop(_Transport, _Socket, _RawFile, Sent, Sent, _ChunkSize) + when Sent =/= 0 -> + %% All requested data has been read and sent, return number of bytes sent. + {ok, Sent}; +sendfile_loop(Transport, Socket, RawFile, Bytes, Sent, ChunkSize) -> + ReadSize = read_size(Bytes, Sent, ChunkSize), + case file:read(RawFile, ReadSize) of + {ok, IoData} -> + case Transport:send(Socket, IoData) of + ok -> + Sent2 = iolist_size(IoData) + Sent, + sendfile_loop(Transport, Socket, RawFile, Bytes, Sent2, + ChunkSize); + {error, _Reason} = Error -> + Error + end; + eof -> + {ok, Sent}; + {error, _Reason} = Error -> + Error + end. + +-spec read_size(non_neg_integer(), non_neg_integer(), non_neg_integer()) -> + non_neg_integer(). +read_size(0, _Sent, ChunkSize) -> + ChunkSize; +read_size(Bytes, Sent, ChunkSize) -> + min(Bytes - Sent, ChunkSize). diff --git a/src/wsSrv/cowboy.erl b/src/wsSrv/cowboy.erl new file mode 100644 index 0000000..c4be25b --- /dev/null +++ b/src/wsSrv/cowboy.erl @@ -0,0 +1,105 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy). + +-export([start_clear/3]). +-export([start_tls/3]). +-export([stop_listener/1]). +-export([set_env/3]). + +%% Internal. +-export([log/2]). +-export([log/4]). + +-type opts() :: cowboy_http:opts() | cowboy_http2:opts(). +-export_type([opts/0]). + +-type fields() :: [atom() + | {atom(), cowboy_constraints:constraint() | [cowboy_constraints:constraint()]} + | {atom(), cowboy_constraints:constraint() | [cowboy_constraints:constraint()], any()}]. +-export_type([fields/0]). + +-type http_headers() :: #{binary() => iodata()}. +-export_type([http_headers/0]). + +-type http_status() :: non_neg_integer() | binary(). +-export_type([http_status/0]). + +-type http_version() :: 'HTTP/2' | 'HTTP/1.1' | 'HTTP/1.0'. +-export_type([http_version/0]). + +-spec start_clear(ranch:ref(), ranch:opts(), opts()) + -> {ok, pid()} | {error, any()}. +start_clear(Ref, TransOpts0, ProtoOpts0) -> + TransOpts1 = ranch:normalize_opts(TransOpts0), + {TransOpts, ConnectionType} = ensure_connection_type(TransOpts1), + ProtoOpts = ProtoOpts0#{connection_type => ConnectionType}, + ranch:start_listener(Ref, ranch_tcp, TransOpts, cowboy_clear, ProtoOpts). + +-spec start_tls(ranch:ref(), ranch:opts(), opts()) + -> {ok, pid()} | {error, any()}. 
+start_tls(Ref, TransOpts0, ProtoOpts0) -> + TransOpts1 = ranch:normalize_opts(TransOpts0), + SocketOpts = maps:get(socket_opts, TransOpts1, []), + TransOpts2 = TransOpts1#{socket_opts => [ + {next_protocols_advertised, [<<"h2">>, <<"http/1.1">>]}, + {alpn_preferred_protocols, [<<"h2">>, <<"http/1.1">>]} + |SocketOpts]}, + {TransOpts, ConnectionType} = ensure_connection_type(TransOpts2), + ProtoOpts = ProtoOpts0#{connection_type => ConnectionType}, + ranch:start_listener(Ref, ranch_ssl, TransOpts, cowboy_tls, ProtoOpts). + +ensure_connection_type(TransOpts=#{connection_type := ConnectionType}) -> + {TransOpts, ConnectionType}; +ensure_connection_type(TransOpts) -> + {TransOpts#{connection_type => supervisor}, supervisor}. + +-spec stop_listener(ranch:ref()) -> ok | {error, not_found}. +stop_listener(Ref) -> + ranch:stop_listener(Ref). + +-spec set_env(ranch:ref(), atom(), any()) -> ok. +set_env(Ref, Name, Value) -> + Opts = ranch:get_protocol_options(Ref), + Env = maps:get(env, Opts, #{}), + Opts2 = maps:put(env, maps:put(Name, Value, Env), Opts), + ok = ranch:set_protocol_options(Ref, Opts2). + +%% Internal. + +-spec log({log, logger:level(), io:format(), list()}, opts()) -> ok. +log({log, Level, Format, Args}, Opts) -> + log(Level, Format, Args, Opts). + +-spec log(logger:level(), io:format(), list(), opts()) -> ok. +log(Level, Format, Args, #{logger := Logger}) + when Logger =/= error_logger -> + _ = Logger:Level(Format, Args), + ok; +%% We use error_logger by default. Because error_logger does +%% not have all the levels we accept we have to do some +%% mapping to error_logger functions. +log(Level, Format, Args, _) -> + Function = case Level of + emergency -> error_msg; + alert -> error_msg; + critical -> error_msg; + error -> error_msg; + warning -> warning_msg; + notice -> warning_msg; + info -> info_msg; + debug -> info_msg + end, + error_logger:Function(Format, Args). diff --git a/src/wsSrv/cowboy_app.erl b/src/wsSrv/cowboy_app.erl new file mode 100644 index 0000000..74cba41 --- /dev/null +++ b/src/wsSrv/cowboy_app.erl @@ -0,0 +1,27 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +-spec start(_, _) -> {ok, pid()}. +start(_, _) -> + cowboy_sup:start_link(). + +-spec stop(_) -> ok. +stop(_) -> + ok. diff --git a/src/wsSrv/cowboy_bstr.erl b/src/wsSrv/cowboy_bstr.erl new file mode 100644 index 0000000..d8041e4 --- /dev/null +++ b/src/wsSrv/cowboy_bstr.erl @@ -0,0 +1,123 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_bstr). + +%% Binary strings. +-export([capitalize_token/1]). +-export([to_lower/1]). +-export([to_upper/1]). + +%% Characters. +-export([char_to_lower/1]). +-export([char_to_upper/1]). + +%% The first letter and all letters after a dash are capitalized. +%% This is the form seen for header names in the HTTP/1.1 RFC and +%% others. Note that using this form isn't required, as header names +%% are case insensitive, and it is only provided for use with eventual +%% badly implemented clients. +-spec capitalize_token(B) -> B when B::binary(). +capitalize_token(B) -> + capitalize_token(B, true, <<>>). +capitalize_token(<<>>, _, Acc) -> + Acc; +capitalize_token(<< $-, Rest/bits >>, _, Acc) -> + capitalize_token(Rest, true, << Acc/binary, $- >>); +capitalize_token(<< C, Rest/bits >>, true, Acc) -> + capitalize_token(Rest, false, << Acc/binary, (char_to_upper(C)) >>); +capitalize_token(<< C, Rest/bits >>, false, Acc) -> + capitalize_token(Rest, false, << Acc/binary, (char_to_lower(C)) >>). + +-spec to_lower(B) -> B when B::binary(). +to_lower(B) -> + << << (char_to_lower(C)) >> || << C >> <= B >>. + +-spec to_upper(B) -> B when B::binary(). +to_upper(B) -> + << << (char_to_upper(C)) >> || << C >> <= B >>. + +-spec char_to_lower(char()) -> char(). +char_to_lower($A) -> $a; +char_to_lower($B) -> $b; +char_to_lower($C) -> $c; +char_to_lower($D) -> $d; +char_to_lower($E) -> $e; +char_to_lower($F) -> $f; +char_to_lower($G) -> $g; +char_to_lower($H) -> $h; +char_to_lower($I) -> $i; +char_to_lower($J) -> $j; +char_to_lower($K) -> $k; +char_to_lower($L) -> $l; +char_to_lower($M) -> $m; +char_to_lower($N) -> $n; +char_to_lower($O) -> $o; +char_to_lower($P) -> $p; +char_to_lower($Q) -> $q; +char_to_lower($R) -> $r; +char_to_lower($S) -> $s; +char_to_lower($T) -> $t; +char_to_lower($U) -> $u; +char_to_lower($V) -> $v; +char_to_lower($W) -> $w; +char_to_lower($X) -> $x; +char_to_lower($Y) -> $y; +char_to_lower($Z) -> $z; +char_to_lower(Ch) -> Ch. + +-spec char_to_upper(char()) -> char(). +char_to_upper($a) -> $A; +char_to_upper($b) -> $B; +char_to_upper($c) -> $C; +char_to_upper($d) -> $D; +char_to_upper($e) -> $E; +char_to_upper($f) -> $F; +char_to_upper($g) -> $G; +char_to_upper($h) -> $H; +char_to_upper($i) -> $I; +char_to_upper($j) -> $J; +char_to_upper($k) -> $K; +char_to_upper($l) -> $L; +char_to_upper($m) -> $M; +char_to_upper($n) -> $N; +char_to_upper($o) -> $O; +char_to_upper($p) -> $P; +char_to_upper($q) -> $Q; +char_to_upper($r) -> $R; +char_to_upper($s) -> $S; +char_to_upper($t) -> $T; +char_to_upper($u) -> $U; +char_to_upper($v) -> $V; +char_to_upper($w) -> $W; +char_to_upper($x) -> $X; +char_to_upper($y) -> $Y; +char_to_upper($z) -> $Z; +char_to_upper(Ch) -> Ch. + +%% Tests. + +-ifdef(TEST). 
+capitalize_token_test_() -> + Tests = [ + {<<"heLLo-woRld">>, <<"Hello-World">>}, + {<<"Sec-Websocket-Version">>, <<"Sec-Websocket-Version">>}, + {<<"Sec-WebSocket-Version">>, <<"Sec-Websocket-Version">>}, + {<<"sec-websocket-version">>, <<"Sec-Websocket-Version">>}, + {<<"SEC-WEBSOCKET-VERSION">>, <<"Sec-Websocket-Version">>}, + {<<"Sec-WebSocket--Version">>, <<"Sec-Websocket--Version">>}, + {<<"Sec-WebSocket---Version">>, <<"Sec-Websocket---Version">>} + ], + [{H, fun() -> R = capitalize_token(H) end} || {H, R} <- Tests]. +-endif. diff --git a/src/wsSrv/cowboy_children.erl b/src/wsSrv/cowboy_children.erl new file mode 100644 index 0000000..05d39fb --- /dev/null +++ b/src/wsSrv/cowboy_children.erl @@ -0,0 +1,192 @@ +%% Copyright (c) 2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_children). + +-export([init/0]). +-export([up/4]). +-export([down/2]). +-export([shutdown/2]). +-export([shutdown_timeout/3]). +-export([terminate/1]). +-export([handle_supervisor_call/4]). + +-record(child, { + pid :: pid(), + streamid :: cowboy_stream:streamid() | undefined, + shutdown :: timeout(), + timer = undefined :: undefined | reference() +}). + +-type children() :: [#child{}]. +-export_type([children/0]). + +-spec init() -> []. +init() -> + []. + +-spec up(Children, pid(), cowboy_stream:streamid(), timeout()) + -> Children when Children::children(). +up(Children, Pid, StreamID, Shutdown) -> + [#child{ + pid=Pid, + streamid=StreamID, + shutdown=Shutdown + }|Children]. + +-spec down(Children, pid()) + -> {ok, cowboy_stream:streamid() | undefined, Children} | error + when Children::children(). +down(Children0, Pid) -> + case lists:keytake(Pid, #child.pid, Children0) of + {value, #child{streamid=StreamID, timer=Ref}, Children} -> + _ = case Ref of + undefined -> ok; + _ -> erlang:cancel_timer(Ref, [{async, true}, {info, false}]) + end, + {ok, StreamID, Children}; + false -> + error + end. + +%% We ask the processes to shutdown first. This gives +%% a chance to processes that are trapping exits to +%% shut down gracefully. Others will exit immediately. +%% +%% @todo We currently fire one timer per process being +%% shut down. This is probably not the most efficient. +%% A more efficient solution could be to maintain a +%% single timer and decrease the shutdown time of all +%% processes when it fires. This is however much more +%% complex, and there aren't that many processes that +%% will need to be shutdown through this function, so +%% this is left for later. +-spec shutdown(Children, cowboy_stream:streamid()) + -> Children when Children::children(). 
+shutdown(Children0, StreamID) -> + [ + case Child of + #child{pid=Pid, streamid=StreamID, shutdown=Shutdown} -> + exit(Pid, shutdown), + Ref = erlang:start_timer(Shutdown, self(), {shutdown, Pid}), + Child#child{streamid=undefined, timer=Ref}; + _ -> + Child + end + || Child <- Children0]. + +-spec shutdown_timeout(children(), reference(), pid()) -> ok. +shutdown_timeout(Children, Ref, Pid) -> + case lists:keyfind(Pid, #child.pid, Children) of + #child{timer=Ref} -> + exit(Pid, kill), + ok; + _ -> + ok + end. + +-spec terminate(children()) -> ok. +terminate(Children) -> + %% For each child, either ask for it to shut down, + %% or cancel its shutdown timer if it already is. + %% + %% We do not need to flush stray timeout messages out because + %% we are either terminating or switching protocols, + %% and in the latter case we flush all messages. + _ = [case TRef of + undefined -> exit(Pid, shutdown); + _ -> erlang:cancel_timer(TRef, [{async, true}, {info, false}]) + end || #child{pid=Pid, timer=TRef} <- Children], + before_terminate_loop(Children). + +before_terminate_loop([]) -> + ok; +before_terminate_loop(Children) -> + %% Find the longest shutdown time. + Time = longest_shutdown_time(Children, 0), + %% We delay the creation of the timer if one of the + %% processes has an infinity shutdown value. + TRef = case Time of + infinity -> undefined; + _ -> erlang:start_timer(Time, self(), terminate) + end, + %% Loop until that time or until all children are dead. + terminate_loop(Children, TRef). + +terminate_loop([], TRef) -> + %% Don't forget to cancel the timer, if any! + case TRef of + undefined -> + ok; + _ -> + _ = erlang:cancel_timer(TRef, [{async, true}, {info, false}]), + ok + end; +terminate_loop(Children, TRef) -> + receive + {'EXIT', Pid, _} when TRef =:= undefined -> + {value, #child{shutdown=Shutdown}, Children1} + = lists:keytake(Pid, #child.pid, Children), + %% We delayed the creation of the timer. If a process with + %% infinity shutdown just ended, we might have to start that timer. + case Shutdown of + infinity -> before_terminate_loop(Children1); + _ -> terminate_loop(Children1, TRef) + end; + {'EXIT', Pid, _} -> + terminate_loop(lists:keydelete(Pid, #child.pid, Children), TRef); + {timeout, TRef, terminate} -> + %% Brutally kill any remaining children. + _ = [exit(Pid, kill) || #child{pid=Pid} <- Children], + ok + end. + +longest_shutdown_time([], Time) -> + Time; +longest_shutdown_time([#child{shutdown=ChildTime}|Tail], Time) when ChildTime > Time -> + longest_shutdown_time(Tail, ChildTime); +longest_shutdown_time([_|Tail], Time) -> + longest_shutdown_time(Tail, Time). + +-spec handle_supervisor_call(any(), {pid(), any()}, children(), module()) -> ok. +handle_supervisor_call(which_children, {From, Tag}, Children, Module) -> + From ! {Tag, which_children(Children, Module)}, + ok; +handle_supervisor_call(count_children, {From, Tag}, Children, _) -> + From ! {Tag, count_children(Children)}, + ok; +%% We disable start_child since only incoming requests +%% end up creating a new process. +handle_supervisor_call({start_child, _}, {From, Tag}, _, _) -> + From ! {Tag, {error, start_child_disabled}}, + ok; +%% All other calls refer to children. We act in a similar way +%% to a simple_one_for_one so we never find those. +handle_supervisor_call(_, {From, Tag}, _, _) -> + From ! {Tag, {error, not_found}}, + ok. + +-spec which_children(children(), module()) -> [{module(), pid(), worker, [module()]}]. 
+which_children(Children, Module) -> + [{Module, Pid, worker, [Module]} || #child{pid=Pid} <- Children]. + +-spec count_children(children()) -> [{atom(), non_neg_integer()}]. +count_children(Children) -> + Count = length(Children), + [ + {specs, 1}, + {active, Count}, + {supervisors, 0}, + {workers, Count} + ]. diff --git a/src/wsSrv/cowboy_clear.erl b/src/wsSrv/cowboy_clear.erl new file mode 100644 index 0000000..4f3a234 --- /dev/null +++ b/src/wsSrv/cowboy_clear.erl @@ -0,0 +1,60 @@ +%% Copyright (c) 2016-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_clear). +-behavior(ranch_protocol). + +-export([start_link/3]). +-export([start_link/4]). +-export([connection_process/4]). + +%% Ranch 1. +-spec start_link(ranch:ref(), inet:socket(), module(), cowboy:opts()) -> {ok, pid()}. +start_link(Ref, _Socket, Transport, Opts) -> + start_link(Ref, Transport, Opts). + +%% Ranch 2. +-spec start_link(ranch:ref(), module(), cowboy:opts()) -> {ok, pid()}. +start_link(Ref, Transport, Opts) -> + Pid = proc_lib:spawn_link(?MODULE, connection_process, + [self(), Ref, Transport, Opts]), + {ok, Pid}. + +-spec connection_process(pid(), ranch:ref(), module(), cowboy:opts()) -> ok. +connection_process(Parent, Ref, Transport, Opts) -> + ProxyInfo = case maps:get(proxy_header, Opts, false) of + true -> + {ok, ProxyInfo0} = ranch:recv_proxy_header(Ref, 1000), + ProxyInfo0; + false -> + undefined + end, + {ok, Socket} = ranch:handshake(Ref), + %% Use cowboy_http2 directly only when 'http' is missing. + %% Otherwise switch to cowboy_http2 from cowboy_http. + %% + %% @todo Extend this option to cowboy_tls and allow disabling + %% the switch to cowboy_http2 in cowboy_http. Also document it. + Protocol = case maps:get(protocols, Opts, [http2, http]) of + [http2] -> cowboy_http2; + [_|_] -> cowboy_http + end, + init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol). + +init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) -> + _ = case maps:get(connection_type, Opts, supervisor) of + worker -> ok; + supervisor -> process_flag(trap_exit, true) + end, + Protocol:init(Parent, Ref, Socket, Transport, ProxyInfo, Opts). diff --git a/src/wsSrv/cowboy_clock.erl b/src/wsSrv/cowboy_clock.erl new file mode 100644 index 0000000..28f8a1b --- /dev/null +++ b/src/wsSrv/cowboy_clock.erl @@ -0,0 +1,221 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% While a gen_server process runs in the background to update +%% the cache of formatted dates every second, all API calls are +%% local and directly read from the ETS cache table, providing +%% fast time and date computations. +-module(cowboy_clock). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). +-export([stop/0]). +-export([rfc1123/0]). +-export([rfc1123/1]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { + universaltime = undefined :: undefined | calendar:datetime(), + rfc1123 = <<>> :: binary(), + tref = undefined :: undefined | reference() +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +-spec stop() -> stopped. +stop() -> + gen_server:call(?MODULE, stop). + +%% When the ets table doesn't exist, either because of a bug +%% or because Cowboy is being restarted, we perform in a +%% slightly degraded state and build a new timestamp for +%% every request. +-spec rfc1123() -> binary(). +rfc1123() -> + try + ets:lookup_element(?MODULE, rfc1123, 2) + catch error:badarg -> + rfc1123(erlang:universaltime()) + end. + +-spec rfc1123(calendar:datetime()) -> binary(). +rfc1123(DateTime) -> + update_rfc1123(<<>>, undefined, DateTime). + +%% gen_server. + +-spec init([]) -> {ok, #state{}}. +init([]) -> + ?MODULE = ets:new(?MODULE, [set, protected, + named_table, {read_concurrency, true}]), + T = erlang:universaltime(), + B = update_rfc1123(<<>>, undefined, T), + TRef = erlang:send_after(1000, self(), update), + ets:insert(?MODULE, {rfc1123, B}), + {ok, #state{universaltime=T, rfc1123=B, tref=TRef}}. + +-type from() :: {pid(), term()}. +-spec handle_call + (stop, from(), State) -> {stop, normal, stopped, State} + when State::#state{}. +handle_call(stop, _From, State) -> + {stop, normal, stopped, State}; +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +-spec handle_cast(_, State) -> {noreply, State} when State::#state{}. +handle_cast(_Msg, State) -> + {noreply, State}. + +-spec handle_info(any(), State) -> {noreply, State} when State::#state{}. +handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef0}) -> + %% Cancel the timer in case an external process sent an update message. + _ = erlang:cancel_timer(TRef0), + T = erlang:universaltime(), + B2 = update_rfc1123(B1, Prev, T), + ets:insert(?MODULE, {rfc1123, B2}), + TRef = erlang:send_after(1000, self(), update), + {noreply, #state{universaltime=T, rfc1123=B2, tref=TRef}}; +handle_info(_Info, State) -> + {noreply, State}. + +-spec terminate(_, _) -> ok. +terminate(_Reason, _State) -> + ok. + +-spec code_change(_, State, _) -> {ok, State} when State::#state{}. +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +%% Internal. + +-spec update_rfc1123(binary(), undefined | calendar:datetime(), + calendar:datetime()) -> binary(). 
+update_rfc1123(Bin, Now, Now) -> + Bin; +update_rfc1123(<< Keep:23/binary, _/bits >>, + {Date, {H, M, _}}, {Date, {H, M, S}}) -> + << Keep/binary, (pad_int(S))/binary, " GMT" >>; +update_rfc1123(<< Keep:20/binary, _/bits >>, + {Date, {H, _, _}}, {Date, {H, M, S}}) -> + << Keep/binary, (pad_int(M))/binary, $:, (pad_int(S))/binary, " GMT" >>; +update_rfc1123(<< Keep:17/binary, _/bits >>, {Date, _}, {Date, {H, M, S}}) -> + << Keep/binary, (pad_int(H))/binary, $:, (pad_int(M))/binary, + $:, (pad_int(S))/binary, " GMT" >>; +update_rfc1123(<< _:7/binary, Keep:10/binary, _/bits >>, + {{Y, Mo, _}, _}, {Date = {Y, Mo, D}, {H, M, S}}) -> + Wday = calendar:day_of_the_week(Date), + << (weekday(Wday))/binary, ", ", (pad_int(D))/binary, Keep/binary, + (pad_int(H))/binary, $:, (pad_int(M))/binary, + $:, (pad_int(S))/binary, " GMT" >>; +update_rfc1123(<< _:11/binary, Keep:6/binary, _/bits >>, + {{Y, _, _}, _}, {Date = {Y, Mo, D}, {H, M, S}}) -> + Wday = calendar:day_of_the_week(Date), + << (weekday(Wday))/binary, ", ", (pad_int(D))/binary, " ", + (month(Mo))/binary, Keep/binary, + (pad_int(H))/binary, $:, (pad_int(M))/binary, + $:, (pad_int(S))/binary, " GMT" >>; +update_rfc1123(_, _, {Date = {Y, Mo, D}, {H, M, S}}) -> + Wday = calendar:day_of_the_week(Date), + << (weekday(Wday))/binary, ", ", (pad_int(D))/binary, " ", + (month(Mo))/binary, " ", (integer_to_binary(Y))/binary, + " ", (pad_int(H))/binary, $:, (pad_int(M))/binary, + $:, (pad_int(S))/binary, " GMT" >>. + +%% Following suggestion by MononcQc on #erlounge. +-spec pad_int(0..59) -> binary(). +pad_int(X) when X < 10 -> + << $0, ($0 + X) >>; +pad_int(X) -> + integer_to_binary(X). + +-spec weekday(1..7) -> <<_:24>>. +weekday(1) -> <<"Mon">>; +weekday(2) -> <<"Tue">>; +weekday(3) -> <<"Wed">>; +weekday(4) -> <<"Thu">>; +weekday(5) -> <<"Fri">>; +weekday(6) -> <<"Sat">>; +weekday(7) -> <<"Sun">>. + +-spec month(1..12) -> <<_:24>>. +month( 1) -> <<"Jan">>; +month( 2) -> <<"Feb">>; +month( 3) -> <<"Mar">>; +month( 4) -> <<"Apr">>; +month( 5) -> <<"May">>; +month( 6) -> <<"Jun">>; +month( 7) -> <<"Jul">>; +month( 8) -> <<"Aug">>; +month( 9) -> <<"Sep">>; +month(10) -> <<"Oct">>; +month(11) -> <<"Nov">>; +month(12) -> <<"Dec">>. + +%% Tests. + +-ifdef(TEST). +update_rfc1123_test_() -> + Tests = [ + {<<"Sat, 14 May 2011 14:25:33 GMT">>, undefined, + {{2011, 5, 14}, {14, 25, 33}}, <<>>}, + {<<"Sat, 14 May 2011 14:25:33 GMT">>, {{2011, 5, 14}, {14, 25, 33}}, + {{2011, 5, 14}, {14, 25, 33}}, <<"Sat, 14 May 2011 14:25:33 GMT">>}, + {<<"Sat, 14 May 2011 14:25:34 GMT">>, {{2011, 5, 14}, {14, 25, 33}}, + {{2011, 5, 14}, {14, 25, 34}}, <<"Sat, 14 May 2011 14:25:33 GMT">>}, + {<<"Sat, 14 May 2011 14:26:00 GMT">>, {{2011, 5, 14}, {14, 25, 59}}, + {{2011, 5, 14}, {14, 26, 0}}, <<"Sat, 14 May 2011 14:25:59 GMT">>}, + {<<"Sat, 14 May 2011 15:00:00 GMT">>, {{2011, 5, 14}, {14, 59, 59}}, + {{2011, 5, 14}, {15, 0, 0}}, <<"Sat, 14 May 2011 14:59:59 GMT">>}, + {<<"Sun, 15 May 2011 00:00:00 GMT">>, {{2011, 5, 14}, {23, 59, 59}}, + {{2011, 5, 15}, { 0, 0, 0}}, <<"Sat, 14 May 2011 23:59:59 GMT">>}, + {<<"Wed, 01 Jun 2011 00:00:00 GMT">>, {{2011, 5, 31}, {23, 59, 59}}, + {{2011, 6, 1}, { 0, 0, 0}}, <<"Tue, 31 May 2011 23:59:59 GMT">>}, + {<<"Sun, 01 Jan 2012 00:00:00 GMT">>, {{2011, 5, 31}, {23, 59, 59}}, + {{2012, 1, 1}, { 0, 0, 0}}, <<"Sat, 31 Dec 2011 23:59:59 GMT">>} + ], + [{R, fun() -> R = update_rfc1123(B, P, N) end} || {R, P, N, B} <- Tests]. 
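+
+%% Illustrative example (comment only): rfc1123/1 formats an arbitrary
+%% calendar:datetime() without going through the ETS cache, which is also the
+%% degraded path taken by rfc1123/0 when the table is unavailable.
+%%
+%% 1> cowboy_clock:rfc1123({{2021, 12, 20}, {22, 53, 34}}).
+%% <<"Mon, 20 Dec 2021 22:53:34 GMT">>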
+ +pad_int_test_() -> + Tests = [ + { 0, <<"00">>}, { 1, <<"01">>}, { 2, <<"02">>}, { 3, <<"03">>}, + { 4, <<"04">>}, { 5, <<"05">>}, { 6, <<"06">>}, { 7, <<"07">>}, + { 8, <<"08">>}, { 9, <<"09">>}, {10, <<"10">>}, {11, <<"11">>}, + {12, <<"12">>}, {13, <<"13">>}, {14, <<"14">>}, {15, <<"15">>}, + {16, <<"16">>}, {17, <<"17">>}, {18, <<"18">>}, {19, <<"19">>}, + {20, <<"20">>}, {21, <<"21">>}, {22, <<"22">>}, {23, <<"23">>}, + {24, <<"24">>}, {25, <<"25">>}, {26, <<"26">>}, {27, <<"27">>}, + {28, <<"28">>}, {29, <<"29">>}, {30, <<"30">>}, {31, <<"31">>}, + {32, <<"32">>}, {33, <<"33">>}, {34, <<"34">>}, {35, <<"35">>}, + {36, <<"36">>}, {37, <<"37">>}, {38, <<"38">>}, {39, <<"39">>}, + {40, <<"40">>}, {41, <<"41">>}, {42, <<"42">>}, {43, <<"43">>}, + {44, <<"44">>}, {45, <<"45">>}, {46, <<"46">>}, {47, <<"47">>}, + {48, <<"48">>}, {49, <<"49">>}, {50, <<"50">>}, {51, <<"51">>}, + {52, <<"52">>}, {53, <<"53">>}, {54, <<"54">>}, {55, <<"55">>}, + {56, <<"56">>}, {57, <<"57">>}, {58, <<"58">>}, {59, <<"59">>} + ], + [{I, fun() -> O = pad_int(I) end} || {I, O} <- Tests]. +-endif. diff --git a/src/wsSrv/cowboy_compress_h.erl b/src/wsSrv/cowboy_compress_h.erl new file mode 100644 index 0000000..374cb6a --- /dev/null +++ b/src/wsSrv/cowboy_compress_h.erl @@ -0,0 +1,249 @@ +%% Copyright (c) 2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_compress_h). +-behavior(cowboy_stream). + +-export([init/3]). +-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). + +-record(state, { + next :: any(), + threshold :: non_neg_integer() | undefined, + compress = undefined :: undefined | gzip, + deflate = undefined :: undefined | zlib:zstream(), + deflate_flush = sync :: none | sync +}). + +-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) + -> {cowboy_stream:commands(), #state{}}. +init(StreamID, Req, Opts) -> + State0 = check_req(Req), + CompressThreshold = maps:get(compress_threshold, Opts, 300), + DeflateFlush = buffering_to_zflush(maps:get(compress_buffering, Opts, false)), + {Commands0, Next} = cowboy_stream:init(StreamID, Req, Opts), + fold(Commands0, State0#state{next=Next, + threshold=CompressThreshold, + deflate_flush=DeflateFlush}). + +-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +data(StreamID, IsFin, Data, State0=#state{next=Next0}) -> + {Commands0, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0), + fold(Commands0, State0#state{next=Next}). + +-spec info(cowboy_stream:streamid(), any(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +info(StreamID, Info, State0=#state{next=Next0}) -> + {Commands0, Next} = cowboy_stream:info(StreamID, Info, Next0), + fold(Commands0, State0#state{next=Next}). 
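+
+%% Usage sketch (illustrative comment only): this stream handler is enabled by
+%% listing it in the stream_handlers protocol option, in front of
+%% cowboy_stream_h. The threshold and buffering values below are example
+%% settings, passed for instance to cowboy:start_clear/3 as protocol options.
+%%
+%% #{
+%%	stream_handlers => [cowboy_compress_h, cowboy_stream_h],
+%%	compress_threshold => 300,
+%%	compress_buffering => false
+%% }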
+ +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> any(). +terminate(StreamID, Reason, #state{next=Next, deflate=Z}) -> + %% Clean the zlib:stream() in case something went wrong. + %% In the normal scenario the stream is already closed. + case Z of + undefined -> ok; + _ -> zlib:close(Z) + end, + cowboy_stream:terminate(StreamID, Reason, Next). + +-spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(), + cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp + when Resp::cowboy_stream:resp_command(). +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> + cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts). + +%% Internal. + +%% Check if the client supports decoding of gzip responses. +%% +%% A malformed accept-encoding header is ignored (no compression). +check_req(Req) -> + try cowboy_req:parse_header(<<"accept-encoding">>, Req) of + %% Client doesn't support any compression algorithm. + undefined -> + #state{compress=undefined}; + Encodings -> + %% We only support gzip so look for it specifically. + %% @todo A recipient SHOULD consider "x-gzip" to be + %% equivalent to "gzip". (RFC7230 4.2.3) + case [E || E={<<"gzip">>, Q} <- Encodings, Q =/= 0] of + [] -> + #state{compress=undefined}; + _ -> + #state{compress=gzip} + end + catch + _:_ -> + #state{compress=undefined} + end. + +%% Do not compress responses that contain the content-encoding header. +check_resp_headers(#{<<"content-encoding">> := _}, State) -> + State#state{compress=undefined}; +check_resp_headers(_, State) -> + State. + +fold(Commands, State=#state{compress=undefined}) -> + {Commands, State}; +fold(Commands, State) -> + fold(Commands, State, []). + +fold([], State, Acc) -> + {lists:reverse(Acc), State}; +%% We do not compress full sendfile bodies. +fold([Response={response, _, _, {sendfile, _, _, _}}|Tail], State, Acc) -> + fold(Tail, State, [Response|Acc]); +%% We compress full responses directly, unless they are lower than +%% the configured threshold or we find we are not able to by looking at the headers. +fold([Response0={response, _, Headers, Body}|Tail], + State0=#state{threshold=CompressThreshold}, Acc) -> + case check_resp_headers(Headers, State0) of + State=#state{compress=undefined} -> + fold(Tail, State, [Response0|Acc]); + State1 -> + BodyLength = iolist_size(Body), + if + BodyLength =< CompressThreshold -> + fold(Tail, State1, [Response0|Acc]); + true -> + {Response, State} = gzip_response(Response0, State1), + fold(Tail, State, [Response|Acc]) + end + end; +%% Check headers and initiate compression... +fold([Response0={headers, _, Headers}|Tail], State0, Acc) -> + case check_resp_headers(Headers, State0) of + State=#state{compress=undefined} -> + fold(Tail, State, [Response0|Acc]); + State1 -> + {Response, State} = gzip_headers(Response0, State1), + fold(Tail, State, [Response|Acc]) + end; +%% then compress each data commands individually. +fold([Data0={data, _, _}|Tail], State0=#state{compress=gzip}, Acc) -> + {Data, State} = gzip_data(Data0, State0), + fold(Tail, State, [Data|Acc]); +%% When trailers are sent we need to end the compression. +%% This results in an extra data command being sent. +fold([Trailers={trailers, _}|Tail], State0=#state{compress=gzip}, Acc) -> + {{data, fin, Data}, State} = gzip_data({data, fin, <<>>}, State0), + fold(Tail, State, [Trailers, {data, nofin, Data}|Acc]); +%% All the options from this handler can be updated for the current stream. +%% The set_options command must be propagated as-is regardless. 
+fold([SetOptions={set_options, Opts}|Tail], State=#state{ + threshold=CompressThreshold0, deflate_flush=DeflateFlush0}, Acc) -> + CompressThreshold = maps:get(compress_threshold, Opts, CompressThreshold0), + DeflateFlush = case Opts of + #{compress_buffering := CompressBuffering} -> + buffering_to_zflush(CompressBuffering); + _ -> + DeflateFlush0 + end, + fold(Tail, State#state{threshold=CompressThreshold, deflate_flush=DeflateFlush}, + [SetOptions|Acc]); +%% Otherwise, we have an unrelated command or compression is disabled. +fold([Command|Tail], State, Acc) -> + fold(Tail, State, [Command|Acc]). + +buffering_to_zflush(true) -> none; +buffering_to_zflush(false) -> sync. + +gzip_response({response, Status, Headers, Body}, State) -> + %% We can't call zlib:gzip/1 because it does an + %% iolist_to_binary(GzBody) at the end to return + %% a binary(). Therefore the code here is largely + %% a duplicate of the code of that function. + Z = zlib:open(), + GzBody = try + %% 31 = 16+?MAX_WBITS from zlib.erl + %% @todo It might be good to allow them to be configured? + zlib:deflateInit(Z, default, deflated, 31, 8, default), + Gz = zlib:deflate(Z, Body, finish), + zlib:deflateEnd(Z), + Gz + after + zlib:close(Z) + end, + {{response, Status, vary(Headers#{ + <<"content-length">> => integer_to_binary(iolist_size(GzBody)), + <<"content-encoding">> => <<"gzip">> + }), GzBody}, State}. + +gzip_headers({headers, Status, Headers0}, State) -> + Z = zlib:open(), + %% We use the same arguments as when compressing the body fully. + %% @todo It might be good to allow them to be configured? + zlib:deflateInit(Z, default, deflated, 31, 8, default), + Headers = maps:remove(<<"content-length">>, Headers0), + {{headers, Status, vary(Headers#{ + <<"content-encoding">> => <<"gzip">> + })}, State#state{deflate=Z}}. + +%% We must add content-encoding to vary if it's not already there. +vary(Headers=#{<<"vary">> := Vary}) -> + try cow_http_hd:parse_vary(iolist_to_binary(Vary)) of + '*' -> Headers; + List -> + case lists:member(<<"accept-encoding">>, List) of + true -> Headers; + false -> Headers#{<<"vary">> => [Vary, <<", accept-encoding">>]} + end + catch _:_ -> + %% The vary header is invalid. Probably empty. We replace it with ours. + Headers#{<<"vary">> => <<"accept-encoding">>} + end; +vary(Headers) -> + Headers#{<<"vary">> => <<"accept-encoding">>}. + +%% It is not possible to combine zlib and the sendfile +%% syscall as far as I can tell, because the zlib format +%% includes a checksum at the end of the stream. We have +%% to read the file in memory, making this not suitable for +%% large files. +gzip_data({data, nofin, Sendfile={sendfile, _, _, _}}, + State=#state{deflate=Z, deflate_flush=Flush}) -> + {ok, Data0} = read_file(Sendfile), + Data = zlib:deflate(Z, Data0, Flush), + {{data, nofin, Data}, State}; +gzip_data({data, fin, Sendfile={sendfile, _, _, _}}, State=#state{deflate=Z}) -> + {ok, Data0} = read_file(Sendfile), + Data = zlib:deflate(Z, Data0, finish), + zlib:deflateEnd(Z), + zlib:close(Z), + {{data, fin, Data}, State#state{deflate=undefined}}; +gzip_data({data, nofin, Data0}, State=#state{deflate=Z, deflate_flush=Flush}) -> + Data = zlib:deflate(Z, Data0, Flush), + {{data, nofin, Data}, State}; +gzip_data({data, fin, Data0}, State=#state{deflate=Z}) -> + Data = zlib:deflate(Z, Data0, finish), + zlib:deflateEnd(Z), + zlib:close(Z), + {{data, fin, Data}, State#state{deflate=undefined}}. 
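+
+%% Usage sketch (illustrative comment only): a handler can adjust these
+%% options for its own stream with the set_options command, which the fold
+%% above propagates. Assuming cowboy_req:cast/2 is available in this Cowboy
+%% version, buffered compression could be enabled from a handler like this:
+%%
+%% init(Req, State) ->
+%%	ok = cowboy_req:cast({set_options, #{compress_buffering => true}}, Req),
+%%	{ok, Req, State}.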
+ +read_file({sendfile, Offset, Bytes, Path}) -> + {ok, IoDevice} = file:open(Path, [read, raw, binary]), + try + _ = case Offset of + 0 -> ok; + _ -> file:position(IoDevice, {bof, Offset}) + end, + file:read(IoDevice, Bytes) + after + file:close(IoDevice) + end. diff --git a/src/wsSrv/cowboy_constraints.erl b/src/wsSrv/cowboy_constraints.erl new file mode 100644 index 0000000..6509c4b --- /dev/null +++ b/src/wsSrv/cowboy_constraints.erl @@ -0,0 +1,174 @@ +%% Copyright (c) 2014-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_constraints). + +-export([validate/2]). +-export([reverse/2]). +-export([format_error/1]). + +-type constraint() :: int | nonempty | fun(). +-export_type([constraint/0]). + +-type reason() :: {constraint(), any(), any()}. +-export_type([reason/0]). + +-spec validate(binary(), constraint() | [constraint()]) + -> {ok, any()} | {error, reason()}. +validate(Value, Constraints) when is_list(Constraints) -> + apply_list(forward, Value, Constraints); +validate(Value, Constraint) -> + apply_list(forward, Value, [Constraint]). + +-spec reverse(any(), constraint() | [constraint()]) + -> {ok, binary()} | {error, reason()}. +reverse(Value, Constraints) when is_list(Constraints) -> + apply_list(reverse, Value, Constraints); +reverse(Value, Constraint) -> + apply_list(reverse, Value, [Constraint]). + +-spec format_error(reason()) -> iodata(). +format_error({Constraint, Reason, Value}) -> + apply_constraint(format_error, {Reason, Value}, Constraint). + +apply_list(_, Value, []) -> + {ok, Value}; +apply_list(Type, Value0, [Constraint|Tail]) -> + case apply_constraint(Type, Value0, Constraint) of + {ok, Value} -> + apply_list(Type, Value, Tail); + {error, Reason} -> + {error, {Constraint, Reason, Value0}} + end. + +%% @todo {int, From, To}, etc. +apply_constraint(Type, Value, int) -> + int(Type, Value); +apply_constraint(Type, Value, nonempty) -> + nonempty(Type, Value); +apply_constraint(Type, Value, F) when is_function(F) -> + F(Type, Value). + +%% Constraint functions. + +int(forward, Value) -> + try + {ok, binary_to_integer(Value)} + catch _:_ -> + {error, not_an_integer} + end; +int(reverse, Value) -> + try + {ok, integer_to_binary(Value)} + catch _:_ -> + {error, not_an_integer} + end; +int(format_error, {not_an_integer, Value}) -> + io_lib:format("The value ~p is not an integer.", [Value]). + +nonempty(Type, <<>>) when Type =/= format_error -> + {error, empty}; +nonempty(Type, Value) when Type =/= format_error, is_binary(Value) -> + {ok, Value}; +nonempty(format_error, {empty, Value}) -> + io_lib:format("The value ~p is empty.", [Value]). + +-ifdef(TEST). + +validate_test() -> + F = fun(_, Value) -> + try + {ok, binary_to_atom(Value, latin1)} + catch _:_ -> + {error, not_a_binary} + end + end, + %% Value, Constraints, Result. 
+ Tests = [ + {<<>>, [], <<>>}, + {<<"123">>, int, 123}, + {<<"123">>, [int], 123}, + {<<"123">>, [nonempty, int], 123}, + {<<"123">>, [int, nonempty], 123}, + {<<>>, nonempty, error}, + {<<>>, [nonempty], error}, + {<<"hello">>, F, hello}, + {<<"hello">>, [F], hello}, + {<<"123">>, [F, int], error}, + {<<"123">>, [int, F], error}, + {<<"hello">>, [nonempty, F], hello}, + {<<"hello">>, [F, nonempty], hello} + ], + [{lists:flatten(io_lib:format("~p, ~p", [V, C])), fun() -> + case R of + error -> {error, _} = validate(V, C); + _ -> {ok, R} = validate(V, C) + end + end} || {V, C, R} <- Tests]. + +reverse_test() -> + F = fun(_, Value) -> + try + {ok, atom_to_binary(Value, latin1)} + catch _:_ -> + {error, not_an_atom} + end + end, + %% Value, Constraints, Result. + Tests = [ + {<<>>, [], <<>>}, + {123, int, <<"123">>}, + {123, [int], <<"123">>}, + {123, [nonempty, int], <<"123">>}, + {123, [int, nonempty], <<"123">>}, + {<<>>, nonempty, error}, + {<<>>, [nonempty], error}, + {hello, F, <<"hello">>}, + {hello, [F], <<"hello">>}, + {123, [F, int], error}, + {123, [int, F], error}, + {hello, [nonempty, F], <<"hello">>}, + {hello, [F, nonempty], <<"hello">>} + ], + [{lists:flatten(io_lib:format("~p, ~p", [V, C])), fun() -> + case R of + error -> {error, _} = reverse(V, C); + _ -> {ok, R} = reverse(V, C) + end + end} || {V, C, R} <- Tests]. + +int_format_error_test() -> + {error, Reason} = validate(<<"string">>, int), + Bin = iolist_to_binary(format_error(Reason)), + true = is_binary(Bin), + ok. + +nonempty_format_error_test() -> + {error, Reason} = validate(<<>>, nonempty), + Bin = iolist_to_binary(format_error(Reason)), + true = is_binary(Bin), + ok. + +fun_format_error_test() -> + F = fun + (format_error, {test, <<"value">>}) -> + formatted; + (_, _) -> + {error, test} + end, + {error, Reason} = validate(<<"value">>, F), + formatted = format_error(Reason), + ok. + +-endif. diff --git a/src/wsSrv/cowboy_handler.erl b/src/wsSrv/cowboy_handler.erl new file mode 100644 index 0000000..c0f7ff7 --- /dev/null +++ b/src/wsSrv/cowboy_handler.erl @@ -0,0 +1,57 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Handler middleware. +%% +%% Execute the handler given by the handler and handler_opts +%% environment values. The result of this execution is added to the +%% environment under the result value. +-module(cowboy_handler). +-behaviour(cowboy_middleware). + +-export([execute/2]). +-export([terminate/4]). + +-callback init(Req, any()) + -> {ok | module(), Req, any()} + | {module(), Req, any(), any()} + when Req::cowboy_req:req(). + +-callback terminate(any(), map(), any()) -> ok. +-optional_callbacks([terminate/3]). + +-spec execute(Req, Env) -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). 
+execute(Req, Env=#{handler := Handler, handler_opts := HandlerOpts}) -> + try Handler:init(Req, HandlerOpts) of + {ok, Req2, State} -> + Result = terminate(normal, Req2, State, Handler), + {ok, Req2, Env#{result => Result}}; + {Mod, Req2, State} -> + Mod:upgrade(Req2, Env, Handler, State); + {Mod, Req2, State, Opts} -> + Mod:upgrade(Req2, Env, Handler, State, Opts) + catch Class:Reason:Stacktrace -> + terminate({crash, Class, Reason}, Req, HandlerOpts, Handler), + erlang:raise(Class, Reason, Stacktrace) + end. + +-spec terminate(any(), Req | undefined, any(), module()) -> ok when Req::cowboy_req:req(). +terminate(Reason, Req, State, Handler) -> + case erlang:function_exported(Handler, terminate, 3) of + true -> + Handler:terminate(Reason, Req, State); + false -> + ok + end. diff --git a/src/wsSrv/cowboy_http.erl b/src/wsSrv/cowboy_http.erl new file mode 100644 index 0000000..89f9858 --- /dev/null +++ b/src/wsSrv/cowboy_http.erl @@ -0,0 +1,1523 @@ +%% Copyright (c) 2016-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_http). + +-export([init/6]). + +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-type opts() :: #{ + active_n => pos_integer(), + chunked => boolean(), + compress_buffering => boolean(), + compress_threshold => non_neg_integer(), + connection_type => worker | supervisor, + env => cowboy_middleware:env(), + http10_keepalive => boolean(), + idle_timeout => timeout(), + inactivity_timeout => timeout(), + initial_stream_flow_size => non_neg_integer(), + linger_timeout => timeout(), + logger => module(), + max_authority_length => non_neg_integer(), + max_empty_lines => non_neg_integer(), + max_header_name_length => non_neg_integer(), + max_header_value_length => non_neg_integer(), + max_headers => non_neg_integer(), + max_keepalive => non_neg_integer(), + max_method_length => non_neg_integer(), + max_request_line_length => non_neg_integer(), + metrics_callback => cowboy_metrics_h:metrics_callback(), + metrics_req_filter => fun((cowboy_req:req()) -> map()), + metrics_resp_headers_filter => fun((cowboy:http_headers()) -> cowboy:http_headers()), + middlewares => [module()], + proxy_header => boolean(), + request_timeout => timeout(), + sendfile => boolean(), + shutdown_timeout => timeout(), + stream_handlers => [module()], + tracer_callback => cowboy_tracer_h:tracer_callback(), + tracer_flags => [atom()], + tracer_match_specs => cowboy_tracer_h:tracer_match_specs(), + %% Open ended because configured stream handlers might add options. + _ => _ +}. +-export_type([opts/0]). + +-record(ps_request_line, { + empty_lines = 0 :: non_neg_integer() +}). 
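For illustration, a minimal sketch (not part of this patch) of how a few of the cowboy_http opts() above would be passed when starting a listener. cowboy:start_clear/3 and cowboy_router:compile/1 come from the bundled cowboy modules; the listener name example_http and the my_handler module are hypothetical:

start_example() ->
    %% Route every path to a single handler module.
    Dispatch = cowboy_router:compile([
        {'_', [{"/[...]", my_handler, #{}}]}
    ]),
    %% Protocol options map; the keys correspond to the opts() type above.
    {ok, _} = cowboy:start_clear(example_http, [{port, 8080}], #{
        env => #{dispatch => Dispatch},
        idle_timeout => 60000,       %% same default used by cowboy_http
        max_keepalive => 1000,       %% requests served per connection
        request_timeout => 5000,     %% time allowed to send the request-line
        sendfile => true
    }).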
+ +-record(ps_header, { + method = undefined :: binary(), + authority = undefined :: binary() | undefined, + path = undefined :: binary(), + qs = undefined :: binary(), + version = undefined :: cowboy:http_version(), + headers = undefined :: cowboy:http_headers() | undefined, + name = undefined :: binary() | undefined +}). + +-record(ps_body, { + length :: non_neg_integer() | undefined, + received = 0 :: non_neg_integer(), + transfer_decode_fun :: fun((binary(), cow_http_te:state()) -> cow_http_te:decode_ret()), + transfer_decode_state :: cow_http_te:state() +}). + +-record(stream, { + id = undefined :: cowboy_stream:streamid(), + %% Stream handlers and their state. + state = undefined :: {module(), any()}, + %% Request method. + method = undefined :: binary(), + %% Client HTTP version for this stream. + version = undefined :: cowboy:http_version(), + %% Unparsed te header. Used to know if we can send trailers. + te :: undefined | binary(), + %% Expected body size. + local_expected_size = undefined :: undefined | non_neg_integer(), + %% Sent body size. + local_sent_size = 0 :: non_neg_integer(), + %% Commands queued. + queue = [] :: cowboy_stream:commands() +}). + +-type stream() :: #stream{}. + +-record(state, { + parent :: pid(), + ref :: ranch:ref(), + socket :: inet:socket(), + transport :: module(), + proxy_header :: undefined | ranch_proxy_header:proxy_info(), + opts = #{} :: cowboy:opts(), + buffer = <<>> :: binary(), + + %% Some options may be overriden for the current stream. + overriden_opts = #{} :: cowboy:opts(), + + %% Remote address and port for the connection. + peer = undefined :: {inet:ip_address(), inet:port_number()}, + + %% Local address and port for the connection. + sock = undefined :: {inet:ip_address(), inet:port_number()}, + + %% Client certificate (TLS only). + cert :: undefined | binary(), + + timer = undefined :: undefined | reference(), + + %% Whether we are currently receiving data from the socket. + active = true :: boolean(), + + %% Identifier for the stream currently being read (or waiting to be received). + in_streamid = 1 :: pos_integer(), + + %% Parsing state for the current stream or stream-to-be. + in_state = #ps_request_line{} :: #ps_request_line{} | #ps_header{} | #ps_body{}, + + %% Flow requested for the current stream. + flow = infinity :: non_neg_integer() | infinity, + + %% Identifier for the stream currently being written. + %% Note that out_streamid =< in_streamid. + out_streamid = 1 :: pos_integer(), + + %% Whether we finished writing data for the current stream. + out_state = wait :: wait | chunked | streaming | done, + + %% The connection will be closed after this stream. + last_streamid = undefined :: pos_integer(), + + %% Currently active HTTP/1.1 streams. + streams = [] :: [stream()], + + %% Children processes created by streams. + children = cowboy_children:init() :: cowboy_children:children() +}). + +-include_lib("cow_inline.hrl"). +-include_lib("cow_parse.hrl"). + +-spec init(pid(), ranch:ref(), inet:socket(), module(), + ranch_proxy_header:proxy_info(), cowboy:opts()) -> ok. 
+init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> + Peer0 = Transport:peername(Socket), + Sock0 = Transport:sockname(Socket), + Cert1 = case Transport:name() of + ssl -> + case ssl:peercert(Socket) of + {error, no_peercert} -> + {ok, undefined}; + Cert0 -> + Cert0 + end; + _ -> + {ok, undefined} + end, + case {Peer0, Sock0, Cert1} of + {{ok, Peer}, {ok, Sock}, {ok, Cert}} -> + State = #state{ + parent=Parent, ref=Ref, socket=Socket, + transport=Transport, proxy_header=ProxyHeader, opts=Opts, + peer=Peer, sock=Sock, cert=Cert, + last_streamid=maps:get(max_keepalive, Opts, 1000)}, + setopts_active(State), + loop(set_timeout(State, request_timeout)); + {{error, Reason}, _, _} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the peer name.'}); + {_, {error, Reason}, _} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the sock name.'}); + {_, _, {error, Reason}} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the client TLS certificate.'}) + end. + +setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> + N = maps:get(active_n, Opts, 100), + Transport:setopts(Socket, [{active, N}]). + +active(State) -> + setopts_active(State), + State#state{active=true}. + +passive(State=#state{socket=Socket, transport=Transport}) -> + Transport:setopts(Socket, [{active, false}]), + Messages = Transport:messages(), + flush_passive(Socket, Messages), + State#state{active=false}. + +flush_passive(Socket, Messages) -> + receive + {Passive, Socket} when Passive =:= element(4, Messages); + %% Hardcoded for compatibility with Ranch 1.x. + Passive =:= tcp_passive; Passive =:= ssl_passive -> + flush_passive(Socket, Messages) + after 0 -> + ok + end. + +loop(State=#state{parent=Parent, socket=Socket, transport=Transport, opts=Opts, + buffer=Buffer, timer=TimerRef, children=Children, in_streamid=InStreamID, + last_streamid=LastStreamID}) -> + Messages = Transport:messages(), + InactivityTimeout = maps:get(inactivity_timeout, Opts, 300000), + receive + %% Discard data coming in after the last request + %% we want to process was received fully. + {OK, Socket, _} when OK =:= element(1, Messages), InStreamID > LastStreamID -> + loop(State); + %% Socket messages. + {OK, Socket, Data} when OK =:= element(1, Messages) -> + parse(<< Buffer/binary, Data/binary >>, State); + {Closed, Socket} when Closed =:= element(2, Messages) -> + terminate(State, {socket_error, closed, 'The socket has been closed.'}); + {Error, Socket, Reason} when Error =:= element(3, Messages) -> + terminate(State, {socket_error, Reason, 'An error has occurred on the socket.'}); + {Passive, Socket} when Passive =:= element(4, Messages); + %% Hardcoded for compatibility with Ranch 1.x. + Passive =:= tcp_passive; Passive =:= ssl_passive -> + setopts_active(State), + loop(State); + %% Timeouts. + {timeout, Ref, {shutdown, Pid}} -> + cowboy_children:shutdown_timeout(Children, Ref, Pid), + loop(State); + {timeout, TimerRef, Reason} -> + timeout(State, Reason); + {timeout, _, _} -> + loop(State); + %% System messages. + {'EXIT', Parent, shutdown} -> + Reason = {stop, {exit, shutdown}, 'Parent process requested shutdown.'}, + loop(initiate_closing(State, Reason)); + {'EXIT', Parent, Reason} -> + terminate(State, {stop, {exit, Reason}, 'Parent process terminated.'}); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State); + %% Messages pertaining to a stream. 
+ {{Pid, StreamID}, Msg} when Pid =:= self() -> + loop(info(State, StreamID, Msg)); + %% Exit signal from children. + Msg = {'EXIT', Pid, _} -> + loop(down(State, Pid, Msg)); + %% Calls from supervisor module. + {'$gen_call', From, Call} -> + cowboy_children:handle_supervisor_call(Call, From, Children, ?MODULE), + loop(State); + %% Unknown messages. + Msg -> + cowboy:log(warning, "Received stray message ~p.~n", [Msg], Opts), + loop(State) + after InactivityTimeout -> + terminate(State, {internal_error, timeout, 'No message or data received before timeout.'}) + end. + +%% We do not set request_timeout if there are active streams. +set_timeout(State=#state{streams=[_|_]}, request_timeout) -> + State; +%% We do not set request_timeout if we are skipping a body. +set_timeout(State=#state{in_state=#ps_body{}}, request_timeout) -> + State; +%% We do not set idle_timeout if there are no active streams, +%% unless when we are skipping a body. +set_timeout(State=#state{streams=[], in_state=InState}, idle_timeout) + when element(1, InState) =/= ps_body -> + State; +%% Otherwise we can set the timeout. +set_timeout(State0=#state{opts=Opts, overriden_opts=Override}, Name) -> + State = cancel_timeout(State0), + Default = case Name of + request_timeout -> 5000; + idle_timeout -> 60000 + end, + Timeout = case Override of + %% The timeout may have been overriden for the current stream. + #{Name := Timeout0} -> Timeout0; + _ -> maps:get(Name, Opts, Default) + end, + TimerRef = case Timeout of + infinity -> undefined; + Timeout -> erlang:start_timer(Timeout, self(), Name) + end, + State#state{timer=TimerRef}. + +cancel_timeout(State=#state{timer=TimerRef}) -> + ok = case TimerRef of + undefined -> + ok; + _ -> + %% Do a synchronous cancel and remove the message if any + %% to avoid receiving stray messages. + _ = erlang:cancel_timer(TimerRef), + receive + {timeout, TimerRef, _} -> ok + after 0 -> + ok + end + end, + State#state{timer=undefined}. + +-spec timeout(_, _) -> no_return(). +timeout(State=#state{in_state=#ps_request_line{}}, request_timeout) -> + terminate(State, {connection_error, timeout, + 'No request-line received before timeout.'}); +timeout(State=#state{in_state=#ps_header{}}, request_timeout) -> + error_terminate(408, State, {connection_error, timeout, + 'Request headers not received before timeout.'}); +timeout(State, idle_timeout) -> + terminate(State, {connection_error, timeout, + 'Connection idle longer than configuration allows.'}). + +parse(<<>>, State) -> + loop(State#state{buffer= <<>>}); +%% Do not process requests that come in after the last request +%% and discard the buffer if any to save memory. +parse(_, State=#state{in_streamid=InStreamID, in_state=#ps_request_line{}, + last_streamid=LastStreamID}) when InStreamID > LastStreamID -> + loop(State#state{buffer= <<>>}); +parse(Buffer, State=#state{in_state=#ps_request_line{empty_lines=EmptyLines}}) -> + after_parse(parse_request(Buffer, State, EmptyLines)); +parse(Buffer, State=#state{in_state=PS=#ps_header{headers=Headers, name=undefined}}) -> + after_parse(parse_header(Buffer, + State#state{in_state=PS#ps_header{headers=undefined}}, + Headers)); +parse(Buffer, State=#state{in_state=PS=#ps_header{headers=Headers, name=Name}}) -> + after_parse(parse_hd_before_value(Buffer, + State#state{in_state=PS#ps_header{headers=undefined, name=undefined}}, + Headers, Name)); +parse(Buffer, State=#state{in_state=#ps_body{}}) -> + after_parse(parse_body(Buffer, State)). 
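A standalone sketch (not part of this patch) of the timer idiom that cancel_timeout/1 above relies on: cancel the timer, then flush any already-delivered {timeout, Ref, _} message so no stale timeout can be processed later. The function name demo_cancel_timer is made up for illustration:

demo_cancel_timer(TimerRef) ->
    %% The timer may already have fired; cancel_timer/1 returns false then.
    _ = erlang:cancel_timer(TimerRef),
    %% Remove the timeout message if it was delivered before the cancellation.
    receive
        {timeout, TimerRef, _} -> ok
    after 0 ->
        ok
    end.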
+ +after_parse({request, Req=#{streamid := StreamID, method := Method, + headers := Headers, version := Version}, + State0=#state{opts=Opts, buffer=Buffer, streams=Streams0}}) -> + try cowboy_stream:init(StreamID, Req, Opts) of + {Commands, StreamState} -> + Flow = maps:get(initial_stream_flow_size, Opts, 65535), + TE = maps:get(<<"te">>, Headers, undefined), + Streams = [#stream{id=StreamID, state=StreamState, + method=Method, version=Version, te=TE}|Streams0], + State1 = case maybe_req_close(State0, Headers, Version) of + close -> State0#state{streams=Streams, last_streamid=StreamID, flow=Flow}; + keepalive -> State0#state{streams=Streams, flow=Flow} + end, + State = set_timeout(State1, idle_timeout), + parse(Buffer, commands(State, StreamID, Commands)) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(init, + [StreamID, Req, Opts], + Class, Exception, Stacktrace), Opts), + early_error(500, State0, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:init/3.'}, Req), + parse(Buffer, State0) + end; +%% Streams are sequential so the body is always about the last stream created +%% unless that stream has terminated. +after_parse({data, StreamID, IsFin, Data, State0=#state{opts=Opts, buffer=Buffer, + streams=Streams0=[Stream=#stream{id=StreamID, state=StreamState0}|_]}}) -> + try cowboy_stream:data(StreamID, IsFin, Data, StreamState0) of + {Commands, StreamState} -> + Streams = lists:keyreplace(StreamID, #stream.id, Streams0, + Stream#stream{state=StreamState}), + State1 = set_timeout(State0, case IsFin of + fin -> request_timeout; + nofin -> idle_timeout + end), + State = update_flow(IsFin, Data, State1#state{streams=Streams}), + parse(Buffer, commands(State, StreamID, Commands)) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(data, + [StreamID, IsFin, Data, StreamState0], + Class, Exception, Stacktrace), Opts), + %% @todo Should call parse after this. + stream_terminate(State0, StreamID, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:data/4.'}) + end; +%% No corresponding stream. We must skip the body of the previous request +%% in order to process the next one. +after_parse({data, _, IsFin, _, State}) -> + loop(set_timeout(State, case IsFin of + fin -> request_timeout; + nofin -> idle_timeout + end)); +after_parse({more, State}) -> + loop(set_timeout(State, idle_timeout)). + +update_flow(fin, _, State) -> + %% This function is only called after parsing, therefore we + %% are expecting to be in active mode already. + State#state{flow=infinity}; +update_flow(nofin, Data, State0=#state{flow=Flow0}) -> + Flow = Flow0 - byte_size(Data), + State = State0#state{flow=Flow}, + if + Flow0 > 0, Flow =< 0 -> + passive(State); + true -> + State + end. + +%% Request-line. + +-spec parse_request(Buffer, State, non_neg_integer()) + -> {request, cowboy_req:req(), State} + | {data, cowboy_stream:streamid(), cowboy_stream:fin(), binary(), State} + | {more, State} + when Buffer::binary(), State::#state{}. +%% Empty lines must be using \r\n. +parse_request(<< $\n, _/bits >>, State, _) -> + error_terminate(400, State, {connection_error, protocol_error, + 'Empty lines between requests must use the CRLF line terminator. (RFC7230 3.5)'}); +parse_request(<< $\s, _/bits >>, State, _) -> + error_terminate(400, State, {connection_error, protocol_error, + 'The request-line must not begin with a space. 
(RFC7230 3.1.1, RFC7230 3.5)'}); +%% We limit the length of the Request-line to MaxLength to avoid endlessly +%% reading from the socket and eventually crashing. +parse_request(Buffer, State=#state{opts=Opts, in_streamid=InStreamID}, EmptyLines) -> + MaxLength = maps:get(max_request_line_length, Opts, 8000), + MaxEmptyLines = maps:get(max_empty_lines, Opts, 5), + case match_eol(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(414, State, {connection_error, limit_reached, + 'The request-line length is larger than configuration allows. (RFC7230 3.1.1)'}); + nomatch -> + {more, State#state{buffer=Buffer, in_state=#ps_request_line{empty_lines=EmptyLines}}}; + 1 when EmptyLines =:= MaxEmptyLines -> + error_terminate(400, State, {connection_error, limit_reached, + 'More empty lines were received than configuration allows. (RFC7230 3.5)'}); + 1 -> + << _:16, Rest/bits >> = Buffer, + parse_request(Rest, State, EmptyLines + 1); + _ -> + case Buffer of + %% @todo * is only for server-wide OPTIONS request (RFC7230 5.3.4); tests + << "OPTIONS * ", Rest/bits >> -> + parse_version(Rest, State, <<"OPTIONS">>, undefined, <<"*">>, <<>>); + <<"CONNECT ", _/bits>> -> + error_terminate(501, State, {connection_error, no_error, + 'The CONNECT method is currently not implemented. (RFC7231 4.3.6)'}); + <<"TRACE ", _/bits>> -> + error_terminate(501, State, {connection_error, no_error, + 'The TRACE method is currently not implemented. (RFC7231 4.3.8)'}); + %% Accept direct HTTP/2 only at the beginning of the connection. + << "PRI * HTTP/2.0\r\n", _/bits >> when InStreamID =:= 1 -> + %% @todo Might be worth throwing to get a clean stacktrace. + http2_upgrade(State, Buffer); + _ -> + parse_method(Buffer, State, <<>>, + maps:get(max_method_length, Opts, 32)) + end + end. + +match_eol(<< $\n, _/bits >>, N) -> + N; +match_eol(<< _, Rest/bits >>, N) -> + match_eol(Rest, N + 1); +match_eol(_, _) -> + nomatch. + +parse_method(_, State, _, 0) -> + error_terminate(501, State, {connection_error, limit_reached, + 'The method name is longer than configuration allows. (RFC7230 3.1.1)'}); +parse_method(<< C, Rest/bits >>, State, SoFar, Remaining) -> + case C of + $\r -> error_terminate(400, State, {connection_error, protocol_error, + 'The method name must not be followed with a line break. (RFC7230 3.1.1)'}); + $\s -> parse_uri(Rest, State, SoFar); + _ when ?IS_TOKEN(C) -> parse_method(Rest, State, << SoFar/binary, C >>, Remaining - 1); + _ -> error_terminate(400, State, {connection_error, protocol_error, + 'The method name must contain only valid token characters. (RFC7230 3.1.1)'}) + end. + +parse_uri(<< H, T, T, P, "://", Rest/bits >>, State, Method) + when H =:= $h orelse H =:= $H, T =:= $t orelse T =:= $T; + P =:= $p orelse P =:= $P -> + parse_uri_authority(Rest, State, Method); +parse_uri(<< H, T, T, P, S, "://", Rest/bits >>, State, Method) + when H =:= $h orelse H =:= $H, T =:= $t orelse T =:= $T; + P =:= $p orelse P =:= $P; S =:= $s orelse S =:= $S -> + parse_uri_authority(Rest, State, Method); +parse_uri(<< $/, Rest/bits >>, State, Method) -> + parse_uri_path(Rest, State, Method, undefined, <<$/>>); +parse_uri(_, State, _) -> + error_terminate(400, State, {connection_error, protocol_error, + 'Invalid request-line or request-target. (RFC7230 3.1.1, RFC7230 5.3)'}). + +%% @todo We probably want to apply max_authority_length also +%% to the host header and to document this option. It might +%% also be useful for HTTP/2 requests. 
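As a standalone sketch (not part of this patch), the accumulate-with-limit pattern that parse_method/4 above and parse_uri_authority/5 below both follow: inspect one byte at a time, append it to an accumulator, and decrement a remaining-length counter so malformed input cannot grow the accumulator without bound. The name collect_token and its return values are made up for illustration:

collect_token(Bin, MaxLength) ->
    collect_token(Bin, MaxLength, <<>>).

%% Limit exhausted before the terminating space was seen.
collect_token(_, 0, _) ->
    {error, limit_reached};
%% A space terminates the token; return it along with the rest of the buffer.
collect_token(<< $\s, Rest/bits >>, _, Acc) ->
    {ok, Acc, Rest};
%% Accumulate the byte and keep scanning.
collect_token(<< C, Rest/bits >>, Remaining, Acc) ->
    collect_token(Rest, Remaining - 1, << Acc/binary, C >>);
%% Buffer exhausted; more data is needed.
collect_token(<<>>, _, _) ->
    more.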
+parse_uri_authority(Rest, State=#state{opts=Opts}, Method) -> + parse_uri_authority(Rest, State, Method, <<>>, + maps:get(max_authority_length, Opts, 255)). + +parse_uri_authority(_, State, _, _, 0) -> + error_terminate(414, State, {connection_error, limit_reached, + 'The authority component of the absolute URI is longer than configuration allows. (RFC7230 2.7.1)'}); +parse_uri_authority(<< C, Rest/bits >>, State, Method, SoFar, Remaining) -> + case C of + $\r -> + error_terminate(400, State, {connection_error, protocol_error, + 'The request-target must not be followed by a line break. (RFC7230 3.1.1)'}); + $@ -> + error_terminate(400, State, {connection_error, protocol_error, + 'Absolute URIs must not include a userinfo component. (RFC7230 2.7.1)'}); + C when SoFar =:= <<>> andalso + ((C =:= $/) orelse (C =:= $\s) orelse (C =:= $?) orelse (C =:= $#)) -> + error_terminate(400, State, {connection_error, protocol_error, + 'Absolute URIs must include a non-empty host component. (RFC7230 2.7.1)'}); + $: when SoFar =:= <<>> -> + error_terminate(400, State, {connection_error, protocol_error, + 'Absolute URIs must include a non-empty host component. (RFC7230 2.7.1)'}); + $/ -> parse_uri_path(Rest, State, Method, SoFar, <<"/">>); + $\s -> parse_version(Rest, State, Method, SoFar, <<"/">>, <<>>); + $? -> parse_uri_query(Rest, State, Method, SoFar, <<"/">>, <<>>); + $# -> skip_uri_fragment(Rest, State, Method, SoFar, <<"/">>, <<>>); + C -> parse_uri_authority(Rest, State, Method, << SoFar/binary, C >>, Remaining - 1) + end. + +parse_uri_path(<< C, Rest/bits >>, State, Method, Authority, SoFar) -> + case C of + $\r -> error_terminate(400, State, {connection_error, protocol_error, + 'The request-target must not be followed by a line break. (RFC7230 3.1.1)'}); + $\s -> parse_version(Rest, State, Method, Authority, SoFar, <<>>); + $? -> parse_uri_query(Rest, State, Method, Authority, SoFar, <<>>); + $# -> skip_uri_fragment(Rest, State, Method, Authority, SoFar, <<>>); + _ -> parse_uri_path(Rest, State, Method, Authority, << SoFar/binary, C >>) + end. + +parse_uri_query(<< C, Rest/bits >>, State, M, A, P, SoFar) -> + case C of + $\r -> error_terminate(400, State, {connection_error, protocol_error, + 'The request-target must not be followed by a line break. (RFC7230 3.1.1)'}); + $\s -> parse_version(Rest, State, M, A, P, SoFar); + $# -> skip_uri_fragment(Rest, State, M, A, P, SoFar); + _ -> parse_uri_query(Rest, State, M, A, P, << SoFar/binary, C >>) + end. + +skip_uri_fragment(<< C, Rest/bits >>, State, M, A, P, Q) -> + case C of + $\r -> error_terminate(400, State, {connection_error, protocol_error, + 'The request-target must not be followed by a line break. (RFC7230 3.1.1)'}); + $\s -> parse_version(Rest, State, M, A, P, Q); + _ -> skip_uri_fragment(Rest, State, M, A, P, Q) + end. + +parse_version(<< "HTTP/1.1\r\n", Rest/bits >>, State, M, A, P, Q) -> + before_parse_headers(Rest, State, M, A, P, Q, 'HTTP/1.1'); +parse_version(<< "HTTP/1.0\r\n", Rest/bits >>, State, M, A, P, Q) -> + before_parse_headers(Rest, State, M, A, P, Q, 'HTTP/1.0'); +parse_version(<< "HTTP/1.", _, C, _/bits >>, State, _, _, _, _) when C =:= $\s; C =:= $\t -> + error_terminate(400, State, {connection_error, protocol_error, + 'Whitespace is not allowed after the HTTP version. (RFC7230 3.1.1)'}); +parse_version(<< C, _/bits >>, State, _, _, _, _) when C =:= $\s; C =:= $\t -> + error_terminate(400, State, {connection_error, protocol_error, + 'The separator between request target and version must be a single SP. 
(RFC7230 3.1.1)'}); +parse_version(_, State, _, _, _, _) -> + error_terminate(505, State, {connection_error, protocol_error, + 'Unsupported HTTP version. (RFC7230 2.6)'}). + +before_parse_headers(Rest, State, M, A, P, Q, V) -> + parse_header(Rest, State#state{in_state=#ps_header{ + method=M, authority=A, path=P, qs=Q, version=V}}, #{}). + +%% Headers. + +%% We need two or more bytes in the buffer to continue. +parse_header(Rest, State=#state{in_state=PS}, Headers) when byte_size(Rest) < 2 -> + {more, State#state{buffer=Rest, in_state=PS#ps_header{headers=Headers}}}; +parse_header(<< $\r, $\n, Rest/bits >>, S, Headers) -> + request(Rest, S, Headers); +parse_header(Buffer, State=#state{opts=Opts, in_state=PS}, Headers) -> + MaxHeaders = maps:get(max_headers, Opts, 100), + NumHeaders = maps:size(Headers), + if + NumHeaders >= MaxHeaders -> + error_terminate(431, State#state{in_state=PS#ps_header{headers=Headers}}, + {connection_error, limit_reached, + 'The number of headers is larger than configuration allows. (RFC7230 3.2.5, RFC6585 5)'}); + true -> + parse_header_colon(Buffer, State, Headers) + end. + +parse_header_colon(Buffer, State=#state{opts=Opts, in_state=PS}, Headers) -> + MaxLength = maps:get(max_header_name_length, Opts, 64), + case match_colon(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(431, State#state{in_state=PS#ps_header{headers=Headers}}, + {connection_error, limit_reached, + 'A header name is larger than configuration allows. (RFC7230 3.2.5, RFC6585 5)'}); + nomatch -> + %% We don't have a colon but we might have an invalid header line, + %% so check if we have an LF and abort with an error if we do. + case match_eol(Buffer, 0) of + nomatch -> + {more, State#state{buffer=Buffer, in_state=PS#ps_header{headers=Headers}}}; + _ -> + error_terminate(400, State#state{in_state=PS#ps_header{headers=Headers}}, + {connection_error, protocol_error, + 'A header line is missing a colon separator. (RFC7230 3.2.4)'}) + end; + _ -> + parse_hd_name(Buffer, State, Headers, <<>>) + end. + +match_colon(<< $:, _/bits >>, N) -> + N; +match_colon(<< _, Rest/bits >>, N) -> + match_colon(Rest, N + 1); +match_colon(_, _) -> + nomatch. + +parse_hd_name(<< $:, Rest/bits >>, State, H, SoFar) -> + parse_hd_before_value(Rest, State, H, SoFar); +parse_hd_name(<< C, _/bits >>, State=#state{in_state=PS}, H, <<>>) when ?IS_WS(C) -> + error_terminate(400, State#state{in_state=PS#ps_header{headers=H}}, + {connection_error, protocol_error, + 'Whitespace is not allowed before the header name. (RFC7230 3.2)'}); +parse_hd_name(<< C, _/bits >>, State=#state{in_state=PS}, H, _) when ?IS_WS(C) -> + error_terminate(400, State#state{in_state=PS#ps_header{headers=H}}, + {connection_error, protocol_error, + 'Whitespace is not allowed between the header name and the colon. (RFC7230 3.2.4)'}); +parse_hd_name(<< C, Rest/bits >>, State, H, SoFar) -> + ?LOWER(parse_hd_name, Rest, State, H, SoFar). + +parse_hd_before_value(<< $\s, Rest/bits >>, S, H, N) -> + parse_hd_before_value(Rest, S, H, N); +parse_hd_before_value(<< $\t, Rest/bits >>, S, H, N) -> + parse_hd_before_value(Rest, S, H, N); +parse_hd_before_value(Buffer, State=#state{opts=Opts, in_state=PS}, H, N) -> + MaxLength = maps:get(max_header_value_length, Opts, 4096), + case match_eol(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(431, State#state{in_state=PS#ps_header{headers=H}}, + {connection_error, limit_reached, + 'A header value is larger than configuration allows. 
(RFC7230 3.2.5, RFC6585 5)'}); + nomatch -> + {more, State#state{buffer=Buffer, in_state=PS#ps_header{headers=H, name=N}}}; + _ -> + parse_hd_value(Buffer, State, H, N, <<>>) + end. + +parse_hd_value(<< $\r, $\n, Rest/bits >>, S, Headers0, Name, SoFar) -> + Value = clean_value_ws_end(SoFar, byte_size(SoFar) - 1), + Headers = case maps:get(Name, Headers0, undefined) of + undefined -> Headers0#{Name => Value}; + %% The cookie header does not use proper HTTP header lists. + Value0 when Name =:= <<"cookie">> -> Headers0#{Name => << Value0/binary, "; ", Value/binary >>}; + Value0 -> Headers0#{Name => << Value0/binary, ", ", Value/binary >>} + end, + parse_header(Rest, S, Headers); +parse_hd_value(<< C, Rest/bits >>, S, H, N, SoFar) -> + parse_hd_value(Rest, S, H, N, << SoFar/binary, C >>). + +clean_value_ws_end(_, -1) -> + <<>>; +clean_value_ws_end(Value, N) -> + case binary:at(Value, N) of + $\s -> clean_value_ws_end(Value, N - 1); + $\t -> clean_value_ws_end(Value, N - 1); + _ -> + S = N + 1, + << Value2:S/binary, _/bits >> = Value, + Value2 + end. + +-ifdef(TEST). +clean_value_ws_end_test_() -> + Tests = [ + {<<>>, <<>>}, + {<<" ">>, <<>>}, + {<<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5 \t \t ">>, + <<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5">>} + ], + [{V, fun() -> R = clean_value_ws_end(V, byte_size(V) - 1) end} || {V, R} <- Tests]. + +horse_clean_value_ws_end() -> + horse:repeat(200000, + clean_value_ws_end( + <<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5 ">>, + byte_size(<<"text/*;q=0.3, text/html;q=0.7, text/html;level=1, " + "text/html;level=2;q=0.4, */*;q=0.5 ">>) - 1) + ). +-endif. + +request(Buffer, State=#state{transport=Transport, + in_state=PS=#ps_header{authority=Authority, version=Version}}, Headers) -> + case maps:get(<<"host">>, Headers, undefined) of + undefined when Version =:= 'HTTP/1.1' -> + %% @todo Might want to not close the connection on this and next one. + error_terminate(400, State#state{in_state=PS#ps_header{headers=Headers}}, + {stream_error, protocol_error, + 'HTTP/1.1 requests must include a host header. (RFC7230 5.4)'}); + undefined -> + request(Buffer, State, Headers, <<>>, default_port(Transport:secure())); + %% @todo When CONNECT requests come in we need to ignore the RawHost + %% and instead use the Authority as the source of host. + RawHost when Authority =:= undefined; Authority =:= RawHost -> + request_parse_host(Buffer, State, Headers, RawHost); + %% RFC7230 does not explicitly ask us to reject requests + %% that have a different authority component and host header. + %% However it DOES ask clients to set them to the same value, + %% so we enforce that. + _ -> + error_terminate(400, State#state{in_state=PS#ps_header{headers=Headers}}, + {stream_error, protocol_error, + 'The host header is different than the absolute-form authority component. (RFC7230 5.4)'}) + end. + +request_parse_host(Buffer, State=#state{transport=Transport, in_state=PS}, Headers, RawHost) -> + try cow_http_hd:parse_host(RawHost) of + {Host, undefined} -> + request(Buffer, State, Headers, Host, default_port(Transport:secure())); + {Host, Port} when Port > 0, Port =< 65535 -> + request(Buffer, State, Headers, Host, Port); + _ -> + error_terminate(400, State, {stream_error, protocol_error, + 'The port component of the absolute-form is not in the range 0..65535. 
(RFC7230 2.7.1)'}) + catch _:_ -> + error_terminate(400, State#state{in_state=PS#ps_header{headers=Headers}}, + {stream_error, protocol_error, + 'The host header is invalid. (RFC7230 5.4)'}) + end. + +-spec default_port(boolean()) -> 80 | 443. +default_port(true) -> 443; +default_port(_) -> 80. + +%% End of request parsing. + +request(Buffer, State0=#state{ref=Ref, transport=Transport, peer=Peer, sock=Sock, cert=Cert, + proxy_header=ProxyHeader, in_streamid=StreamID, in_state= + PS=#ps_header{method=Method, path=Path, qs=Qs, version=Version}}, + Headers0, Host, Port) -> + Scheme = case Transport:secure() of + true -> <<"https">>; + false -> <<"http">> + end, + {Headers, HasBody, BodyLength, TDecodeFun, TDecodeState} = case Headers0 of + #{<<"transfer-encoding">> := TransferEncoding0} -> + try cow_http_hd:parse_transfer_encoding(TransferEncoding0) of + [<<"chunked">>] -> + {maps:remove(<<"content-length">>, Headers0), + true, undefined, fun cow_http_te:stream_chunked/2, {0, 0}}; + _ -> + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + {stream_error, protocol_error, + 'Cowboy only supports transfer-encoding: chunked. (RFC7230 3.3.1)'}) + catch _:_ -> + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + {stream_error, protocol_error, + 'The transfer-encoding header is invalid. (RFC7230 3.3.1)'}) + end; + #{<<"content-length">> := <<"0">>} -> + {Headers0, false, 0, undefined, undefined}; + #{<<"content-length">> := BinLength} -> + Length = try + cow_http_hd:parse_content_length(BinLength) + catch _:_ -> + error_terminate(400, State0#state{in_state=PS#ps_header{headers=Headers0}}, + {stream_error, protocol_error, + 'The content-length header is invalid. (RFC7230 3.3.2)'}) + end, + {Headers0, true, Length, fun cow_http_te:stream_identity/2, {0, Length}}; + _ -> + {Headers0, false, 0, undefined, undefined} + end, + Req0 = #{ + ref => Ref, + pid => self(), + streamid => StreamID, + peer => Peer, + sock => Sock, + cert => Cert, + method => Method, + scheme => Scheme, + host => Host, + port => Port, + path => Path, + qs => Qs, + version => Version, + %% We are transparently taking care of transfer-encodings so + %% the user code has no need to know about it. + headers => maps:remove(<<"transfer-encoding">>, Headers), + has_body => HasBody, + body_length => BodyLength + }, + %% We add the PROXY header information if any. + Req = case ProxyHeader of + undefined -> Req0; + _ -> Req0#{proxy_header => ProxyHeader} + end, + case is_http2_upgrade(Headers, Version) of + false -> + State = case HasBody of + true -> + State0#state{in_state=#ps_body{ + length = BodyLength, + transfer_decode_fun = TDecodeFun, + transfer_decode_state = TDecodeState + }}; + false -> + State0#state{in_streamid=StreamID + 1, in_state=#ps_request_line{}} + end, + {request, Req, State#state{buffer=Buffer}}; + {true, HTTP2Settings} -> + %% We save the headers in case the upgrade will fail + %% and we need to pass them to cowboy_stream:early_error. + http2_upgrade(State0#state{in_state=PS#ps_header{headers=Headers}}, + Buffer, HTTP2Settings, Req) + end. + +%% HTTP/2 upgrade. + +%% @todo We must not upgrade to h2c over a TLS connection. 
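For illustration (not part of this patch), a sketch of how the Req map assembled by request/5 above looks from a handler's side once it reaches init/2. The handler reads it through the bundled cowboy_req module rather than matching the map directly; the module name example_handler is hypothetical:

-module(example_handler).
-export([init/2]).

init(Req, State) ->
    Method = cowboy_req:method(Req),    %% e.g. <<"GET">>
    Path = cowboy_req:path(Req),        %% e.g. <<"/index.html">>
    UserAgent = cowboy_req:header(<<"user-agent">>, Req, <<"unknown">>),
    Body = <<"Handled ", Method/binary, " ", Path/binary, " from ", UserAgent/binary>>,
    Req2 = cowboy_req:reply(200, #{<<"content-type">> => <<"text/plain">>}, Body, Req),
    {ok, Req2, State}.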
+is_http2_upgrade(#{<<"connection">> := Conn, <<"upgrade">> := Upgrade, + <<"http2-settings">> := HTTP2Settings}, 'HTTP/1.1') -> + Conns = cow_http_hd:parse_connection(Conn), + case {lists:member(<<"upgrade">>, Conns), lists:member(<<"http2-settings">>, Conns)} of + {true, true} -> + Protocols = cow_http_hd:parse_upgrade(Upgrade), + case lists:member(<<"h2c">>, Protocols) of + true -> + {true, HTTP2Settings}; + false -> + false + end; + _ -> + false + end; +is_http2_upgrade(_, _) -> + false. + +%% Prior knowledge upgrade, without an HTTP/1.1 request. +http2_upgrade(State=#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, + proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert}, Buffer) -> + case Transport:secure() of + false -> + _ = cancel_timeout(State), + cowboy_http2:init(Parent, Ref, Socket, Transport, + ProxyHeader, Opts, Peer, Sock, Cert, Buffer); + true -> + error_terminate(400, State, {connection_error, protocol_error, + 'Clients that support HTTP/2 over TLS MUST use ALPN. (RFC7540 3.4)'}) + end. + +%% Upgrade via an HTTP/1.1 request. +http2_upgrade(State=#state{parent=Parent, ref=Ref, socket=Socket, transport=Transport, + proxy_header=ProxyHeader, opts=Opts, peer=Peer, sock=Sock, cert=Cert}, + Buffer, HTTP2Settings, Req) -> + %% @todo + %% However if the client sent a body, we need to read the body in full + %% and if we can't do that, return a 413 response. Some options are in order. + %% Always half-closed stream coming from this side. + try cow_http_hd:parse_http2_settings(HTTP2Settings) of + Settings -> + _ = cancel_timeout(State), + cowboy_http2:init(Parent, Ref, Socket, Transport, + ProxyHeader, Opts, Peer, Sock, Cert, Buffer, Settings, Req) + catch _:_ -> + error_terminate(400, State, {connection_error, protocol_error, + 'The HTTP2-Settings header must contain a base64 SETTINGS payload. (RFC7540 3.2, RFC7540 3.2.1)'}) + end. + +%% Request body parsing. + +parse_body(Buffer, State=#state{in_streamid=StreamID, in_state= + PS=#ps_body{received=Received, transfer_decode_fun=TDecode, + transfer_decode_state=TState0}}) -> + %% @todo Proper trailers. + try TDecode(Buffer, TState0) of + more -> + {more, State#state{buffer=Buffer}}; + {more, Data, TState} -> + {data, StreamID, nofin, Data, State#state{buffer= <<>>, + in_state=PS#ps_body{received=Received + byte_size(Data), + transfer_decode_state=TState}}}; + {more, Data, _Length, TState} when is_integer(_Length) -> + {data, StreamID, nofin, Data, State#state{buffer= <<>>, + in_state=PS#ps_body{received=Received + byte_size(Data), + transfer_decode_state=TState}}}; + {more, Data, Rest, TState} -> + {data, StreamID, nofin, Data, State#state{buffer=Rest, + in_state=PS#ps_body{received=Received + byte_size(Data), + transfer_decode_state=TState}}}; + {done, _HasTrailers, Rest} -> + {data, StreamID, fin, <<>>, + State#state{buffer=Rest, in_streamid=StreamID + 1, in_state=#ps_request_line{}}}; + {done, Data, _HasTrailers, Rest} -> + {data, StreamID, fin, Data, + State#state{buffer=Rest, in_streamid=StreamID + 1, in_state=#ps_request_line{}}} + catch _:_ -> + Reason = {connection_error, protocol_error, + 'Failure to decode the content. (RFC7230 4)'}, + terminate(stream_terminate(State, StreamID, Reason), Reason) + end. + +%% Message handling. + +down(State=#state{opts=Opts, children=Children0}, Pid, Msg) -> + case cowboy_children:down(Children0, Pid) of + %% The stream was terminated already. + {ok, undefined, Children} -> + State#state{children=Children}; + %% The stream is still running. 
+ {ok, StreamID, Children} -> + info(State#state{children=Children}, StreamID, Msg); + %% The process was unknown. + error -> + cowboy:log(warning, "Received EXIT signal ~p for unknown process ~p.~n", + [Msg, Pid], Opts), + State + end. + +info(State=#state{opts=Opts, streams=Streams0}, StreamID, Msg) -> + case lists:keyfind(StreamID, #stream.id, Streams0) of + Stream = #stream{state=StreamState0} -> + try cowboy_stream:info(StreamID, Msg, StreamState0) of + {Commands, StreamState} -> + Streams = lists:keyreplace(StreamID, #stream.id, Streams0, + Stream#stream{state=StreamState}), + commands(State#state{streams=Streams}, StreamID, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(info, + [StreamID, Msg, StreamState0], + Class, Exception, Stacktrace), Opts), + stream_terminate(State, StreamID, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:info/3.'}) + end; + false -> + cowboy:log(warning, "Received message ~p for unknown stream ~p.~n", + [Msg, StreamID], Opts), + State + end. + +%% Commands. + +commands(State, _, []) -> + State; +%% Supervise a child process. +commands(State=#state{children=Children}, StreamID, [{spawn, Pid, Shutdown}|Tail]) -> + commands(State#state{children=cowboy_children:up(Children, Pid, StreamID, Shutdown)}, + StreamID, Tail); +%% Error handling. +commands(State, StreamID, [Error = {internal_error, _, _}|Tail]) -> + commands(stream_terminate(State, StreamID, Error), StreamID, Tail); +%% Commands for a stream currently inactive. +commands(State=#state{out_streamid=Current, streams=Streams0}, StreamID, Commands) + when Current =/= StreamID -> + + %% @todo We still want to handle some commands... + + Stream = #stream{queue=Queue} = lists:keyfind(StreamID, #stream.id, Streams0), + Streams = lists:keyreplace(StreamID, #stream.id, Streams0, + Stream#stream{queue=Queue ++ Commands}), + State#state{streams=Streams}; +%% When we have finished reading the request body, do nothing. +commands(State=#state{flow=infinity}, StreamID, [{flow, _}|Tail]) -> + commands(State, StreamID, Tail); +%% Read the request body. +commands(State0=#state{flow=Flow0}, StreamID, [{flow, Size}|Tail]) -> + %% We must read *at least* Size of data otherwise functions + %% like cowboy_req:read_body/1,2 will wait indefinitely. + Flow = if + Flow0 < 0 -> Size; + true -> Flow0 + Size + end, + %% Reenable active mode if necessary. + State = if + Flow0 =< 0, Flow > 0 -> + active(State0); + true -> + State0 + end, + commands(State#state{flow=Flow}, StreamID, Tail); +%% Error responses are sent only if a response wasn't sent already. +commands(State=#state{out_state=wait, out_streamid=StreamID}, StreamID, + [{error_response, Status, Headers0, Body}|Tail]) -> + %% We close the connection when the error response is 408, as it + %% indicates a timeout and the RFC recommends that we stop here. (RFC7231 6.5.7) + Headers = case Status of + 408 -> Headers0#{<<"connection">> => <<"close">>}; + <<"408", _/bits>> -> Headers0#{<<"connection">> => <<"close">>}; + _ -> Headers0 + end, + commands(State, StreamID, [{response, Status, Headers, Body}|Tail]); +commands(State, StreamID, [{error_response, _, _, _}|Tail]) -> + commands(State, StreamID, Tail); +%% Send an informational response. +commands(State=#state{socket=Socket, transport=Transport, out_state=wait, streams=Streams}, + StreamID, [{inform, StatusCode, Headers}|Tail]) -> + %% @todo I'm pretty sure the last stream in the list is the one we want + %% considering all others are queued. 
+ #stream{version=Version} = lists:keyfind(StreamID, #stream.id, Streams), + _ = case Version of + 'HTTP/1.1' -> + Transport:send(Socket, cow_http:response(StatusCode, 'HTTP/1.1', + headers_to_list(Headers))); + %% Do not send informational responses to HTTP/1.0 clients. (RFC7231 6.2) + 'HTTP/1.0' -> + ok + end, + commands(State, StreamID, Tail); +%% Send a full response. +%% +%% @todo Kill the stream if it sent a response when one has already been sent. +%% @todo Keep IsFin in the state. +%% @todo Same two things above apply to DATA, possibly promise too. +commands(State0=#state{socket=Socket, transport=Transport, out_state=wait, streams=Streams}, StreamID, + [{response, StatusCode, Headers0, Body}|Tail]) -> + %% @todo I'm pretty sure the last stream in the list is the one we want + %% considering all others are queued. + #stream{version=Version} = lists:keyfind(StreamID, #stream.id, Streams), + {State1, Headers} = connection(State0, Headers0, StreamID, Version), + State = State1#state{out_state=done}, + %% @todo Ensure content-length is set. 204 must never have content-length set. + Response = cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers)), + %% @todo 204 and 304 responses must not include a response body. (RFC7230 3.3.1, RFC7230 3.3.2) + case Body of + {sendfile, _, _, _} -> + Transport:send(Socket, Response), + sendfile(State, Body); + _ -> + Transport:send(Socket, [Response, Body]) + end, + commands(State, StreamID, Tail); +%% Send response headers and initiate chunked encoding or streaming. +commands(State0=#state{socket=Socket, transport=Transport, + opts=Opts, overriden_opts=Override, streams=Streams0, out_state=OutState}, + StreamID, [{headers, StatusCode, Headers0}|Tail]) -> + %% @todo Same as above (about the last stream in the list). + Stream = #stream{version=Version} = lists:keyfind(StreamID, #stream.id, Streams0), + Status = cow_http:status_to_integer(StatusCode), + ContentLength = maps:get(<<"content-length">>, Headers0, undefined), + %% Chunked transfer-encoding can be disabled on a per-request basis. + Chunked = case Override of + #{chunked := Chunked0} -> Chunked0; + _ -> maps:get(chunked, Opts, true) + end, + {State1, Headers1} = case {Status, ContentLength, Version} of + {204, _, 'HTTP/1.1'} -> + {State0#state{out_state=done}, Headers0}; + {304, _, 'HTTP/1.1'} -> + {State0#state{out_state=done}, Headers0}; + {_, undefined, 'HTTP/1.1'} when Chunked -> + {State0#state{out_state=chunked}, Headers0#{<<"transfer-encoding">> => <<"chunked">>}}; + %% Close the connection after streaming without content-length + %% to all HTTP/1.0 clients and to HTTP/1.1 clients when chunked is disabled. + {_, undefined, _} -> + {State0#state{out_state=streaming, last_streamid=StreamID}, Headers0}; + %% Stream the response body without chunked transfer-encoding. + _ -> + ExpectedSize = cow_http_hd:parse_content_length(ContentLength), + Streams = lists:keyreplace(StreamID, #stream.id, Streams0, + Stream#stream{local_expected_size=ExpectedSize}), + {State0#state{out_state=streaming, streams=Streams}, Headers0} + end, + Headers2 = case stream_te(OutState, Stream) of + trailers -> Headers1; + _ -> maps:remove(<<"trailer">>, Headers1) + end, + {State, Headers} = connection(State1, Headers2, StreamID, Version), + Transport:send(Socket, cow_http:response(StatusCode, 'HTTP/1.1', headers_to_list(Headers))), + commands(State, StreamID, Tail); +%% Send a response body chunk. +%% @todo We need to kill the stream if it tries to send data before headers. 
+commands(State0=#state{socket=Socket, transport=Transport, streams=Streams0, out_state=OutState}, + StreamID, [{data, IsFin, Data}|Tail]) -> + %% Do not send anything when the user asks to send an empty + %% data frame, as that would break the protocol. + Size = case Data of + {sendfile, _, B, _} -> B; + _ -> iolist_size(Data) + end, + %% Depending on the current state we may need to send nothing, + %% the last chunk, chunked data with/without the last chunk, + %% or just the data as-is. + Stream = case lists:keyfind(StreamID, #stream.id, Streams0) of + Stream0=#stream{method= <<"HEAD">>} -> + Stream0; + Stream0 when Size =:= 0, IsFin =:= fin, OutState =:= chunked -> + Transport:send(Socket, <<"0\r\n\r\n">>), + Stream0; + Stream0 when Size =:= 0 -> + Stream0; + Stream0 when is_tuple(Data), OutState =:= chunked -> + Transport:send(Socket, [integer_to_binary(Size, 16), <<"\r\n">>]), + sendfile(State0, Data), + Transport:send(Socket, + case IsFin of + fin -> <<"\r\n0\r\n\r\n">>; + nofin -> <<"\r\n">> + end), + Stream0; + Stream0 when OutState =:= chunked -> + Transport:send(Socket, [ + integer_to_binary(Size, 16), <<"\r\n">>, Data, + case IsFin of + fin -> <<"\r\n0\r\n\r\n">>; + nofin -> <<"\r\n">> + end + ]), + Stream0; + Stream0 when OutState =:= streaming -> + #stream{local_sent_size=SentSize0, local_expected_size=ExpectedSize} = Stream0, + SentSize = SentSize0 + Size, + if + %% ExpectedSize may be undefined, which is > any integer value. + SentSize > ExpectedSize -> + terminate(State0, response_body_too_large); + is_tuple(Data) -> + sendfile(State0, Data); + true -> + Transport:send(Socket, Data) + end, + Stream0#stream{local_sent_size=SentSize} + end, + State = case IsFin of + fin -> State0#state{out_state=done}; + nofin -> State0 + end, + Streams = lists:keyreplace(StreamID, #stream.id, Streams0, Stream), + commands(State#state{streams=Streams}, StreamID, Tail); +commands(State=#state{socket=Socket, transport=Transport, streams=Streams, out_state=OutState}, + StreamID, [{trailers, Trailers}|Tail]) -> + case stream_te(OutState, lists:keyfind(StreamID, #stream.id, Streams)) of + trailers -> + Transport:send(Socket, [ + <<"0\r\n">>, + cow_http:headers(maps:to_list(Trailers)), + <<"\r\n">> + ]); + no_trailers -> + Transport:send(Socket, <<"0\r\n\r\n">>); + not_chunked -> + ok + end, + commands(State#state{out_state=done}, StreamID, Tail); +%% Protocol takeover. +commands(State0=#state{ref=Ref, parent=Parent, socket=Socket, transport=Transport, + out_state=OutState, opts=Opts, buffer=Buffer, children=Children}, StreamID, + [{switch_protocol, Headers, Protocol, InitialState}|_Tail]) -> + %% @todo If there's streams opened after this one, fail instead of 101. + State1 = cancel_timeout(State0), + %% Before we send the 101 response we need to stop receiving data + %% from the socket, otherwise the data might be receive before the + %% call to flush/0 and we end up inadvertently dropping a packet. + %% + %% @todo Handle cases where the request came with a body. We need + %% to process or skip the body before the upgrade can be completed. + State = passive(State1), + %% Send a 101 response if necessary, then terminate the stream. + #state{streams=Streams} = case OutState of + wait -> info(State, StreamID, {inform, 101, Headers}); + _ -> State + end, + #stream{state=StreamState} = lists:keyfind(StreamID, #stream.id, Streams), + %% @todo We need to shutdown processes here first. 
+ stream_call_terminate(StreamID, switch_protocol, StreamState, State), + %% Terminate children processes and flush any remaining messages from the mailbox. + cowboy_children:terminate(Children), + flush(Parent), + Protocol:takeover(Parent, Ref, Socket, Transport, Opts, Buffer, InitialState); +%% Set options dynamically. +commands(State0=#state{overriden_opts=Opts}, + StreamID, [{set_options, SetOpts}|Tail]) -> + State1 = case SetOpts of + #{idle_timeout := IdleTimeout} -> + set_timeout(State0#state{overriden_opts=Opts#{idle_timeout => IdleTimeout}}, + idle_timeout); + _ -> + State0 + end, + State = case SetOpts of + #{chunked := Chunked} -> + State1#state{overriden_opts=Opts#{chunked => Chunked}}; + _ -> + State1 + end, + commands(State, StreamID, Tail); +%% Stream shutdown. +commands(State, StreamID, [stop|Tail]) -> + %% @todo Do we want to run the commands after a stop? + %% @todo We currently wait for the stop command before we + %% continue with the next request/response. In theory, if + %% the request body was read fully and the response body + %% was sent fully we should be able to start working on + %% the next request concurrently. This can be done as a + %% future optimization. + maybe_terminate(State, StreamID, Tail); +%% Log event. +commands(State=#state{opts=Opts}, StreamID, [Log={log, _, _, _}|Tail]) -> + cowboy:log(Log, Opts), + commands(State, StreamID, Tail); +%% HTTP/1.1 does not support push; ignore. +commands(State, StreamID, [{push, _, _, _, _, _, _, _}|Tail]) -> + commands(State, StreamID, Tail). + +%% The set-cookie header is special; we can only send one cookie per header. +headers_to_list(Headers0=#{<<"set-cookie">> := SetCookies}) -> + Headers1 = maps:to_list(maps:remove(<<"set-cookie">>, Headers0)), + Headers1 ++ [{<<"set-cookie">>, Value} || Value <- SetCookies]; +headers_to_list(Headers) -> + maps:to_list(Headers). + +%% We wrap the sendfile call into a try/catch because on OTP-20 +%% and earlier a few different crashes could occur for sockets +%% that were closing or closed. For example a badarg in +%% erlang:port_get_data(#Port<...>) or a badmatch like +%% {{badmatch,{error,einval}},[{prim_file,sendfile,8,[]}... +%% +%% OTP-21 uses a NIF instead of a port so the implementation +%% and behavior has dramatically changed and it is unclear +%% whether it will be necessary in the future. +%% +%% This try/catch prevents some noisy logs to be written +%% when these errors occur. +sendfile(State=#state{socket=Socket, transport=Transport, opts=Opts}, + {sendfile, Offset, Bytes, Path}) -> + try + %% When sendfile is disabled we explicitly use the fallback. + _ = case maps:get(sendfile, Opts, true) of + true -> Transport:sendfile(Socket, Path, Offset, Bytes); + false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) + end, + ok + catch _:_ -> + terminate(State, {socket_error, sendfile_crash, + 'An error occurred when using the sendfile function.'}) + end. + +%% Flush messages specific to cowboy_http before handing over the +%% connection to another protocol. +flush(Parent) -> + receive + {timeout, _, _} -> + flush(Parent); + {{Pid, _}, _} when Pid =:= self() -> + flush(Parent); + {'EXIT', Pid, _} when Pid =/= Parent -> + flush(Parent) + after 0 -> + ok + end. + +%% @todo In these cases I'm not sure if we should continue processing commands. +maybe_terminate(State=#state{last_streamid=StreamID}, StreamID, _Tail) -> + terminate(stream_terminate(State, StreamID, normal), normal); %% @todo Reason ok? 
+maybe_terminate(State, StreamID, _Tail) -> + stream_terminate(State, StreamID, normal). + +stream_terminate(State0=#state{opts=Opts, in_streamid=InStreamID, in_state=InState, + out_streamid=OutStreamID, out_state=OutState, streams=Streams0, + children=Children0}, StreamID, Reason) -> + #stream{version=Version, local_expected_size=ExpectedSize, local_sent_size=SentSize} + = lists:keyfind(StreamID, #stream.id, Streams0), + %% Send a response or terminate chunks depending on the current output state. + State1 = #state{streams=Streams1} = case OutState of + wait when element(1, Reason) =:= internal_error -> + info(State0, StreamID, {response, 500, #{<<"content-length">> => <<"0">>}, <<>>}); + wait when element(1, Reason) =:= connection_error -> + info(State0, StreamID, {response, 400, #{<<"content-length">> => <<"0">>}, <<>>}); + wait -> + info(State0, StreamID, {response, 204, #{}, <<>>}); + chunked when Version =:= 'HTTP/1.1' -> + info(State0, StreamID, {data, fin, <<>>}); + streaming when SentSize < ExpectedSize -> + terminate(State0, response_body_too_small); + _ -> %% done or Version =:= 'HTTP/1.0' + State0 + end, + %% Stop the stream, shutdown children and reset overriden options. + {value, #stream{state=StreamState}, Streams} + = lists:keytake(StreamID, #stream.id, Streams1), + stream_call_terminate(StreamID, Reason, StreamState, State1), + Children = cowboy_children:shutdown(Children0, StreamID), + State = State1#state{overriden_opts=#{}, streams=Streams, children=Children}, + %% We want to drop the connection if the body was not read fully + %% and we don't know its length or more remains to be read than + %% configuration allows. + MaxSkipBodyLength = maps:get(max_skip_body_length, Opts, 1000000), + case InState of + #ps_body{length=undefined} + when InStreamID =:= OutStreamID -> + terminate(State, skip_body_unknown_length); + #ps_body{length=Len, received=Received} + when InStreamID =:= OutStreamID, Received + MaxSkipBodyLength < Len -> + terminate(State, skip_body_too_large); + #ps_body{} when InStreamID =:= OutStreamID -> + stream_next(State#state{flow=infinity}); + _ -> + stream_next(State) + end. + +stream_next(State0=#state{opts=Opts, active=Active, out_streamid=OutStreamID, streams=Streams}) -> + NextOutStreamID = OutStreamID + 1, + case lists:keyfind(NextOutStreamID, #stream.id, Streams) of + false -> + State0#state{out_streamid=NextOutStreamID, out_state=wait}; + #stream{queue=Commands} -> + State = case Active of + true -> State0; + false -> active(State0) + end, + %% @todo Remove queue from the stream. + %% We set the flow to the initial flow size even though + %% we might have sent some data through already due to pipelining. + Flow = maps:get(initial_stream_flow_size, Opts, 65535), + commands(State#state{flow=Flow, out_streamid=NextOutStreamID, out_state=wait}, + NextOutStreamID, Commands) + end. + +stream_call_terminate(StreamID, Reason, StreamState, #state{opts=Opts}) -> + try + cowboy_stream:terminate(StreamID, Reason, StreamState) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(terminate, + [StreamID, Reason, StreamState], + Class, Exception, Stacktrace), Opts) + end. 
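A minimal sketch (not part of this patch) of a pass-through stream handler, showing the cowboy_stream callbacks that this module invokes above (init/3, data/4, info/3, terminate/3, early_error/5). The module name example_stream_h is hypothetical; it would be listed before cowboy_stream_h in the stream_handlers option, and each callback simply delegates to the next handler in the chain:

-module(example_stream_h).
-behavior(cowboy_stream).
-export([init/3, data/4, info/3, terminate/3, early_error/5]).

%% Returns {Commands, Next}; Next holds the state of the following handlers.
init(StreamID, Req, Opts) ->
    cowboy_stream:init(StreamID, Req, Opts).

data(StreamID, IsFin, Data, Next) ->
    cowboy_stream:data(StreamID, IsFin, Data, Next).

info(StreamID, Info, Next) ->
    cowboy_stream:info(StreamID, Info, Next).

terminate(StreamID, Reason, Next) ->
    cowboy_stream:terminate(StreamID, Reason, Next).

early_error(StreamID, Reason, PartialReq, Resp, Opts) ->
    cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts).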
+ +maybe_req_close(#state{opts=#{http10_keepalive := false}}, _, 'HTTP/1.0') -> + close; +maybe_req_close(_, #{<<"connection">> := Conn}, 'HTTP/1.0') -> + Conns = cow_http_hd:parse_connection(Conn), + case lists:member(<<"keep-alive">>, Conns) of + true -> keepalive; + false -> close + end; +maybe_req_close(_, _, 'HTTP/1.0') -> + close; +maybe_req_close(_, #{<<"connection">> := Conn}, 'HTTP/1.1') -> + case connection_hd_is_close(Conn) of + true -> close; + false -> keepalive + end; +maybe_req_close(_, _, _) -> + keepalive. + +connection(State=#state{last_streamid=StreamID}, Headers=#{<<"connection">> := Conn}, StreamID, _) -> + case connection_hd_is_close(Conn) of + true -> {State, Headers}; + %% @todo Here we need to remove keep-alive and add close, not just add close. + false -> {State, Headers#{<<"connection">> => [<<"close, ">>, Conn]}} + end; +connection(State=#state{last_streamid=StreamID}, Headers, StreamID, _) -> + {State, Headers#{<<"connection">> => <<"close">>}}; +connection(State, Headers=#{<<"connection">> := Conn}, StreamID, _) -> + case connection_hd_is_close(Conn) of + true -> {State#state{last_streamid=StreamID}, Headers}; + %% @todo Here we need to set keep-alive only if it wasn't set before. + false -> {State, Headers} + end; +connection(State, Headers, _, 'HTTP/1.0') -> + {State, Headers#{<<"connection">> => <<"keep-alive">>}}; +connection(State, Headers, _, _) -> + {State, Headers}. + +connection_hd_is_close(Conn) -> + Conns = cow_http_hd:parse_connection(iolist_to_binary(Conn)), + lists:member(<<"close">>, Conns). + +stream_te(streaming, _) -> + not_chunked; +%% No TE header was sent. +stream_te(_, #stream{te=undefined}) -> + no_trailers; +stream_te(_, #stream{te=TE0}) -> + try cow_http_hd:parse_te(TE0) of + {TE1, _} -> TE1 + catch _:_ -> + %% If we can't parse the TE header, assume we can't send trailers. + no_trailers + end. + +%% This function is only called when an error occurs on a new stream. +-spec error_terminate(cowboy:http_status(), #state{}, _) -> no_return(). +error_terminate(StatusCode, State=#state{ref=Ref, peer=Peer, in_state=StreamState}, Reason) -> + PartialReq = case StreamState of + #ps_request_line{} -> #{ + ref => Ref, + peer => Peer + }; + #ps_header{method=Method, path=Path, qs=Qs, + version=Version, headers=ReqHeaders} -> #{ + ref => Ref, + peer => Peer, + method => Method, + path => Path, + qs => Qs, + version => Version, + headers => case ReqHeaders of + undefined -> #{}; + _ -> ReqHeaders + end + } + end, + early_error(StatusCode, State, Reason, PartialReq, #{<<"connection">> => <<"close">>}), + terminate(State, Reason). + +early_error(StatusCode, State, Reason, PartialReq) -> + early_error(StatusCode, State, Reason, PartialReq, #{}). + +early_error(StatusCode0, #state{socket=Socket, transport=Transport, + opts=Opts, in_streamid=StreamID}, Reason, PartialReq, RespHeaders0) -> + RespHeaders1 = RespHeaders0#{<<"content-length">> => <<"0">>}, + Resp = {response, StatusCode0, RespHeaders1, <<>>}, + try cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts) of + {response, StatusCode, RespHeaders, RespBody} -> + Transport:send(Socket, [ + cow_http:response(StatusCode, 'HTTP/1.1', maps:to_list(RespHeaders)), + %% @todo We shouldn't send the body when the method is HEAD. + %% @todo Technically we allow the sendfile tuple. 
+ RespBody + ]) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(early_error, + [StreamID, Reason, PartialReq, Resp, Opts], + Class, Exception, Stacktrace), Opts), + %% We still need to send an error response, so send what we initially + %% wanted to send. It's better than nothing. + Transport:send(Socket, cow_http:response(StatusCode0, + 'HTTP/1.1', maps:to_list(RespHeaders1))) + end, + ok. + +initiate_closing(State=#state{streams=[]}, Reason) -> + terminate(State, Reason); +initiate_closing(State=#state{streams=[_Stream|Streams], + out_streamid=OutStreamID}, Reason) -> + terminate_all_streams(State, Streams, Reason), + State#state{last_streamid=OutStreamID}. + +-spec terminate(_, _) -> no_return(). +terminate(undefined, Reason) -> + exit({shutdown, Reason}); +terminate(State=#state{streams=Streams, children=Children}, Reason) -> + terminate_all_streams(State, Streams, Reason), + cowboy_children:terminate(Children), + terminate_linger(State), + exit({shutdown, Reason}). + +terminate_all_streams(_, [], _) -> + ok; +terminate_all_streams(State, [#stream{id=StreamID, state=StreamState}|Tail], Reason) -> + stream_call_terminate(StreamID, Reason, StreamState, State), + terminate_all_streams(State, Tail, Reason). + +terminate_linger(State=#state{socket=Socket, transport=Transport, opts=Opts}) -> + case Transport:shutdown(Socket, write) of + ok -> + case maps:get(linger_timeout, Opts, 1000) of + 0 -> + ok; + infinity -> + terminate_linger_before_loop(State, undefined, Transport:messages()); + Timeout -> + TimerRef = erlang:start_timer(Timeout, self(), linger_timeout), + terminate_linger_before_loop(State, TimerRef, Transport:messages()) + end; + {error, _} -> + ok + end. + +terminate_linger_before_loop(State, TimerRef, Messages) -> + %% We may already be in active mode when we do this + %% but it's OK because we are shutting down anyway. + case setopts_active(State) of + ok -> + terminate_linger_loop(State, TimerRef, Messages); + {error, _} -> + ok + end. + +terminate_linger_loop(State=#state{socket=Socket}, TimerRef, Messages) -> + receive + {OK, Socket, _} when OK =:= element(1, Messages) -> + terminate_linger_loop(State, TimerRef, Messages); + {Closed, Socket} when Closed =:= element(2, Messages) -> + ok; + {Error, Socket, _} when Error =:= element(3, Messages) -> + ok; + {Passive, Socket} when Passive =:= tcp_passive; Passive =:= ssl_passive -> + terminate_linger_before_loop(State, TimerRef, Messages); + {timeout, TimerRef, linger_timeout} -> + ok; + _ -> + terminate_linger_loop(State, TimerRef, Messages) + end. + +%% System callbacks. + +-spec system_continue(_, _, #state{}) -> ok. +system_continue(_, _, State) -> + loop(State). + +-spec system_terminate(any(), _, _, #state{}) -> no_return(). +system_terminate(Reason0, _, _, State) -> + Reason = {stop, {exit, Reason0}, 'sys:terminate/2,3 was called.'}, + loop(initiate_closing(State, Reason)). + +-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::{#state{}, binary()}. +system_code_change(Misc, _, _, _) -> + {ok, Misc}. diff --git a/src/wsSrv/cowboy_http2.erl b/src/wsSrv/cowboy_http2.erl new file mode 100644 index 0000000..ed2623c --- /dev/null +++ b/src/wsSrv/cowboy_http2.erl @@ -0,0 +1,1220 @@ +%% Copyright (c) 2015-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_http2). + +-export([init/6]). +-export([init/10]). +-export([init/12]). + +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-type opts() :: #{ + active_n => pos_integer(), + compress_buffering => boolean(), + compress_threshold => non_neg_integer(), + connection_type => worker | supervisor, + connection_window_margin_size => 0..16#7fffffff, + connection_window_update_threshold => 0..16#7fffffff, + enable_connect_protocol => boolean(), + env => cowboy_middleware:env(), + goaway_initial_timeout => timeout(), + goaway_complete_timeout => timeout(), + idle_timeout => timeout(), + inactivity_timeout => timeout(), + initial_connection_window_size => 65535..16#7fffffff, + initial_stream_window_size => 0..16#7fffffff, + linger_timeout => timeout(), + logger => module(), + max_concurrent_streams => non_neg_integer() | infinity, + max_connection_buffer_size => non_neg_integer(), + max_connection_window_size => 0..16#7fffffff, + max_decode_table_size => non_neg_integer(), + max_encode_table_size => non_neg_integer(), + max_frame_size_received => 16384..16777215, + max_frame_size_sent => 16384..16777215 | infinity, + max_received_frame_rate => {pos_integer(), timeout()}, + max_reset_stream_rate => {pos_integer(), timeout()}, + max_stream_buffer_size => non_neg_integer(), + max_stream_window_size => 0..16#7fffffff, + metrics_callback => cowboy_metrics_h:metrics_callback(), + metrics_req_filter => fun((cowboy_req:req()) -> map()), + metrics_resp_headers_filter => fun((cowboy:http_headers()) -> cowboy:http_headers()), + middlewares => [module()], + preface_timeout => timeout(), + proxy_header => boolean(), + sendfile => boolean(), + settings_timeout => timeout(), + shutdown_timeout => timeout(), + stream_handlers => [module()], + stream_window_data_threshold => 0..16#7fffffff, + stream_window_margin_size => 0..16#7fffffff, + stream_window_update_threshold => 0..16#7fffffff, + tracer_callback => cowboy_tracer_h:tracer_callback(), + tracer_flags => [atom()], + tracer_match_specs => cowboy_tracer_h:tracer_match_specs(), + %% Open ended because configured stream handlers might add options. + _ => _ +}. +-export_type([opts/0]). + +-record(stream, { + %% Whether the stream is currently stopping. + status = running :: running | stopping, + + %% Flow requested for this stream. + flow = 0 :: non_neg_integer(), + + %% Stream state. + state :: {module, any()} +}). + +-record(state, { + parent = undefined :: pid(), + ref :: ranch:ref(), + socket = undefined :: inet:socket(), + transport :: module(), + proxy_header :: undefined | ranch_proxy_header:proxy_info(), + opts = #{} :: opts(), + + %% Timer for idle_timeout; also used for goaway timers. + timer = undefined :: undefined | reference(), + + %% Remote address and port for the connection. + peer = undefined :: {inet:ip_address(), inet:port_number()}, + + %% Local address and port for the connection. 
+ sock = undefined :: {inet:ip_address(), inet:port_number()}, + + %% Client certificate (TLS only). + cert :: undefined | binary(), + + %% HTTP/2 state machine. + http2_status :: sequence | settings | upgrade | connected | closing_initiated | closing, + http2_machine :: cow_http2_machine:http2_machine(), + + %% HTTP/2 frame rate flood protection. + frame_rate_num :: undefined | pos_integer(), + frame_rate_time :: undefined | integer(), + + %% HTTP/2 reset stream flood protection. + reset_rate_num :: undefined | pos_integer(), + reset_rate_time :: undefined | integer(), + + %% Flow requested for all streams. + flow = 0 :: non_neg_integer(), + + %% Currently active HTTP/2 streams. Streams may be initiated either + %% by the client or by the server through PUSH_PROMISE frames. + streams = #{} :: #{cow_http2:streamid() => #stream{}}, + + %% Streams can spawn zero or more children which are then managed + %% by this module if operating as a supervisor. + children = cowboy_children:init() :: cowboy_children:children() +}). + +-spec init(pid(), ranch:ref(), inet:socket(), module(), + ranch_proxy_header:proxy_info() | undefined, cowboy:opts()) -> ok. +init(Parent, Ref, Socket, Transport, ProxyHeader, Opts) -> + Peer0 = Transport:peername(Socket), + Sock0 = Transport:sockname(Socket), + Cert1 = case Transport:name() of + ssl -> + case ssl:peercert(Socket) of + {error, no_peercert} -> + {ok, undefined}; + Cert0 -> + Cert0 + end; + _ -> + {ok, undefined} + end, + case {Peer0, Sock0, Cert1} of + {{ok, Peer}, {ok, Sock}, {ok, Cert}} -> + init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, <<>>); + {{error, Reason}, _, _} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the peer name.'}); + {_, {error, Reason}, _} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the sock name.'}); + {_, _, {error, Reason}} -> + terminate(undefined, {socket_error, Reason, + 'A socket error occurred when retrieving the client TLS certificate.'}) + end. + +-spec init(pid(), ranch:ref(), inet:socket(), module(), + ranch_proxy_header:proxy_info() | undefined, cowboy:opts(), + {inet:ip_address(), inet:port_number()}, {inet:ip_address(), inet:port_number()}, + binary() | undefined, binary()) -> ok. +init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, Buffer) -> + {ok, Preface, HTTP2Machine} = cow_http2_machine:init(server, Opts), + State = set_idle_timeout(init_rate_limiting(#state{parent=Parent, ref=Ref, socket=Socket, + transport=Transport, proxy_header=ProxyHeader, + opts=Opts, peer=Peer, sock=Sock, cert=Cert, + http2_status=sequence, http2_machine=HTTP2Machine})), + Transport:send(Socket, Preface), + setopts_active(State), + case Buffer of + <<>> -> loop(State, Buffer); + _ -> parse(State, Buffer) + end. + +init_rate_limiting(State) -> + CurrentTime = erlang:monotonic_time(millisecond), + init_reset_rate_limiting(init_frame_rate_limiting(State, CurrentTime), CurrentTime). + +init_frame_rate_limiting(State=#state{opts=Opts}, CurrentTime) -> + {FrameRateNum, FrameRatePeriod} = maps:get(max_received_frame_rate, Opts, {10000, 10000}), + State#state{ + frame_rate_num=FrameRateNum, frame_rate_time=add_period(CurrentTime, FrameRatePeriod) + }. 
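Both flood protections configured here (max_received_frame_rate for frames, max_reset_stream_rate for stream resets) use the same fixed-window scheme: a budget of N events plus a deadline of now + Period. A standalone sketch of that scheme, with illustrative names and assuming a finite period:

-module(rate_window_sketch).
-export([new/2, hit/1]).

%% Window = {Remaining, Deadline, {Num, PeriodMs}}.
new(Num, PeriodMs) ->
    {Num, erlang:monotonic_time(millisecond) + PeriodMs, {Num, PeriodMs}}.

%% Returns exceeded when Num events arrive before PeriodMs has elapsed;
%% otherwise the budget is decremented, and once it is spent after the
%% period is over the window restarts with a fresh budget.
hit({1, Deadline, {Num, PeriodMs}}) ->
    Now = erlang:monotonic_time(millisecond),
    if
        Now < Deadline -> exceeded;
        true           -> {ok, new(Num, PeriodMs)}
    end;
hit({Remaining, Deadline, Cfg}) ->
    {ok, {Remaining - 1, Deadline, Cfg}}.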
+ +init_reset_rate_limiting(State=#state{opts=Opts}, CurrentTime) -> + {ResetRateNum, ResetRatePeriod} = maps:get(max_reset_stream_rate, Opts, {10, 10000}), + State#state{ + reset_rate_num=ResetRateNum, reset_rate_time=add_period(CurrentTime, ResetRatePeriod) + }. + +add_period(_, infinity) -> infinity; +add_period(Time, Period) -> Time + Period. + +%% @todo Add an argument for the request body. +-spec init(pid(), ranch:ref(), inet:socket(), module(), + ranch_proxy_header:proxy_info() | undefined, cowboy:opts(), + {inet:ip_address(), inet:port_number()}, {inet:ip_address(), inet:port_number()}, + binary() | undefined, binary(), map() | undefined, cowboy_req:req()) -> ok. +init(Parent, Ref, Socket, Transport, ProxyHeader, Opts, Peer, Sock, Cert, Buffer, + _Settings, Req=#{method := Method}) -> + {ok, Preface, HTTP2Machine0} = cow_http2_machine:init(server, Opts), + {ok, StreamID, HTTP2Machine} + = cow_http2_machine:init_upgrade_stream(Method, HTTP2Machine0), + State0 = #state{parent=Parent, ref=Ref, socket=Socket, + transport=Transport, proxy_header=ProxyHeader, + opts=Opts, peer=Peer, sock=Sock, cert=Cert, + http2_status=upgrade, http2_machine=HTTP2Machine}, + State1 = headers_frame(State0#state{ + http2_machine=HTTP2Machine}, StreamID, Req), + %% We assume that the upgrade will be applied. A stream handler + %% must not prevent the normal operations of the server. + State2 = info(State1, 1, {switch_protocol, #{ + <<"connection">> => <<"Upgrade">>, + <<"upgrade">> => <<"h2c">> + }, ?MODULE, undefined}), %% @todo undefined or #{}? + State = set_idle_timeout(init_rate_limiting(State2#state{http2_status=sequence})), + Transport:send(Socket, Preface), + setopts_active(State), + case Buffer of + <<>> -> loop(State, Buffer); + _ -> parse(State, Buffer) + end. + +%% Because HTTP/2 has flow control and Cowboy has other rate limiting +%% mechanisms implemented, a very large active_n value should be fine, +%% as long as the stream handlers do their work in a timely manner. +setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> + N = maps:get(active_n, Opts, 100), + Transport:setopts(Socket, [{active, N}]). + +loop(State=#state{parent=Parent, socket=Socket, transport=Transport, + opts=Opts, timer=TimerRef, children=Children}, Buffer) -> + Messages = Transport:messages(), + InactivityTimeout = maps:get(inactivity_timeout, Opts, 300000), + receive + %% Socket messages. + {OK, Socket, Data} when OK =:= element(1, Messages) -> + parse(set_idle_timeout(State), << Buffer/binary, Data/binary >>); + {Closed, Socket} when Closed =:= element(2, Messages) -> + Reason = case State#state.http2_status of + closing -> {stop, closed, 'The client is going away.'}; + _ -> {socket_error, closed, 'The socket has been closed.'} + end, + terminate(State, Reason); + {Error, Socket, Reason} when Error =:= element(3, Messages) -> + terminate(State, {socket_error, Reason, 'An error has occurred on the socket.'}); + {Passive, Socket} when Passive =:= element(4, Messages); + %% Hardcoded for compatibility with Ranch 1.x. + Passive =:= tcp_passive; Passive =:= ssl_passive -> + setopts_active(State), + loop(State, Buffer); + %% System messages. 
+ {'EXIT', Parent, shutdown} -> + Reason = {stop, {exit, shutdown}, 'Parent process requested shutdown.'}, + loop(initiate_closing(State, Reason), Buffer); + {'EXIT', Parent, Reason} -> + terminate(State, {stop, {exit, Reason}, 'Parent process terminated.'}); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], {State, Buffer}); + %% Timeouts. + {timeout, TimerRef, idle_timeout} -> + terminate(State, {stop, timeout, + 'Connection idle longer than configuration allows.'}); + {timeout, Ref, {shutdown, Pid}} -> + cowboy_children:shutdown_timeout(Children, Ref, Pid), + loop(State, Buffer); + {timeout, TRef, {cow_http2_machine, Name}} -> + loop(timeout(State, Name, TRef), Buffer); + {timeout, TimerRef, {goaway_initial_timeout, Reason}} -> + loop(closing(State, Reason), Buffer); + {timeout, TimerRef, {goaway_complete_timeout, Reason}} -> + terminate(State, {stop, stop_reason(Reason), + 'Graceful shutdown timed out.'}); + %% Messages pertaining to a stream. + {{Pid, StreamID}, Msg} when Pid =:= self() -> + loop(info(State, StreamID, Msg), Buffer); + %% Exit signal from children. + Msg = {'EXIT', Pid, _} -> + loop(down(State, Pid, Msg), Buffer); + %% Calls from supervisor module. + {'$gen_call', From, Call} -> + cowboy_children:handle_supervisor_call(Call, From, Children, ?MODULE), + loop(State, Buffer); + Msg -> + cowboy:log(warning, "Received stray message ~p.", [Msg], Opts), + loop(State, Buffer) + after InactivityTimeout -> + terminate(State, {internal_error, timeout, 'No message or data received before timeout.'}) + end. + +set_idle_timeout(State=#state{http2_status=Status, timer=TimerRef}) + when Status =:= closing_initiated orelse Status =:= closing, + TimerRef =/= undefined -> + State; +set_idle_timeout(State=#state{opts=Opts}) -> + set_timeout(State, maps:get(idle_timeout, Opts, 60000), idle_timeout). + +set_timeout(State=#state{timer=TimerRef0}, Timeout, Message) -> + ok = case TimerRef0 of + undefined -> ok; + _ -> erlang:cancel_timer(TimerRef0, [{async, true}, {info, false}]) + end, + TimerRef = case Timeout of + infinity -> undefined; + Timeout -> erlang:start_timer(Timeout, self(), Message) + end, + State#state{timer=TimerRef}. + +%% HTTP/2 protocol parsing. + +parse(State=#state{http2_status=sequence}, Data) -> + case cow_http2:parse_sequence(Data) of + {ok, Rest} -> + parse(State#state{http2_status=settings}, Rest); + more -> + loop(State, Data); + Error = {connection_error, _, _} -> + terminate(State, Error) + end; +parse(State=#state{http2_status=Status, http2_machine=HTTP2Machine, streams=Streams}, Data) -> + MaxFrameSize = cow_http2_machine:get_local_setting(max_frame_size, HTTP2Machine), + case cow_http2:parse(Data, MaxFrameSize) of + {ok, Frame, Rest} -> + parse(frame_rate(State, Frame), Rest); + {ignore, Rest} -> + parse(frame_rate(State, ignore), Rest); + {stream_error, StreamID, Reason, Human, Rest} -> + parse(reset_stream(State, StreamID, {stream_error, Reason, Human}), Rest); + Error = {connection_error, _, _} -> + terminate(State, Error); + %% Terminate the connection if we are closing and all streams have completed. + more when Status =:= closing, Streams =:= #{} -> + terminate(State, {stop, normal, 'The connection is going away.'}); + more -> + loop(State, Data) + end. + +%% Frame rate flood protection. 
+ +frame_rate(State0=#state{frame_rate_num=Num0, frame_rate_time=Time}, Frame) -> + {Result, State} = case Num0 - 1 of + 0 -> + CurrentTime = erlang:monotonic_time(millisecond), + if + CurrentTime < Time -> + {error, State0}; + true -> + %% When the option has a period of infinity we cannot reach this clause. + {ok, init_frame_rate_limiting(State0, CurrentTime)} + end; + Num -> + {ok, State0#state{frame_rate_num=Num}} + end, + case {Result, Frame} of + {ok, ignore} -> ignored_frame(State); + {ok, _} -> frame(State, Frame); + {error, _} -> terminate(State, {connection_error, enhance_your_calm, + 'Frame rate larger than configuration allows. Flood? (CVE-2019-9512, CVE-2019-9515, CVE-2019-9518)'}) + end. + +%% Frames received. + +%% We do nothing when receiving a lingering DATA frame. +%% We already removed the stream flow from the connection +%% flow and are therefore already accounting for the window +%% being reduced by these frames. +frame(State=#state{http2_machine=HTTP2Machine0}, Frame) -> + case cow_http2_machine:frame(Frame, HTTP2Machine0) of + {ok, HTTP2Machine} -> + maybe_ack(State#state{http2_machine=HTTP2Machine}, Frame); + {ok, {data, StreamID, IsFin, Data}, HTTP2Machine} -> + data_frame(State#state{http2_machine=HTTP2Machine}, StreamID, IsFin, Data); + {ok, {headers, StreamID, IsFin, Headers, PseudoHeaders, BodyLen}, HTTP2Machine} -> + headers_frame(State#state{http2_machine=HTTP2Machine}, + StreamID, IsFin, Headers, PseudoHeaders, BodyLen); + {ok, {trailers, _StreamID, _Trailers}, HTTP2Machine} -> + %% @todo Propagate trailers. + State#state{http2_machine=HTTP2Machine}; + {ok, {rst_stream, StreamID, Reason}, HTTP2Machine} -> + rst_stream_frame(State#state{http2_machine=HTTP2Machine}, StreamID, Reason); + {ok, GoAway={goaway, _, _, _}, HTTP2Machine} -> + goaway(State#state{http2_machine=HTTP2Machine}, GoAway); + {send, SendData, HTTP2Machine} -> + %% We may need to send an alarm for each of the streams sending data. + lists:foldl( + fun({StreamID, _, _}, S) -> maybe_send_data_alarm(S, HTTP2Machine0, StreamID) end, + send_data(maybe_ack(State#state{http2_machine=HTTP2Machine}, Frame), SendData, []), + SendData); + {error, {stream_error, StreamID, Reason, Human}, HTTP2Machine} -> + reset_stream(State#state{http2_machine=HTTP2Machine}, + StreamID, {stream_error, Reason, Human}); + {error, Error={connection_error, _, _}, HTTP2Machine} -> + terminate(State#state{http2_machine=HTTP2Machine}, Error) + end. + +%% We use this opportunity to mark the HTTP/2 status as connected +%% if we were still waiting for a SETTINGS frame. +maybe_ack(State=#state{http2_status=settings}, Frame) -> + maybe_ack(State#state{http2_status=connected}, Frame); +maybe_ack(State=#state{socket=Socket, transport=Transport}, Frame) -> + case Frame of + {settings, _} -> Transport:send(Socket, cow_http2:settings_ack()); + {ping, Opaque} -> Transport:send(Socket, cow_http2:ping_ack(Opaque)); + _ -> ok + end, + State. + +data_frame(State0=#state{opts=Opts, flow=Flow, streams=Streams}, StreamID, IsFin, Data) -> + case Streams of + #{StreamID := Stream=#stream{status=running, flow=StreamFlow, state=StreamState0}} -> + try cowboy_stream:data(StreamID, IsFin, Data, StreamState0) of + {Commands, StreamState} -> + %% Remove the amount of data received from the flow. + %% We may receive more data than we requested. We ensure + %% that the flow value doesn't go lower than 0. 
+ Size = byte_size(Data), + State = update_window(State0#state{flow=max(0, Flow - Size), + streams=Streams#{StreamID => Stream#stream{ + flow=max(0, StreamFlow - Size), state=StreamState}}}, + StreamID), + commands(State, StreamID, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(data, + [StreamID, IsFin, Data, StreamState0], + Class, Exception, Stacktrace), Opts), + reset_stream(State0, StreamID, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:data/4.'}) + end; + %% We ignore DATA frames for streams that are stopping. + #{} -> + State0 + end. + +headers_frame(State, StreamID, IsFin, Headers, + PseudoHeaders=#{method := <<"CONNECT">>}, _) + when map_size(PseudoHeaders) =:= 2 -> + early_error(State, StreamID, IsFin, Headers, PseudoHeaders, 501, + 'The CONNECT method is currently not implemented. (RFC7231 4.3.6)'); +headers_frame(State, StreamID, IsFin, Headers, + PseudoHeaders=#{method := <<"TRACE">>}, _) -> + early_error(State, StreamID, IsFin, Headers, PseudoHeaders, 501, + 'The TRACE method is currently not implemented. (RFC7231 4.3.8)'); +headers_frame(State, StreamID, IsFin, Headers, PseudoHeaders=#{authority := Authority}, BodyLen) -> + headers_frame_parse_host(State, StreamID, IsFin, Headers, PseudoHeaders, BodyLen, Authority); +headers_frame(State, StreamID, IsFin, Headers, PseudoHeaders, BodyLen) -> + case lists:keyfind(<<"host">>, 1, Headers) of + {_, Authority} -> + headers_frame_parse_host(State, StreamID, IsFin, Headers, PseudoHeaders, BodyLen, Authority); + _ -> + reset_stream(State, StreamID, {stream_error, protocol_error, + 'Requests translated from HTTP/1.1 must include a host header. (RFC7540 8.1.2.3, RFC7230 5.4)'}) + end. + +headers_frame_parse_host(State=#state{ref=Ref, peer=Peer, sock=Sock, cert=Cert, proxy_header=ProxyHeader}, + StreamID, IsFin, Headers, PseudoHeaders=#{method := Method, scheme := Scheme, path := PathWithQs}, + BodyLen, Authority) -> + try cow_http_hd:parse_host(Authority) of + {Host, Port0} -> + Port = ensure_port(Scheme, Port0), + try cow_http:parse_fullpath(PathWithQs) of + {<<>>, _} -> + reset_stream(State, StreamID, {stream_error, protocol_error, + 'The path component must not be empty. (RFC7540 8.1.2.3)'}); + {Path, Qs} -> + Req0 = #{ + ref => Ref, + pid => self(), + streamid => StreamID, + peer => Peer, + sock => Sock, + cert => Cert, + method => Method, + scheme => Scheme, + host => Host, + port => Port, + path => Path, + qs => Qs, + version => 'HTTP/2', + headers => headers_to_map(Headers, #{}), + has_body => IsFin =:= nofin, + body_length => BodyLen + }, + %% We add the PROXY header information if any. + Req1 = case ProxyHeader of + undefined -> Req0; + _ -> Req0#{proxy_header => ProxyHeader} + end, + %% We add the protocol information for extended CONNECTs. + Req = case PseudoHeaders of + #{protocol := Protocol} -> Req1#{protocol => Protocol}; + _ -> Req1 + end, + headers_frame(State, StreamID, Req) + catch _:_ -> + reset_stream(State, StreamID, {stream_error, protocol_error, + 'The :path pseudo-header is invalid. (RFC7540 8.1.2.3)'}) + end + catch _:_ -> + reset_stream(State, StreamID, {stream_error, protocol_error, + 'The :authority pseudo-header is invalid. (RFC7540 8.1.2.3)'}) + end. + +ensure_port(<<"http">>, undefined) -> 80; +ensure_port(<<"https">>, undefined) -> 443; +ensure_port(_, Port) -> Port. + +%% This function is necessary to properly handle duplicate headers +%% and the special-case cookie header. 
+headers_to_map([], Acc) -> + Acc; +headers_to_map([{Name, Value}|Tail], Acc0) -> + Acc = case Acc0 of + %% The cookie header does not use proper HTTP header lists. + #{Name := Value0} when Name =:= <<"cookie">> -> + Acc0#{Name => << Value0/binary, "; ", Value/binary >>}; + #{Name := Value0} -> + Acc0#{Name => << Value0/binary, ", ", Value/binary >>}; + _ -> + Acc0#{Name => Value} + end, + headers_to_map(Tail, Acc). + +headers_frame(State=#state{opts=Opts, streams=Streams}, StreamID, Req) -> + try cowboy_stream:init(StreamID, Req, Opts) of + {Commands, StreamState} -> + commands(State#state{ + streams=Streams#{StreamID => #stream{state=StreamState}}}, + StreamID, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(init, + [StreamID, Req, Opts], + Class, Exception, Stacktrace), Opts), + reset_stream(State, StreamID, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:init/3.'}) + end. + +early_error(State0=#state{ref=Ref, opts=Opts, peer=Peer}, + StreamID, _IsFin, Headers, #{method := Method}, + StatusCode0, HumanReadable) -> + %% We automatically terminate the stream but it is not an error + %% per se (at least not in the first implementation). + Reason = {stream_error, no_error, HumanReadable}, + %% The partial Req is minimal for now. We only have one case + %% where it can be called (when a method is completely disabled). + %% @todo Fill in the other elements. + PartialReq = #{ + ref => Ref, + peer => Peer, + method => Method, + headers => headers_to_map(Headers, #{}) + }, + Resp = {response, StatusCode0, RespHeaders0=#{<<"content-length">> => <<"0">>}, <<>>}, + try cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts) of + {response, StatusCode, RespHeaders, RespBody} -> + send_response(State0, StreamID, StatusCode, RespHeaders, RespBody) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(early_error, + [StreamID, Reason, PartialReq, Resp, Opts], + Class, Exception, Stacktrace), Opts), + %% We still need to send an error response, so send what we initially + %% wanted to send. It's better than nothing. + send_headers(State0, StreamID, fin, StatusCode0, RespHeaders0) + end. + +rst_stream_frame(State=#state{streams=Streams0, children=Children0}, StreamID, Reason) -> + case maps:take(StreamID, Streams0) of + {#stream{state=StreamState}, Streams} -> + terminate_stream_handler(State, StreamID, Reason, StreamState), + Children = cowboy_children:shutdown(Children0, StreamID), + State#state{streams=Streams, children=Children}; + error -> + State + end. + +ignored_frame(State=#state{http2_machine=HTTP2Machine0}) -> + case cow_http2_machine:ignored_frame(HTTP2Machine0) of + {ok, HTTP2Machine} -> + State#state{http2_machine=HTTP2Machine}; + {error, Error={connection_error, _, _}, HTTP2Machine} -> + terminate(State#state{http2_machine=HTTP2Machine}, Error) + end. + +%% HTTP/2 timeouts. + +timeout(State=#state{http2_machine=HTTP2Machine0}, Name, TRef) -> + case cow_http2_machine:timeout(Name, TRef, HTTP2Machine0) of + {ok, HTTP2Machine} -> + State#state{http2_machine=HTTP2Machine}; + {error, Error={connection_error, _, _}, HTTP2Machine} -> + terminate(State#state{http2_machine=HTTP2Machine}, Error) + end. + +%% Erlang messages. + +down(State0=#state{opts=Opts, children=Children0}, Pid, Msg) -> + State = case cowboy_children:down(Children0, Pid) of + %% The stream was terminated already. + {ok, undefined, Children} -> + State0#state{children=Children}; + %% The stream is still running. 
+ {ok, StreamID, Children} -> + info(State0#state{children=Children}, StreamID, Msg); + %% The process was unknown. + error -> + cowboy:log(warning, "Received EXIT signal ~p for unknown process ~p.~n", + [Msg, Pid], Opts), + State0 + end, + if + State#state.http2_status =:= closing, State#state.streams =:= #{} -> + terminate(State, {stop, normal, 'The connection is going away.'}); + true -> + State + end. + +info(State=#state{opts=Opts, http2_machine=HTTP2Machine, streams=Streams}, StreamID, Msg) -> + case Streams of + #{StreamID := Stream=#stream{state=StreamState0}} -> + try cowboy_stream:info(StreamID, Msg, StreamState0) of + {Commands, StreamState} -> + commands(State#state{streams=Streams#{StreamID => Stream#stream{state=StreamState}}}, + StreamID, Commands) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(info, + [StreamID, Msg, StreamState0], + Class, Exception, Stacktrace), Opts), + reset_stream(State, StreamID, {internal_error, {Class, Exception}, + 'Unhandled exception in cowboy_stream:info/3.'}) + end; + _ -> + case cow_http2_machine:is_lingering_stream(StreamID, HTTP2Machine) of + true -> + ok; + false -> + cowboy:log(warning, "Received message ~p for unknown stream ~p.", + [Msg, StreamID], Opts) + end, + State + end. + +%% Stream handler commands. +%% +%% @todo Kill the stream if it tries to send a response, headers, +%% data or push promise when the stream is closed or half-closed. + +commands(State, _, []) -> + State; +%% Error responses are sent only if a response wasn't sent already. +commands(State=#state{http2_machine=HTTP2Machine}, StreamID, + [{error_response, StatusCode, Headers, Body}|Tail]) -> + case cow_http2_machine:get_stream_local_state(StreamID, HTTP2Machine) of + {ok, idle, _} -> + commands(State, StreamID, [{response, StatusCode, Headers, Body}|Tail]); + _ -> + commands(State, StreamID, Tail) + end; +%% Send an informational response. +commands(State0, StreamID, [{inform, StatusCode, Headers}|Tail]) -> + State = send_headers(State0, StreamID, idle, StatusCode, Headers), + commands(State, StreamID, Tail); +%% Send response headers. +commands(State0, StreamID, [{response, StatusCode, Headers, Body}|Tail]) -> + State = send_response(State0, StreamID, StatusCode, Headers, Body), + commands(State, StreamID, Tail); +%% Send response headers. +commands(State0, StreamID, [{headers, StatusCode, Headers}|Tail]) -> + State = send_headers(State0, StreamID, nofin, StatusCode, Headers), + commands(State, StreamID, Tail); +%% Send a response body chunk. +commands(State0, StreamID, [{data, IsFin, Data}|Tail]) -> + State = maybe_send_data(State0, StreamID, IsFin, Data, []), + commands(State, StreamID, Tail); +%% Send trailers. +commands(State0, StreamID, [{trailers, Trailers}|Tail]) -> + State = maybe_send_data(State0, StreamID, fin, {trailers, maps:to_list(Trailers)}, []), + commands(State, StreamID, Tail); +%% Send a push promise. +%% +%% @todo Responses sent as a result of a push_promise request +%% must not send push_promise frames themselves. +%% +%% @todo We should not send push_promise frames when we are +%% in the closing http2_status. 
+commands(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0}, + StreamID, [{push, Method, Scheme, Host, Port, Path, Qs, Headers0}|Tail]) -> + Authority = case {Scheme, Port} of + {<<"http">>, 80} -> Host; + {<<"https">>, 443} -> Host; + _ -> iolist_to_binary([Host, $:, integer_to_binary(Port)]) + end, + PathWithQs = iolist_to_binary(case Qs of + <<>> -> Path; + _ -> [Path, $?, Qs] + end), + PseudoHeaders = #{ + method => Method, + scheme => Scheme, + authority => Authority, + path => PathWithQs + }, + %% We need to make sure the header value is binary before we can + %% create the Req object, as it expects them to be flat. + Headers = maps:to_list(maps:map(fun(_, V) -> iolist_to_binary(V) end, Headers0)), + State = case cow_http2_machine:prepare_push_promise(StreamID, HTTP2Machine0, + PseudoHeaders, Headers) of + {ok, PromisedStreamID, HeaderBlock, HTTP2Machine} -> + Transport:send(Socket, cow_http2:push_promise( + StreamID, PromisedStreamID, HeaderBlock)), + headers_frame(State0#state{http2_machine=HTTP2Machine}, + PromisedStreamID, fin, Headers, PseudoHeaders, 0); + {error, no_push} -> + State0 + end, + commands(State, StreamID, Tail); +%% Read the request body. +commands(State0=#state{flow=Flow, streams=Streams}, StreamID, [{flow, Size}|Tail]) -> + #{StreamID := Stream=#stream{flow=StreamFlow}} = Streams, + State = update_window(State0#state{flow=Flow + Size, + streams=Streams#{StreamID => Stream#stream{flow=StreamFlow + Size}}}, + StreamID), + commands(State, StreamID, Tail); +%% Supervise a child process. +commands(State=#state{children=Children}, StreamID, [{spawn, Pid, Shutdown}|Tail]) -> + commands(State#state{children=cowboy_children:up(Children, Pid, StreamID, Shutdown)}, + StreamID, Tail); +%% Error handling. +commands(State, StreamID, [Error = {internal_error, _, _}|_Tail]) -> + %% @todo Do we want to run the commands after an internal_error? + %% @todo Do we even allow commands after? + %% @todo Only reset when the stream still exists. + reset_stream(State, StreamID, Error); +%% Upgrade to HTTP/2. This is triggered by cowboy_http2 itself. +commands(State=#state{socket=Socket, transport=Transport, http2_status=upgrade}, + StreamID, [{switch_protocol, Headers, ?MODULE, _}|Tail]) -> + %% @todo This 101 response needs to be passed through stream handlers. + Transport:send(Socket, cow_http:response(101, 'HTTP/1.1', maps:to_list(Headers))), + commands(State, StreamID, Tail); +%% Use a different protocol within the stream (CONNECT :protocol). +%% @todo Make sure we error out when the feature is disabled. +commands(State0, StreamID, [{switch_protocol, Headers, _Mod, _ModState}|Tail]) -> + State = info(State0, StreamID, {headers, 200, Headers}), + commands(State, StreamID, Tail); +%% Set options dynamically. +commands(State, StreamID, [{set_options, _Opts}|Tail]) -> + commands(State, StreamID, Tail); +commands(State, StreamID, [stop|_Tail]) -> + %% @todo Do we want to run the commands after a stop? + %% @todo Do we even allow commands after? + stop_stream(State, StreamID); +%% Log event. +commands(State=#state{opts=Opts}, StreamID, [Log={log, _, _, _}|Tail]) -> + cowboy:log(Log, Opts), + commands(State, StreamID, Tail). + +%% Tentatively update the window after the flow was updated. 
+ +update_window(State=#state{socket=Socket, transport=Transport, + http2_machine=HTTP2Machine0, flow=Flow, streams=Streams}, StreamID) -> + #{StreamID := #stream{flow=StreamFlow}} = Streams, + {Data1, HTTP2Machine2} = case cow_http2_machine:ensure_window(Flow, HTTP2Machine0) of + ok -> {<<>>, HTTP2Machine0}; + {ok, Increment1, HTTP2Machine1} -> {cow_http2:window_update(Increment1), HTTP2Machine1} + end, + {Data2, HTTP2Machine} = case cow_http2_machine:ensure_window(StreamID, StreamFlow, HTTP2Machine2) of + ok -> {<<>>, HTTP2Machine2}; + {ok, Increment2, HTTP2Machine3} -> {cow_http2:window_update(StreamID, Increment2), HTTP2Machine3} + end, + case {Data1, Data2} of + {<<>>, <<>>} -> ok; + _ -> Transport:send(Socket, [Data1, Data2]) + end, + State#state{http2_machine=HTTP2Machine}. + +%% Send the response, trailers or data. + +send_response(State0=#state{http2_machine=HTTP2Machine0}, StreamID, StatusCode, Headers, Body) -> + Size = case Body of + {sendfile, _, Bytes, _} -> Bytes; + _ -> iolist_size(Body) + end, + case Size of + 0 -> + State = send_headers(State0, StreamID, fin, StatusCode, Headers), + maybe_terminate_stream(State, StreamID, fin); + _ -> + %% @todo Add a test for HEAD to make sure we don't send the body when + %% returning {response...} from a stream handler (or {headers...} then {data...}). + {ok, _IsFin, HeaderBlock, HTTP2Machine} + = cow_http2_machine:prepare_headers(StreamID, HTTP2Machine0, nofin, + #{status => cow_http:status_to_integer(StatusCode)}, + headers_to_list(Headers)), + maybe_send_data(State0#state{http2_machine=HTTP2Machine}, StreamID, fin, Body, + [cow_http2:headers(StreamID, nofin, HeaderBlock)]) + end. + +send_headers(State=#state{socket=Socket, transport=Transport, + http2_machine=HTTP2Machine0}, StreamID, IsFin0, StatusCode, Headers) -> + {ok, IsFin, HeaderBlock, HTTP2Machine} + = cow_http2_machine:prepare_headers(StreamID, HTTP2Machine0, IsFin0, + #{status => cow_http:status_to_integer(StatusCode)}, + headers_to_list(Headers)), + Transport:send(Socket, cow_http2:headers(StreamID, IsFin, HeaderBlock)), + State#state{http2_machine=HTTP2Machine}. + +%% The set-cookie header is special; we can only send one cookie per header. +headers_to_list(Headers0=#{<<"set-cookie">> := SetCookies}) -> + Headers = maps:to_list(maps:remove(<<"set-cookie">>, Headers0)), + Headers ++ [{<<"set-cookie">>, Value} || Value <- SetCookies]; +headers_to_list(Headers) -> + maps:to_list(Headers). + +maybe_send_data(State0=#state{socket=Socket, transport=Transport, + http2_machine=HTTP2Machine0}, StreamID, IsFin, Data0, Prefix) -> + Data = case is_tuple(Data0) of + false -> {data, Data0}; + true -> Data0 + end, + case cow_http2_machine:send_or_queue_data(StreamID, HTTP2Machine0, IsFin, Data) of + {ok, HTTP2Machine} -> + %% If we have prefix data (like a HEADERS frame) we need to send it + %% even if we do not send any DATA frames. + case Prefix of + [] -> ok; + _ -> Transport:send(Socket, Prefix) + end, + maybe_send_data_alarm(State0#state{http2_machine=HTTP2Machine}, HTTP2Machine0, StreamID); + {send, SendData, HTTP2Machine} -> + State = #state{http2_status=Status, streams=Streams} + = send_data(State0#state{http2_machine=HTTP2Machine}, SendData, Prefix), + %% Terminate the connection if we are closing and all streams have completed. + if + Status =:= closing, Streams =:= #{} -> + terminate(State, {stop, normal, 'The connection is going away.'}); + true -> + maybe_send_data_alarm(State, HTTP2Machine0, StreamID) + end + end. 
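headers_to_list/1 above flattens the response headers map for the HTTP/2 encoder; set-cookie is the one header whose list value must become one header tuple per cookie rather than a comma-joined value. A hypothetical check of that behaviour (cookie values invented; it would have to live inside this module since headers_to_list/1 is not exported):

headers_to_list_set_cookie_example() ->
    Headers = #{
        <<"content-type">> => <<"text/plain">>,
        <<"set-cookie">> => [<<"a=1; Path=/">>, <<"b=2; Path=/">>]
    },
    [{<<"content-type">>, <<"text/plain">>},
     {<<"set-cookie">>, <<"a=1; Path=/">>},
     {<<"set-cookie">>, <<"b=2; Path=/">>}]
        = lists:sort(headers_to_list(Headers)),
    ok.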
+ +send_data(State0=#state{socket=Socket, transport=Transport, opts=Opts}, SendData, Prefix) -> + {Acc, State} = prepare_data(State0, SendData, [], Prefix), + _ = [case Data of + {sendfile, Offset, Bytes, Path} -> + %% When sendfile is disabled we explicitly use the fallback. + _ = case maps:get(sendfile, Opts, true) of + true -> Transport:sendfile(Socket, Path, Offset, Bytes); + false -> ranch_transport:sendfile(Transport, Socket, Path, Offset, Bytes, []) + end; + _ -> + Transport:send(Socket, Data) + end || Data <- Acc], + State. + +prepare_data(State, [], Acc, []) -> + {lists:reverse(Acc), State}; +prepare_data(State, [], Acc, Buffer) -> + {lists:reverse([lists:reverse(Buffer)|Acc]), State}; +prepare_data(State0, [{StreamID, IsFin, SendData}|Tail], Acc0, Buffer0) -> + {Acc, Buffer, State} = prepare_data(State0, StreamID, IsFin, SendData, Acc0, Buffer0), + prepare_data(State, Tail, Acc, Buffer). + +prepare_data(State0, StreamID, IsFin, [], Acc, Buffer) -> + State = maybe_terminate_stream(State0, StreamID, IsFin), + {Acc, Buffer, State}; +prepare_data(State0, StreamID, IsFin, [FrameData|Tail], Acc, Buffer) -> + FrameIsFin = case Tail of + [] -> IsFin; + _ -> nofin + end, + case prepare_data_frame(State0, StreamID, FrameIsFin, FrameData) of + {{MoreData, Sendfile}, State} when is_tuple(Sendfile) -> + case Buffer of + [] -> + prepare_data(State, StreamID, IsFin, Tail, + [Sendfile, MoreData|Acc], []); + _ -> + prepare_data(State, StreamID, IsFin, Tail, + [Sendfile, lists:reverse([MoreData|Buffer])|Acc], []) + end; + {MoreData, State} -> + prepare_data(State, StreamID, IsFin, Tail, + Acc, [MoreData|Buffer]) + end. + +prepare_data_frame(State, StreamID, IsFin, {data, Data}) -> + {cow_http2:data(StreamID, IsFin, Data), + State}; +prepare_data_frame(State, StreamID, IsFin, Sendfile={sendfile, _, Bytes, _}) -> + {{cow_http2:data_header(StreamID, IsFin, Bytes), Sendfile}, + State}; +%% The stream is terminated in cow_http2_machine:prepare_trailers. +prepare_data_frame(State=#state{http2_machine=HTTP2Machine0}, + StreamID, nofin, {trailers, Trailers}) -> + {ok, HeaderBlock, HTTP2Machine} + = cow_http2_machine:prepare_trailers(StreamID, HTTP2Machine0, Trailers), + {cow_http2:headers(StreamID, fin, HeaderBlock), + State#state{http2_machine=HTTP2Machine}}. + +%% After we have sent or queued data we may need to set or clear an alarm. +%% We do this by comparing the HTTP2Machine buffer state before/after for +%% the relevant streams. +maybe_send_data_alarm(State=#state{opts=Opts, http2_machine=HTTP2Machine}, HTTP2Machine0, StreamID) -> + ConnBufferSizeBefore = cow_http2_machine:get_connection_local_buffer_size(HTTP2Machine0), + ConnBufferSizeAfter = cow_http2_machine:get_connection_local_buffer_size(HTTP2Machine), + {ok, StreamBufferSizeBefore} = cow_http2_machine:get_stream_local_buffer_size(StreamID, HTTP2Machine0), + %% When the stream ends up closed after it finished sending data, + %% we do not want to trigger an alarm. We act as if the buffer + %% size did not change. + StreamBufferSizeAfter = case cow_http2_machine:get_stream_local_buffer_size(StreamID, HTTP2Machine) of + {ok, BSA} -> BSA; + {error, closed} -> StreamBufferSizeBefore + end, + MaxConnBufferSize = maps:get(max_connection_buffer_size, Opts, 16000000), + MaxStreamBufferSize = maps:get(max_stream_buffer_size, Opts, 8000000), + %% I do not want to document these internal events yet. I am not yet + %% convinced it should be {alarm, Name, on|off} and not {internal_event, E} + %% or something else entirely. 
Though alarms are probably right. + if + ConnBufferSizeBefore >= MaxConnBufferSize, ConnBufferSizeAfter < MaxConnBufferSize -> + connection_alarm(State, connection_buffer_full, off); + ConnBufferSizeBefore < MaxConnBufferSize, ConnBufferSizeAfter >= MaxConnBufferSize -> + connection_alarm(State, connection_buffer_full, on); + StreamBufferSizeBefore >= MaxStreamBufferSize, StreamBufferSizeAfter < MaxStreamBufferSize -> + stream_alarm(State, StreamID, stream_buffer_full, off); + StreamBufferSizeBefore < MaxStreamBufferSize, StreamBufferSizeAfter >= MaxStreamBufferSize -> + stream_alarm(State, StreamID, stream_buffer_full, on); + true -> + State + end. + +connection_alarm(State0=#state{streams=Streams}, Name, Value) -> + lists:foldl(fun(StreamID, State) -> + stream_alarm(State, StreamID, Name, Value) + end, State0, maps:keys(Streams)). + +stream_alarm(State, StreamID, Name, Value) -> + info(State, StreamID, {alarm, Name, Value}). + +%% Terminate a stream or the connection. + +%% We may have to cancel streams even if we receive multiple +%% GOAWAY frames as the LastStreamID value may be lower than +%% the one previously received. +goaway(State0=#state{socket=Socket, transport=Transport, http2_machine=HTTP2Machine0, + http2_status=Status, streams=Streams0}, {goaway, LastStreamID, Reason, _}) + when Status =:= connected; Status =:= closing_initiated; Status =:= closing -> + Streams = goaway_streams(State0, maps:to_list(Streams0), LastStreamID, + {stop, {goaway, Reason}, 'The connection is going away.'}, []), + State = State0#state{streams=maps:from_list(Streams)}, + if + Status =:= connected; Status =:= closing_initiated -> + {OurLastStreamID, HTTP2Machine} = + cow_http2_machine:set_last_streamid(HTTP2Machine0), + Transport:send(Socket, cow_http2:goaway( + OurLastStreamID, no_error, <<>>)), + State#state{http2_status=closing, + http2_machine=HTTP2Machine}; + true -> + State + end; +%% We terminate the connection immediately if it hasn't fully been initialized. +goaway(State, {goaway, _, Reason, _}) -> + terminate(State, {stop, {goaway, Reason}, 'The connection is going away.'}). + +%% Cancel client-initiated streams that are above LastStreamID. +goaway_streams(_, [], _, _, Acc) -> + Acc; +goaway_streams(State, [{StreamID, #stream{state=StreamState}}|Tail], LastStreamID, Reason, Acc) + when StreamID > LastStreamID, (StreamID rem 2) =:= 0 -> + terminate_stream_handler(State, StreamID, Reason, StreamState), + goaway_streams(State, Tail, LastStreamID, Reason, Acc); +goaway_streams(State, [Stream|Tail], LastStreamID, Reason, Acc) -> + goaway_streams(State, Tail, LastStreamID, Reason, [Stream|Acc]). + +%% A server that is attempting to gracefully shut down a connection SHOULD send +%% an initial GOAWAY frame with the last stream identifier set to 2^31-1 and a +%% NO_ERROR code. This signals to the client that a shutdown is imminent and +%% that initiating further requests is prohibited. After allowing time for any +%% in-flight stream creation (at least one round-trip time), the server can send +%% another GOAWAY frame with an updated last stream identifier. This ensures +%% that a connection can be cleanly shut down without losing requests. +-spec initiate_closing(#state{}, _) -> #state{}. 
+initiate_closing(State=#state{http2_status=connected, socket=Socket, + transport=Transport, opts=Opts}, Reason) -> + Transport:send(Socket, cow_http2:goaway(16#7fffffff, no_error, <<>>)), + Timeout = maps:get(goaway_initial_timeout, Opts, 1000), + Message = {goaway_initial_timeout, Reason}, + set_timeout(State#state{http2_status=closing_initiated}, Timeout, Message); +initiate_closing(State=#state{http2_status=Status}, _Reason) + when Status =:= closing_initiated; Status =:= closing -> + %% This happens if sys:terminate/2,3 is called twice or if the supervisor + %% tells us to shutdown after sys:terminate/2,3 is called or vice versa. + State; +initiate_closing(State, Reason) -> + terminate(State, {stop, stop_reason(Reason), 'The connection is going away.'}). + +%% Switch to 'closing' state and stop accepting new streams. +-spec closing(#state{}, Reason :: term()) -> #state{}. +closing(State=#state{streams=Streams}, Reason) when Streams =:= #{} -> + terminate(State, Reason); +closing(State=#state{http2_status=closing_initiated, + http2_machine=HTTP2Machine0, socket=Socket, transport=Transport}, + Reason) -> + %% Stop accepting new streams. + {LastStreamID, HTTP2Machine} = + cow_http2_machine:set_last_streamid(HTTP2Machine0), + Transport:send(Socket, cow_http2:goaway(LastStreamID, no_error, <<>>)), + closing(State#state{http2_status=closing, http2_machine=HTTP2Machine}, Reason); +closing(State=#state{http2_status=closing, opts=Opts}, Reason) -> + %% If client sent GOAWAY, we may already be in 'closing' but without the + %% goaway complete timeout set. + Timeout = maps:get(goaway_complete_timeout, Opts, 3000), + Message = {goaway_complete_timeout, Reason}, + set_timeout(State, Timeout, Message). + +stop_reason({stop, Reason, _}) -> Reason; +stop_reason(Reason) -> Reason. + +-spec terminate(#state{}, _) -> no_return(). +terminate(undefined, Reason) -> + exit({shutdown, Reason}); +terminate(State=#state{socket=Socket, transport=Transport, http2_status=Status, + http2_machine=HTTP2Machine, streams=Streams, children=Children}, Reason) + when Status =:= connected; Status =:= closing_initiated; Status =:= closing -> + %% @todo We might want to optionally send the Reason value + %% as debug data in the GOAWAY frame here. Perhaps more. + if + Status =:= connected; Status =:= closing_initiated -> + Transport:send(Socket, cow_http2:goaway( + cow_http2_machine:get_last_streamid(HTTP2Machine), + terminate_reason(Reason), <<>>)); + %% We already sent the GOAWAY frame. + Status =:= closing -> + ok + end, + terminate_all_streams(State, maps:to_list(Streams), Reason), + cowboy_children:terminate(Children), + terminate_linger(State), + exit({shutdown, Reason}); +terminate(#state{socket=Socket, transport=Transport}, Reason) -> + Transport:close(Socket), + exit({shutdown, Reason}). + +terminate_reason({connection_error, Reason, _}) -> Reason; +terminate_reason({stop, _, _}) -> no_error; +terminate_reason({socket_error, _, _}) -> internal_error; +terminate_reason({internal_error, _, _}) -> internal_error. + +terminate_all_streams(_, [], _) -> + ok; +terminate_all_streams(State, [{StreamID, #stream{state=StreamState}}|Tail], Reason) -> + terminate_stream_handler(State, StreamID, Reason, StreamState), + terminate_all_streams(State, Tail, Reason). + +%% This code is copied from cowboy_http. 
+terminate_linger(State=#state{socket=Socket, transport=Transport, opts=Opts}) -> + case Transport:shutdown(Socket, write) of + ok -> + case maps:get(linger_timeout, Opts, 1000) of + 0 -> + ok; + infinity -> + terminate_linger_before_loop(State, undefined, Transport:messages()); + Timeout -> + TimerRef = erlang:start_timer(Timeout, self(), linger_timeout), + terminate_linger_before_loop(State, TimerRef, Transport:messages()) + end; + {error, _} -> + ok + end. + +terminate_linger_before_loop(State, TimerRef, Messages) -> + %% We may already be in active mode when we do this + %% but it's OK because we are shutting down anyway. + case setopts_active(State) of + ok -> + terminate_linger_loop(State, TimerRef, Messages); + {error, _} -> + ok + end. + +terminate_linger_loop(State=#state{socket=Socket}, TimerRef, Messages) -> + receive + {OK, Socket, _} when OK =:= element(1, Messages) -> + terminate_linger_loop(State, TimerRef, Messages); + {Closed, Socket} when Closed =:= element(2, Messages) -> + ok; + {Error, Socket, _} when Error =:= element(3, Messages) -> + ok; + {Passive, Socket} when Passive =:= tcp_passive; Passive =:= ssl_passive -> + terminate_linger_before_loop(State, TimerRef, Messages); + {timeout, TimerRef, linger_timeout} -> + ok; + _ -> + terminate_linger_loop(State, TimerRef, Messages) + end. + +%% @todo Don't send an RST_STREAM if one was already sent. +reset_stream(State0=#state{socket=Socket, transport=Transport, + http2_machine=HTTP2Machine0}, StreamID, Error) -> + Reason = case Error of + {internal_error, _, _} -> internal_error; + {stream_error, Reason0, _} -> Reason0 + end, + Transport:send(Socket, cow_http2:rst_stream(StreamID, Reason)), + State1 = case cow_http2_machine:reset_stream(StreamID, HTTP2Machine0) of + {ok, HTTP2Machine} -> + terminate_stream(State0#state{http2_machine=HTTP2Machine}, StreamID, Error); + {error, not_found} -> + terminate_stream(State0, StreamID, Error) + end, + case reset_rate(State1) of + {ok, State} -> + State; + error -> + terminate(State1, {connection_error, enhance_your_calm, + 'Stream reset rate larger than configuration allows. Flood? (CVE-2019-9514)'}) + end. + +reset_rate(State0=#state{reset_rate_num=Num0, reset_rate_time=Time}) -> + case Num0 - 1 of + 0 -> + CurrentTime = erlang:monotonic_time(millisecond), + if + CurrentTime < Time -> + error; + true -> + %% When the option has a period of infinity we cannot reach this clause. + {ok, init_reset_rate_limiting(State0, CurrentTime)} + end; + Num -> + {ok, State0#state{reset_rate_num=Num}} + end. + +stop_stream(State=#state{http2_machine=HTTP2Machine}, StreamID) -> + case cow_http2_machine:get_stream_local_state(StreamID, HTTP2Machine) of + %% When the stream terminates normally (without sending RST_STREAM) + %% and no response was sent, we need to send a proper response back to the client. + %% We delay the termination of the stream until the response is fully sent. + {ok, idle, _} -> + info(stopping(State, StreamID), StreamID, {response, 204, #{}, <<>>}); + %% When a response was sent but not terminated, we need to close the stream. + %% We delay the termination of the stream until the response is fully sent. + {ok, nofin, fin} -> + stopping(State, StreamID); + %% We only send a final DATA frame if there isn't one queued yet. + {ok, nofin, _} -> + info(stopping(State, StreamID), StreamID, {data, fin, <<>>}); + %% When a response was sent fully we can terminate the stream, + %% regardless of the stream being in half-closed or closed state. 
+ _ -> + terminate_stream(State, StreamID) + end. + +stopping(State=#state{streams=Streams}, StreamID) -> + #{StreamID := Stream} = Streams, + State#state{streams=Streams#{StreamID => Stream#stream{status=stopping}}}. + +%% If we finished sending data and the stream is stopping, terminate it. +maybe_terminate_stream(State=#state{streams=Streams}, StreamID, fin) -> + case Streams of + #{StreamID := #stream{status=stopping}} -> + terminate_stream(State, StreamID); + _ -> + State + end; +maybe_terminate_stream(State, _, _) -> + State. + +%% When the stream stops normally without reading the request +%% body fully we need to tell the client to stop sending it. +%% We do this by sending an RST_STREAM with reason NO_ERROR. (RFC7540 8.1.0) +terminate_stream(State0=#state{socket=Socket, transport=Transport, + http2_machine=HTTP2Machine0}, StreamID) -> + State = case cow_http2_machine:get_stream_local_state(StreamID, HTTP2Machine0) of + {ok, fin, _} -> + Transport:send(Socket, cow_http2:rst_stream(StreamID, no_error)), + {ok, HTTP2Machine} = cow_http2_machine:reset_stream(StreamID, HTTP2Machine0), + State0#state{http2_machine=HTTP2Machine}; + {error, closed} -> + State0 + end, + terminate_stream(State, StreamID, normal). + +%% We remove the stream flow from the connection flow. Any further +%% data received for this stream is therefore fully contained within +%% the extra window we allocated for this stream. +terminate_stream(State=#state{flow=Flow, streams=Streams0, children=Children0}, StreamID, Reason) -> + case maps:take(StreamID, Streams0) of + {#stream{flow=StreamFlow, state=StreamState}, Streams} -> + terminate_stream_handler(State, StreamID, Reason, StreamState), + Children = cowboy_children:shutdown(Children0, StreamID), + State#state{flow=Flow - StreamFlow, streams=Streams, children=Children}; + error -> + State + end. + +terminate_stream_handler(#state{opts=Opts}, StreamID, Reason, StreamState) -> + try + cowboy_stream:terminate(StreamID, Reason, StreamState) + catch Class:Exception:Stacktrace -> + cowboy:log(cowboy_stream:make_error_log(terminate, + [StreamID, Reason, StreamState], + Class, Exception, Stacktrace), Opts) + end. + +%% System callbacks. + +-spec system_continue(_, _, {#state{}, binary()}) -> ok. +system_continue(_, _, {State, Buffer}) -> + loop(State, Buffer). + +-spec system_terminate(any(), _, _, {#state{}, binary()}) -> no_return(). +system_terminate(Reason0, _, _, {State, Buffer}) -> + Reason = {stop, {exit, Reason0}, 'sys:terminate/2,3 was called.'}, + loop(initiate_closing(State, Reason), Buffer). + +-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::{#state{}, binary()}. +system_code_change(Misc, _, _, _) -> + {ok, Misc}. diff --git a/src/wsSrv/cowboy_loop.erl b/src/wsSrv/cowboy_loop.erl new file mode 100644 index 0000000..21eb96e --- /dev/null +++ b/src/wsSrv/cowboy_loop.erl @@ -0,0 +1,108 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_loop). +-behaviour(cowboy_sub_protocol). + +-export([upgrade/4]). +-export([upgrade/5]). +-export([loop/4]). + +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-callback init(Req, any()) + -> {ok | module(), Req, any()} + | {module(), Req, any(), any()} + when Req::cowboy_req:req(). + +-callback info(any(), Req, State) + -> {ok, Req, State} + | {ok, Req, State, hibernate} + | {stop, Req, State} + when Req::cowboy_req:req(), State::any(). + +-callback terminate(any(), cowboy_req:req(), any()) -> ok. +-optional_callbacks([terminate/3]). + +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req, Env, Handler, HandlerState) -> + loop(Req, Env, Handler, HandlerState). + +-spec upgrade(Req, Env, module(), any(), hibernate) + -> {suspend, ?MODULE, loop, [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req, Env, Handler, HandlerState, hibernate) -> + suspend(Req, Env, Handler, HandlerState). + +-spec loop(Req, Env, module(), any()) + -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +%% @todo Handle system messages. +loop(Req=#{pid := Parent}, Env, Handler, HandlerState) -> + receive + %% System messages. + {'EXIT', Parent, Reason} -> + terminate(Req, Env, Handler, HandlerState, Reason); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], + {Req, Env, Handler, HandlerState}); + %% Calls from supervisor module. + {'$gen_call', From, Call} -> + cowboy_children:handle_supervisor_call(Call, From, [], ?MODULE), + loop(Req, Env, Handler, HandlerState); + Message -> + call(Req, Env, Handler, HandlerState, Message) + end. + +call(Req0, Env, Handler, HandlerState0, Message) -> + try Handler:info(Message, Req0, HandlerState0) of + {ok, Req, HandlerState} -> + loop(Req, Env, Handler, HandlerState); + {ok, Req, HandlerState, hibernate} -> + suspend(Req, Env, Handler, HandlerState); + {stop, Req, HandlerState} -> + terminate(Req, Env, Handler, HandlerState, stop) + catch Class:Reason:Stacktrace -> + cowboy_handler:terminate({crash, Class, Reason}, Req0, HandlerState0, Handler), + erlang:raise(Class, Reason, Stacktrace) + end. + +suspend(Req, Env, Handler, HandlerState) -> + {suspend, ?MODULE, loop, [Req, Env, Handler, HandlerState]}. + +terminate(Req, Env, Handler, HandlerState, Reason) -> + Result = cowboy_handler:terminate(Reason, Req, HandlerState, Handler), + {ok, Req, Env#{result => Result}}. + +%% System callbacks. + +-spec system_continue(_, _, {Req, Env, module(), any()}) + -> {ok, Req, Env} | {suspend, ?MODULE, loop, [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +system_continue(_, _, {Req, Env, Handler, HandlerState}) -> + loop(Req, Env, Handler, HandlerState). + +-spec system_terminate(any(), _, _, {Req, Env, module(), any()}) + -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +system_terminate(Reason, _, _, {Req, Env, Handler, HandlerState}) -> + terminate(Req, Env, Handler, HandlerState, Reason). 
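For reference, a minimal loop handler driven by the callbacks above; the module name and the {reply, Body} message are invented for the example, and some other process (or a timer) is expected to send that message to the request process:

-module(my_loop_handler_example).
-behaviour(cowboy_loop).
-export([init/2, info/3]).

init(Req, State) ->
    %% Switch to the cowboy_loop sub protocol; the request process then
    %% sits in cowboy_loop:loop/4 waiting for Erlang messages.
    {cowboy_loop, Req, State}.

info({reply, Body}, Req0, State) ->
    Req = cowboy_req:reply(200, #{<<"content-type">> => <<"text/plain">>}, Body, Req0),
    {stop, Req, State};
info(_Msg, Req, State) ->
    {ok, Req, State}.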
+ +-spec system_code_change(Misc, _, _, _) -> {ok, Misc} + when Misc::{cowboy_req:req(), cowboy_middleware:env(), module(), any()}. +system_code_change(Misc, _, _, _) -> + {ok, Misc}. diff --git a/src/wsSrv/cowboy_metrics_h.erl b/src/wsSrv/cowboy_metrics_h.erl new file mode 100644 index 0000000..4107aac --- /dev/null +++ b/src/wsSrv/cowboy_metrics_h.erl @@ -0,0 +1,331 @@ +%% Copyright (c) 2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_metrics_h). +-behavior(cowboy_stream). + +-export([init/3]). +-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). + +-type proc_metrics() :: #{pid() => #{ + %% Time at which the process spawned. + spawn := integer(), + + %% Time at which the process exited. + exit => integer(), + + %% Reason for the process exit. + reason => any() +}}. + +-type informational_metrics() :: #{ + %% Informational response status. + status := cowboy:http_status(), + + %% Headers sent with the informational response. + headers := cowboy:http_headers(), + + %% Time when the informational response was sent. + time := integer() +}. + +-type metrics() :: #{ + %% The identifier for this listener. + ref := ranch:ref(), + + %% The pid for this connection. + pid := pid(), + + %% The streamid also indicates the total number of requests on + %% this connection (StreamID div 2 + 1). + streamid := cowboy_stream:streamid(), + + %% The terminate reason is always useful. + reason := cowboy_stream:reason(), + + %% A filtered Req object or a partial Req object + %% depending on how far the request got to. + req => cowboy_req:req(), + partial_req => cowboy_stream:partial_req(), + + %% Response status. + resp_status := cowboy:http_status(), + + %% Filtered response headers. + resp_headers := cowboy:http_headers(), + + %% Start/end of the processing of the request. + %% + %% This represents the time from this stream handler's init + %% to terminate. + req_start => integer(), + req_end => integer(), + + %% Start/end of the receiving of the request body. + %% Begins when the first packet has been received. + req_body_start => integer(), + req_body_end => integer(), + + %% Start/end of the sending of the response. + %% Begins when we send the headers and ends on the final + %% packet of the response body. If everything is sent at + %% once these values are identical. + resp_start => integer(), + resp_end => integer(), + + %% For early errors all we get is the time we received it. + early_error_time => integer(), + + %% Start/end of spawned processes. This is where most of + %% the user code lies, excluding stream handlers. On a + %% default Cowboy configuration there should be only one + %% process: the request process. + procs => proc_metrics(), + + %% Informational responses sent before the final response. 
+ informational => [informational_metrics()], + + %% Length of the request and response bodies. This does + %% not include the framing. + req_body_length => non_neg_integer(), + resp_body_length => non_neg_integer(), + + %% Additional metadata set by the user. + user_data => map() +}. +-export_type([metrics/0]). + +-type metrics_callback() :: fun((metrics()) -> any()). +-export_type([metrics_callback/0]). + +-record(state, { + next :: any(), + callback :: fun((metrics()) -> any()), + resp_headers_filter :: undefined | fun((cowboy:http_headers()) -> cowboy:http_headers()), + req :: map(), + resp_status :: undefined | cowboy:http_status(), + resp_headers :: undefined | cowboy:http_headers(), + ref :: ranch:ref(), + req_start :: integer(), + req_end :: undefined | integer(), + req_body_start :: undefined | integer(), + req_body_end :: undefined | integer(), + resp_start :: undefined | integer(), + resp_end :: undefined | integer(), + procs = #{} :: proc_metrics(), + informational = [] :: [informational_metrics()], + req_body_length = 0 :: non_neg_integer(), + resp_body_length = 0 :: non_neg_integer(), + user_data = #{} :: map() +}). + +-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) + -> {[{spawn, pid(), timeout()}], #state{}}. +init(StreamID, Req=#{ref := Ref}, Opts=#{metrics_callback := Fun}) -> + ReqStart = erlang:monotonic_time(), + {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts), + FilteredReq = case maps:get(metrics_req_filter, Opts, undefined) of + undefined -> Req; + ReqFilter -> ReqFilter(Req) + end, + RespHeadersFilter = maps:get(metrics_resp_headers_filter, Opts, undefined), + {Commands, fold(Commands, #state{ + next=Next, + callback=Fun, + resp_headers_filter=RespHeadersFilter, + req=FilteredReq, + ref=Ref, + req_start=ReqStart + })}. + +-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +data(StreamID, IsFin=fin, Data, State=#state{req_body_start=undefined}) -> + ReqBody = erlang:monotonic_time(), + do_data(StreamID, IsFin, Data, State#state{ + req_body_start=ReqBody, + req_body_end=ReqBody, + req_body_length=byte_size(Data) + }); +data(StreamID, IsFin=fin, Data, State=#state{req_body_length=ReqBodyLen}) -> + ReqBodyEnd = erlang:monotonic_time(), + do_data(StreamID, IsFin, Data, State#state{ + req_body_end=ReqBodyEnd, + req_body_length=ReqBodyLen + byte_size(Data) + }); +data(StreamID, IsFin, Data, State=#state{req_body_start=undefined}) -> + ReqBodyStart = erlang:monotonic_time(), + do_data(StreamID, IsFin, Data, State#state{ + req_body_start=ReqBodyStart, + req_body_length=byte_size(Data) + }); +data(StreamID, IsFin, Data, State=#state{req_body_length=ReqBodyLen}) -> + do_data(StreamID, IsFin, Data, State#state{ + req_body_length=ReqBodyLen + byte_size(Data) + }). + +do_data(StreamID, IsFin, Data, State0=#state{next=Next0}) -> + {Commands, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0), + {Commands, fold(Commands, State0#state{next=Next})}. + +-spec info(cowboy_stream:streamid(), any(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +info(StreamID, Info={'EXIT', Pid, Reason}, State0=#state{procs=Procs}) -> + ProcEnd = erlang:monotonic_time(), + P = maps:get(Pid, Procs), + State = State0#state{procs=Procs#{Pid => P#{ + exit => ProcEnd, + reason => Reason + }}}, + do_info(StreamID, Info, State); +info(StreamID, Info, State) -> + do_info(StreamID, Info, State). 
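%% A sketch of a metrics_callback that could consume the metrics map above;
%% it is not part of this module and would live in user code. req_start and
%% req_end are erlang:monotonic_time/0 values in native units (see init/3
%% and terminate/3 below), hence the conversion before logging.
log_duration(Metrics=#{req_start := ReqStart, req_end := ReqEnd}) ->
	DurationMs = erlang:convert_time_unit(ReqEnd - ReqStart, native, millisecond),
	logger:info("stream ~p terminated with reason ~p, status ~p, in ~bms",
		[maps:get(streamid, Metrics), maps:get(reason, Metrics),
			maps:get(resp_status, Metrics, undefined), DurationMs]);
log_duration(Metrics) ->
	%% Early errors carry early_error_time instead of req_start/req_end.
	logger:notice("early error metrics: ~p", [Metrics]).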
+ +do_info(StreamID, Info, State0=#state{next=Next0}) -> + {Commands, Next} = cowboy_stream:info(StreamID, Info, Next0), + {Commands, fold(Commands, State0#state{next=Next})}. + +fold([], State) -> + State; +fold([{spawn, Pid, _}|Tail], State0=#state{procs=Procs}) -> + ProcStart = erlang:monotonic_time(), + State = State0#state{procs=Procs#{Pid => #{spawn => ProcStart}}}, + fold(Tail, State); +fold([{inform, Status, Headers}|Tail], + State=#state{informational=Infos}) -> + Time = erlang:monotonic_time(), + fold(Tail, State#state{informational=[#{ + status => Status, + headers => Headers, + time => Time + }|Infos]}); +fold([{response, Status, Headers, Body}|Tail], + State=#state{resp_headers_filter=RespHeadersFilter}) -> + Resp = erlang:monotonic_time(), + fold(Tail, State#state{ + resp_status=Status, + resp_headers=case RespHeadersFilter of + undefined -> Headers; + _ -> RespHeadersFilter(Headers) + end, + resp_start=Resp, + resp_end=Resp, + resp_body_length=resp_body_length(Body) + }); +fold([{error_response, Status, Headers, Body}|Tail], + State=#state{resp_status=RespStatus}) -> + %% The error_response command only results in a response + %% if no response was sent before. + case RespStatus of + undefined -> + fold([{response, Status, Headers, Body}|Tail], State); + _ -> + fold(Tail, State) + end; +fold([{headers, Status, Headers}|Tail], + State=#state{resp_headers_filter=RespHeadersFilter}) -> + RespStart = erlang:monotonic_time(), + fold(Tail, State#state{ + resp_status=Status, + resp_headers=case RespHeadersFilter of + undefined -> Headers; + _ -> RespHeadersFilter(Headers) + end, + resp_start=RespStart + }); +%% @todo It might be worthwhile to keep the sendfile information around, +%% especially if these frames ultimately result in a sendfile syscall. +fold([{data, nofin, Data}|Tail], State=#state{resp_body_length=RespBodyLen}) -> + fold(Tail, State#state{ + resp_body_length=RespBodyLen + resp_body_length(Data) + }); +fold([{data, fin, Data}|Tail], State=#state{resp_body_length=RespBodyLen}) -> + RespEnd = erlang:monotonic_time(), + fold(Tail, State#state{ + resp_end=RespEnd, + resp_body_length=RespBodyLen + resp_body_length(Data) + }); +fold([{set_options, SetOpts}|Tail], State0=#state{user_data=OldUserData}) -> + State = case SetOpts of + #{metrics_user_data := NewUserData} -> + State0#state{user_data=maps:merge(OldUserData, NewUserData)}; + _ -> + State0 + end, + fold(Tail, State); +fold([_|Tail], State) -> + fold(Tail, State). + +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> any(). +terminate(StreamID, Reason, #state{next=Next, callback=Fun, + req=Req, resp_status=RespStatus, resp_headers=RespHeaders, ref=Ref, + req_start=ReqStart, req_body_start=ReqBodyStart, + req_body_end=ReqBodyEnd, resp_start=RespStart, resp_end=RespEnd, + procs=Procs, informational=Infos, user_data=UserData, + req_body_length=ReqBodyLen, resp_body_length=RespBodyLen}) -> + Res = cowboy_stream:terminate(StreamID, Reason, Next), + ReqEnd = erlang:monotonic_time(), + Metrics = #{ + ref => Ref, + pid => self(), + streamid => StreamID, + reason => Reason, + req => Req, + resp_status => RespStatus, + resp_headers => RespHeaders, + req_start => ReqStart, + req_end => ReqEnd, + req_body_start => ReqBodyStart, + req_body_end => ReqBodyEnd, + resp_start => RespStart, + resp_end => RespEnd, + procs => Procs, + informational => lists:reverse(Infos), + req_body_length => ReqBodyLen, + resp_body_length => RespBodyLen, + user_data => UserData + }, + Fun(Metrics), + Res. 
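%% A sketch of how this stream handler is typically enabled when starting a
%% listener. The listener name, port and the log_duration/1 callback sketched
%% above are placeholders, not part of this patch; cowboy_metrics_h runs in
%% front of cowboy_stream_h so it can observe all commands.
start_listener_with_metrics(Dispatch) ->
	cowboy:start_clear(example_http_listener, [{port, 8080}], #{
		env => #{dispatch => Dispatch},
		stream_handlers => [cowboy_metrics_h, cowboy_stream_h],
		metrics_callback => fun log_duration/1
	}).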
+ +-spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(), + cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp + when Resp::cowboy_stream:resp_command(). +early_error(StreamID, Reason, PartialReq=#{ref := Ref}, Resp0, Opts=#{metrics_callback := Fun}) -> + Time = erlang:monotonic_time(), + Resp = {response, RespStatus, RespHeaders, RespBody} + = cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp0, Opts), + %% As far as metrics go we are limited in what we can provide + %% in this case. + Metrics = #{ + ref => Ref, + pid => self(), + streamid => StreamID, + reason => Reason, + partial_req => PartialReq, + resp_status => RespStatus, + resp_headers => RespHeaders, + early_error_time => Time, + resp_body_length => resp_body_length(RespBody) + }, + Fun(Metrics), + Resp. + +resp_body_length({sendfile, _, Len, _}) -> + Len; +resp_body_length(Data) -> + iolist_size(Data). diff --git a/src/wsSrv/cowboy_middleware.erl b/src/wsSrv/cowboy_middleware.erl new file mode 100644 index 0000000..9a739f1 --- /dev/null +++ b/src/wsSrv/cowboy_middleware.erl @@ -0,0 +1,24 @@ +%% Copyright (c) 2013-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_middleware). + +-type env() :: #{atom() => any()}. +-export_type([env/0]). + +-callback execute(Req, Env) + -> {ok, Req, Env} + | {suspend, module(), atom(), [any()]} + | {stop, Req} + when Req::cowboy_req:req(), Env::env(). diff --git a/src/wsSrv/cowboy_req.erl b/src/wsSrv/cowboy_req.erl new file mode 100644 index 0000000..90c5a3a --- /dev/null +++ b/src/wsSrv/cowboy_req.erl @@ -0,0 +1,1016 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% Copyright (c) 2011, Anthony Ramine +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_req). + +%% Request. +-export([method/1]). +-export([version/1]). +-export([peer/1]). +-export([sock/1]). +-export([cert/1]). +-export([scheme/1]). +-export([host/1]). +-export([host_info/1]). +-export([port/1]). +-export([path/1]). +-export([path_info/1]). +-export([qs/1]). +-export([parse_qs/1]). +-export([match_qs/2]). +-export([uri/1]). +-export([uri/2]). +-export([binding/2]). +-export([binding/3]). 
+-export([bindings/1]). +-export([header/2]). +-export([header/3]). +-export([headers/1]). +-export([parse_header/2]). +-export([parse_header/3]). +-export([filter_cookies/2]). +-export([parse_cookies/1]). +-export([match_cookies/2]). + +%% Request body. +-export([has_body/1]). +-export([body_length/1]). +-export([read_body/1]). +-export([read_body/2]). +-export([read_urlencoded_body/1]). +-export([read_urlencoded_body/2]). +-export([read_and_match_urlencoded_body/2]). +-export([read_and_match_urlencoded_body/3]). + +%% Multipart. +-export([read_part/1]). +-export([read_part/2]). +-export([read_part_body/1]). +-export([read_part_body/2]). + +%% Response. +-export([set_resp_cookie/3]). +-export([set_resp_cookie/4]). +-export([resp_header/2]). +-export([resp_header/3]). +-export([resp_headers/1]). +-export([set_resp_header/3]). +-export([set_resp_headers/2]). +-export([has_resp_header/2]). +-export([delete_resp_header/2]). +-export([set_resp_body/2]). +%% @todo set_resp_body/3 with a ContentType or even Headers argument, to set content headers. +-export([has_resp_body/1]). +-export([inform/2]). +-export([inform/3]). +-export([reply/2]). +-export([reply/3]). +-export([reply/4]). +-export([stream_reply/2]). +-export([stream_reply/3]). +%% @todo stream_body/2 (nofin) +-export([stream_body/3]). +%% @todo stream_events/2 (nofin) +-export([stream_events/3]). +-export([stream_trailers/2]). +-export([push/3]). +-export([push/4]). + +%% Stream handlers. +-export([cast/2]). + +%% Internal. +-export([response_headers/2]). + +-type read_body_opts() :: #{ + length => non_neg_integer() | infinity, + period => non_neg_integer(), + timeout => timeout() +}. +-export_type([read_body_opts/0]). + +%% While sendfile allows a Len of 0 that means "everything past Offset", +%% Cowboy expects the real length as it is used as metadata. +-type resp_body() :: iodata() + | {sendfile, non_neg_integer(), non_neg_integer(), file:name_all()}. +-export_type([resp_body/0]). + +-type push_opts() :: #{ + method => binary(), + scheme => binary(), + host => binary(), + port => inet:port_number(), + qs => binary() +}. +-export_type([push_opts/0]). + +-type req() :: #{ + %% Public interface. + method := binary(), + version := cowboy:http_version() | atom(), + scheme := binary(), + host := binary(), + port := inet:port_number(), + path := binary(), + qs := binary(), + headers := cowboy:http_headers(), + peer := {inet:ip_address(), inet:port_number()}, + sock := {inet:ip_address(), inet:port_number()}, + cert := binary() | undefined, + + %% Private interface. + ref := ranch:ref(), + pid := pid(), + streamid := cowboy_stream:streamid(), + + host_info => cowboy_router:tokens(), + path_info => cowboy_router:tokens(), + bindings => cowboy_router:bindings(), + + has_body := boolean(), + body_length := non_neg_integer() | undefined, + has_read_body => true, + multipart => {binary(), binary()} | done, + + has_sent_resp => headers | true, + resp_cookies => #{iodata() => iodata()}, + resp_headers => #{binary() => iodata()}, + resp_body => resp_body(), + + proxy_header => ranch_proxy_header:proxy_info(), + media_type => {binary(), binary(), [{binary(), binary()}]}, + language => binary() | undefined, + charset => binary() | undefined, + range => {binary(), binary() + | [{non_neg_integer(), non_neg_integer() | infinity} | neg_integer()]}, + websocket_version => 7 | 8 | 13, + + %% The user is encouraged to use the Req to store information + %% when no better solution is available. + _ => _ +}. +-export_type([req/0]). + +%% Request. 
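%% A sketch, not part of this module, of how the req() map above is normally
%% consumed from a plain HTTP handler's init/2 through the accessors defined
%% below; the log line and 204 reply are illustrative only.
init(Req, State) ->
	Method = cowboy_req:method(Req),
	Path = cowboy_req:path(Req),
	{PeerIP, PeerPort} = cowboy_req:peer(Req),
	Agent = cowboy_req:header(<<"user-agent">>, Req, <<"unknown">>),
	logger:debug("~s ~s from ~p:~p (~s)", [Method, Path, PeerIP, PeerPort, Agent]),
	{ok, cowboy_req:reply(204, Req), State}.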
+ +-spec method(req()) -> binary(). +method(#{method := Method}) -> + Method. + +-spec version(req()) -> cowboy:http_version(). +version(#{version := Version}) -> + Version. + +-spec peer(req()) -> {inet:ip_address(), inet:port_number()}. +peer(#{peer := Peer}) -> + Peer. + +-spec sock(req()) -> {inet:ip_address(), inet:port_number()}. +sock(#{sock := Sock}) -> + Sock. + +-spec cert(req()) -> binary() | undefined. +cert(#{cert := Cert}) -> + Cert. + +-spec scheme(req()) -> binary(). +scheme(#{scheme := Scheme}) -> + Scheme. + +-spec host(req()) -> binary(). +host(#{host := Host}) -> + Host. + +%% @todo The host_info is undefined if cowboy_router isn't used. Do we want to crash? +-spec host_info(req()) -> cowboy_router:tokens() | undefined. +host_info(#{host_info := HostInfo}) -> + HostInfo. + +-spec port(req()) -> inet:port_number(). +port(#{port := Port}) -> + Port. + +-spec path(req()) -> binary(). +path(#{path := Path}) -> + Path. + +%% @todo The path_info is undefined if cowboy_router isn't used. Do we want to crash? +-spec path_info(req()) -> cowboy_router:tokens() | undefined. +path_info(#{path_info := PathInfo}) -> + PathInfo. + +-spec qs(req()) -> binary(). +qs(#{qs := Qs}) -> + Qs. + +%% @todo Might be useful to limit the number of keys. +-spec parse_qs(req()) -> [{binary(), binary() | true}]. +parse_qs(#{qs := Qs}) -> + try + cow_qs:parse_qs(Qs) + catch _:_:Stacktrace -> + erlang:raise(exit, {request_error, qs, + 'Malformed query string; application/x-www-form-urlencoded expected.' + }, Stacktrace) + end. + +-spec match_qs(cowboy:fields(), req()) -> map(). +match_qs(Fields, Req) -> + case filter(Fields, kvlist_to_map(Fields, parse_qs(Req))) of + {ok, Map} -> + Map; + {error, Errors} -> + exit({request_error, {match_qs, Errors}, + 'Query string validation constraints failed for the reasons provided.'}) + end. + +-spec uri(req()) -> iodata(). +uri(Req) -> + uri(Req, #{}). + +-spec uri(req(), map()) -> iodata(). +uri(#{scheme := Scheme0, host := Host0, port := Port0, + path := Path0, qs := Qs0}, Opts) -> + Scheme = case maps:get(scheme, Opts, Scheme0) of + S = undefined -> S; + S -> iolist_to_binary(S) + end, + Host = maps:get(host, Opts, Host0), + Port = maps:get(port, Opts, Port0), + {Path, Qs} = case maps:get(path, Opts, Path0) of + <<"*">> -> {<<>>, <<>>}; + P -> {P, maps:get(qs, Opts, Qs0)} + end, + Fragment = maps:get(fragment, Opts, undefined), + [uri_host(Scheme, Scheme0, Port, Host), uri_path(Path), uri_qs(Qs), uri_fragment(Fragment)]. + +uri_host(_, _, _, undefined) -> <<>>; +uri_host(Scheme, Scheme0, Port, Host) -> + case iolist_size(Host) of + 0 -> <<>>; + _ -> [uri_scheme(Scheme), <<"//">>, Host, uri_port(Scheme, Scheme0, Port)] + end. + +uri_scheme(undefined) -> <<>>; +uri_scheme(Scheme) -> + case iolist_size(Scheme) of + 0 -> Scheme; + _ -> [Scheme, $:] + end. + +uri_port(_, _, undefined) -> <<>>; +uri_port(undefined, <<"http">>, 80) -> <<>>; +uri_port(undefined, <<"https">>, 443) -> <<>>; +uri_port(<<"http">>, _, 80) -> <<>>; +uri_port(<<"https">>, _, 443) -> <<>>; +uri_port(_, _, Port) -> + [$:, integer_to_binary(Port)]. + +uri_path(undefined) -> <<>>; +uri_path(Path) -> Path. + +uri_qs(undefined) -> <<>>; +uri_qs(Qs) -> + case iolist_size(Qs) of + 0 -> Qs; + _ -> [$?, Qs] + end. + +uri_fragment(undefined) -> <<>>; +uri_fragment(Fragment) -> + case iolist_size(Fragment) of + 0 -> Fragment; + _ -> [$#, Fragment] + end. + +-ifdef(TEST). 
+uri1_test() -> + <<"http://localhost/path">> = iolist_to_binary(uri(#{ + scheme => <<"http">>, host => <<"localhost">>, port => 80, + path => <<"/path">>, qs => <<>>})), + <<"http://localhost:443/path">> = iolist_to_binary(uri(#{ + scheme => <<"http">>, host => <<"localhost">>, port => 443, + path => <<"/path">>, qs => <<>>})), + <<"http://localhost:8080/path">> = iolist_to_binary(uri(#{ + scheme => <<"http">>, host => <<"localhost">>, port => 8080, + path => <<"/path">>, qs => <<>>})), + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(#{ + scheme => <<"http">>, host => <<"localhost">>, port => 8080, + path => <<"/path">>, qs => <<"dummy=2785">>})), + <<"https://localhost/path">> = iolist_to_binary(uri(#{ + scheme => <<"https">>, host => <<"localhost">>, port => 443, + path => <<"/path">>, qs => <<>>})), + <<"https://localhost:8443/path">> = iolist_to_binary(uri(#{ + scheme => <<"https">>, host => <<"localhost">>, port => 8443, + path => <<"/path">>, qs => <<>>})), + <<"https://localhost:8443/path?dummy=2785">> = iolist_to_binary(uri(#{ + scheme => <<"https">>, host => <<"localhost">>, port => 8443, + path => <<"/path">>, qs => <<"dummy=2785">>})), + ok. + +uri2_test() -> + Req = #{ + scheme => <<"http">>, host => <<"localhost">>, port => 8080, + path => <<"/path">>, qs => <<"dummy=2785">> + }, + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{})), + %% Disable individual components. + <<"//localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => undefined})), + <<"/path?dummy=2785">> = iolist_to_binary(uri(Req, #{host => undefined})), + <<"http://localhost/path?dummy=2785">> = iolist_to_binary(uri(Req, #{port => undefined})), + <<"http://localhost:8080?dummy=2785">> = iolist_to_binary(uri(Req, #{path => undefined})), + <<"http://localhost:8080/path">> = iolist_to_binary(uri(Req, #{qs => undefined})), + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{fragment => undefined})), + <<"http://localhost:8080">> = iolist_to_binary(uri(Req, #{path => undefined, qs => undefined})), + <<>> = iolist_to_binary(uri(Req, #{host => undefined, path => undefined, qs => undefined})), + %% Empty values. + <<"//localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => <<>>})), + <<"//localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => ""})), + <<"//localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => [<<>>]})), + <<"/path?dummy=2785">> = iolist_to_binary(uri(Req, #{host => <<>>})), + <<"/path?dummy=2785">> = iolist_to_binary(uri(Req, #{host => ""})), + <<"/path?dummy=2785">> = iolist_to_binary(uri(Req, #{host => [<<>>]})), + <<"http://localhost:8080?dummy=2785">> = iolist_to_binary(uri(Req, #{path => <<>>})), + <<"http://localhost:8080?dummy=2785">> = iolist_to_binary(uri(Req, #{path => ""})), + <<"http://localhost:8080?dummy=2785">> = iolist_to_binary(uri(Req, #{path => [<<>>]})), + <<"http://localhost:8080/path">> = iolist_to_binary(uri(Req, #{qs => <<>>})), + <<"http://localhost:8080/path">> = iolist_to_binary(uri(Req, #{qs => ""})), + <<"http://localhost:8080/path">> = iolist_to_binary(uri(Req, #{qs => [<<>>]})), + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{fragment => <<>>})), + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{fragment => ""})), + <<"http://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{fragment => [<<>>]})), + %% Port is integer() | undefined. 
+ {'EXIT', _} = (catch iolist_to_binary(uri(Req, #{port => <<>>}))), + {'EXIT', _} = (catch iolist_to_binary(uri(Req, #{port => ""}))), + {'EXIT', _} = (catch iolist_to_binary(uri(Req, #{port => [<<>>]}))), + %% Update components. + <<"https://localhost:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => "https"})), + <<"http://example.org:8080/path?dummy=2785">> = iolist_to_binary(uri(Req, #{host => "example.org"})), + <<"http://localhost:123/path?dummy=2785">> = iolist_to_binary(uri(Req, #{port => 123})), + <<"http://localhost:8080/custom?dummy=2785">> = iolist_to_binary(uri(Req, #{path => "/custom"})), + <<"http://localhost:8080/path?smart=42">> = iolist_to_binary(uri(Req, #{qs => "smart=42"})), + <<"http://localhost:8080/path?dummy=2785#intro">> = iolist_to_binary(uri(Req, #{fragment => "intro"})), + %% Interesting combinations. + <<"http://localhost/path?dummy=2785">> = iolist_to_binary(uri(Req, #{port => 80})), + <<"https://localhost/path?dummy=2785">> = iolist_to_binary(uri(Req, #{scheme => "https", port => 443})), + ok. +-endif. + +-spec binding(atom(), req()) -> any() | undefined. +binding(Name, Req) -> + binding(Name, Req, undefined). + +-spec binding(atom(), req(), Default) -> any() | Default when Default::any(). +binding(Name, #{bindings := Bindings}, Default) when is_atom(Name) -> + case Bindings of + #{Name := Value} -> Value; + _ -> Default + end; +binding(Name, _, Default) when is_atom(Name) -> + Default. + +-spec bindings(req()) -> cowboy_router:bindings(). +bindings(#{bindings := Bindings}) -> + Bindings; +bindings(_) -> + #{}. + +-spec header(binary(), req()) -> binary() | undefined. +header(Name, Req) -> + header(Name, Req, undefined). + +-spec header(binary(), req(), Default) -> binary() | Default when Default::any(). +header(Name, #{headers := Headers}, Default) -> + maps:get(Name, Headers, Default). + +-spec headers(req()) -> cowboy:http_headers(). +headers(#{headers := Headers}) -> + Headers. + +-spec parse_header(binary(), Req) -> any() when Req::req(). +parse_header(Name = <<"content-length">>, Req) -> + parse_header(Name, Req, 0); +parse_header(Name = <<"cookie">>, Req) -> + parse_header(Name, Req, []); +parse_header(Name, Req) -> + parse_header(Name, Req, undefined). + +-spec parse_header(binary(), Req, any()) -> any() when Req::req(). +parse_header(Name, Req, Default) -> + try + parse_header(Name, Req, Default, parse_header_fun(Name)) + catch _:_:Stacktrace -> + erlang:raise(exit, {request_error, {header, Name}, + 'Malformed header. Please consult the relevant specification.' + }, Stacktrace) + end. 
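%% A sketch of the query string, cookie and header parsing helpers in use.
%% The field list (integer id defaulting to 0, binary lang defaulting to
%% <<"en">>) is illustrative and not part of this module.
example_request_info(Req) ->
	#{id := Id, lang := Lang} =
		cowboy_req:match_qs([{id, int, 0}, {lang, [], <<"en">>}], Req),
	Cookies = cowboy_req:parse_cookies(Req),
	MediaType = cowboy_req:parse_header(<<"content-type">>, Req),
	{Id, Lang, Cookies, MediaType}.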
+ +parse_header_fun(<<"accept">>) -> fun cow_http_hd:parse_accept/1; +parse_header_fun(<<"accept-charset">>) -> fun cow_http_hd:parse_accept_charset/1; +parse_header_fun(<<"accept-encoding">>) -> fun cow_http_hd:parse_accept_encoding/1; +parse_header_fun(<<"accept-language">>) -> fun cow_http_hd:parse_accept_language/1; +parse_header_fun(<<"access-control-request-headers">>) -> fun cow_http_hd:parse_access_control_request_headers/1; +parse_header_fun(<<"access-control-request-method">>) -> fun cow_http_hd:parse_access_control_request_method/1; +parse_header_fun(<<"authorization">>) -> fun cow_http_hd:parse_authorization/1; +parse_header_fun(<<"connection">>) -> fun cow_http_hd:parse_connection/1; +parse_header_fun(<<"content-encoding">>) -> fun cow_http_hd:parse_content_encoding/1; +parse_header_fun(<<"content-language">>) -> fun cow_http_hd:parse_content_language/1; +parse_header_fun(<<"content-length">>) -> fun cow_http_hd:parse_content_length/1; +parse_header_fun(<<"content-type">>) -> fun cow_http_hd:parse_content_type/1; +parse_header_fun(<<"cookie">>) -> fun cow_cookie:parse_cookie/1; +parse_header_fun(<<"expect">>) -> fun cow_http_hd:parse_expect/1; +parse_header_fun(<<"if-match">>) -> fun cow_http_hd:parse_if_match/1; +parse_header_fun(<<"if-modified-since">>) -> fun cow_http_hd:parse_if_modified_since/1; +parse_header_fun(<<"if-none-match">>) -> fun cow_http_hd:parse_if_none_match/1; +parse_header_fun(<<"if-range">>) -> fun cow_http_hd:parse_if_range/1; +parse_header_fun(<<"if-unmodified-since">>) -> fun cow_http_hd:parse_if_unmodified_since/1; +parse_header_fun(<<"max-forwards">>) -> fun cow_http_hd:parse_max_forwards/1; +parse_header_fun(<<"origin">>) -> fun cow_http_hd:parse_origin/1; +parse_header_fun(<<"proxy-authorization">>) -> fun cow_http_hd:parse_proxy_authorization/1; +parse_header_fun(<<"range">>) -> fun cow_http_hd:parse_range/1; +parse_header_fun(<<"sec-websocket-extensions">>) -> fun cow_http_hd:parse_sec_websocket_extensions/1; +parse_header_fun(<<"sec-websocket-protocol">>) -> fun cow_http_hd:parse_sec_websocket_protocol_req/1; +parse_header_fun(<<"sec-websocket-version">>) -> fun cow_http_hd:parse_sec_websocket_version_req/1; +parse_header_fun(<<"trailer">>) -> fun cow_http_hd:parse_trailer/1; +parse_header_fun(<<"upgrade">>) -> fun cow_http_hd:parse_upgrade/1; +parse_header_fun(<<"x-forwarded-for">>) -> fun cow_http_hd:parse_x_forwarded_for/1. + +parse_header(Name, Req, Default, ParseFun) -> + case header(Name, Req) of + undefined -> Default; + Value -> ParseFun(Value) + end. + +-spec filter_cookies([atom() | binary()], Req) -> Req when Req::req(). +filter_cookies(Names0, Req=#{headers := Headers}) -> + Names = [if + is_atom(N) -> atom_to_binary(N, utf8); + true -> N + end || N <- Names0], + case header(<<"cookie">>, Req) of + undefined -> Req; + Value0 -> + Cookies0 = binary:split(Value0, <<$;>>), + Cookies = lists:filter(fun(Cookie) -> + lists:member(cookie_name(Cookie), Names) + end, Cookies0), + Value = iolist_to_binary(lists:join($;, Cookies)), + Req#{headers => Headers#{<<"cookie">> => Value}} + end. + +%% This is a specialized function to extract a cookie name +%% regardless of whether the name is valid or not. We skip +%% whitespace at the beginning and take whatever's left to +%% be the cookie name, up to the = sign. +cookie_name(<<$\s, Rest/binary>>) -> cookie_name(Rest); +cookie_name(<<$\t, Rest/binary>>) -> cookie_name(Rest); +cookie_name(Name) -> cookie_name(Name, <<>>). 
+
+cookie_name(<<>>, Name) -> Name;
+cookie_name(<<$=, _/bits>>, Name) -> Name;
+cookie_name(<<Ch, Rest/bits>>, Acc) -> cookie_name(Rest, <<Acc/binary, Ch>>).
+
+-spec parse_cookies(req()) -> [{binary(), binary()}].
+parse_cookies(Req) ->
+	parse_header(<<"cookie">>, Req).
+
+-spec match_cookies(cowboy:fields(), req()) -> map().
+match_cookies(Fields, Req) ->
+	case filter(Fields, kvlist_to_map(Fields, parse_cookies(Req))) of
+		{ok, Map} ->
+			Map;
+		{error, Errors} ->
+			exit({request_error, {match_cookies, Errors},
+				'Cookie validation constraints failed for the reasons provided.'})
+	end.
+
+%% Request body.
+
+-spec has_body(req()) -> boolean().
+has_body(#{has_body := HasBody}) ->
+	HasBody.
+
+%% The length may not be known if HTTP/1.1 with a transfer-encoding;
+%% or HTTP/2 with no content-length header. The length is always
+%% known once the body has been completely read.
+-spec body_length(req()) -> undefined | non_neg_integer().
+body_length(#{body_length := Length}) ->
+	Length.
+
+-spec read_body(Req) -> {ok, binary(), Req} | {more, binary(), Req} when Req::req().
+read_body(Req) ->
+	read_body(Req, #{}).
+
+-spec read_body(Req, read_body_opts()) -> {ok, binary(), Req} | {more, binary(), Req} when Req::req().
+read_body(Req=#{has_body := false}, _) ->
+	{ok, <<>>, Req};
+read_body(Req=#{has_read_body := true}, _) ->
+	{ok, <<>>, Req};
+read_body(Req, Opts) ->
+	Length = maps:get(length, Opts, 8000000),
+	Period = maps:get(period, Opts, 15000),
+	Timeout = maps:get(timeout, Opts, Period + 1000),
+	Ref = make_ref(),
+	cast({read_body, self(), Ref, Length, Period}, Req),
+	receive
+		{request_body, Ref, nofin, Body} ->
+			{more, Body, Req};
+		{request_body, Ref, fin, BodyLength, Body} ->
+			{ok, Body, set_body_length(Req, BodyLength)}
+	after Timeout ->
+		exit(timeout)
+	end.
+
+set_body_length(Req=#{headers := Headers}, BodyLength) ->
+	Req#{
+		headers => Headers#{<<"content-length">> => integer_to_binary(BodyLength)},
+		body_length => BodyLength,
+		has_read_body => true
+	}.
+
+-spec read_urlencoded_body(Req) -> {ok, [{binary(), binary() | true}], Req} when Req::req().
+read_urlencoded_body(Req) ->
+	read_urlencoded_body(Req, #{length => 64000, period => 5000}).
+
+-spec read_urlencoded_body(Req, read_body_opts()) -> {ok, [{binary(), binary() | true}], Req} when Req::req().
+read_urlencoded_body(Req0, Opts) ->
+	case read_body(Req0, Opts) of
+		{ok, Body, Req} ->
+			try
+				{ok, cow_qs:parse_qs(Body), Req}
+			catch _:_:Stacktrace ->
+				erlang:raise(exit, {request_error, urlencoded_body,
+					'Malformed body; application/x-www-form-urlencoded expected.'
+				}, Stacktrace)
+			end;
+		{more, Body, _} ->
+			Length = maps:get(length, Opts, 64000),
+			if
+				byte_size(Body) < Length ->
+					exit({request_error, timeout,
+						'The request body was not received within the configured time.'});
+				true ->
+					exit({request_error, payload_too_large,
+						'The request body is larger than allowed by configuration.'})
+			end
+	end.
+
+-spec read_and_match_urlencoded_body(cowboy:fields(), Req)
+	-> {ok, map(), Req} when Req::req().
+read_and_match_urlencoded_body(Fields, Req) ->
+	read_and_match_urlencoded_body(Fields, Req, #{length => 64000, period => 5000}).
+
+-spec read_and_match_urlencoded_body(cowboy:fields(), Req, read_body_opts())
+	-> {ok, map(), Req} when Req::req().
+read_and_match_urlencoded_body(Fields, Req0, Opts) -> + {ok, Qs, Req} = read_urlencoded_body(Req0, Opts), + case filter(Fields, kvlist_to_map(Fields, Qs)) of + {ok, Map} -> + {ok, Map, Req}; + {error, Errors} -> + exit({request_error, {read_and_match_urlencoded_body, Errors}, + 'Urlencoded request body validation constraints failed for the reasons provided.'}) + end. + +%% Multipart. + +-spec read_part(Req) + -> {ok, cowboy:http_headers(), Req} | {done, Req} + when Req::req(). +read_part(Req) -> + read_part(Req, #{length => 64000, period => 5000}). + +-spec read_part(Req, read_body_opts()) + -> {ok, cowboy:http_headers(), Req} | {done, Req} + when Req::req(). +read_part(Req, Opts) -> + case maps:is_key(multipart, Req) of + true -> + {Data, Req2} = stream_multipart(Req, Opts, headers), + read_part(Data, Opts, Req2); + false -> + read_part(init_multipart(Req), Opts) + end. + +read_part(Buffer, Opts, Req=#{multipart := {Boundary, _}}) -> + try cow_multipart:parse_headers(Buffer, Boundary) of + more -> + {Data, Req2} = stream_multipart(Req, Opts, headers), + read_part(<< Buffer/binary, Data/binary >>, Opts, Req2); + {more, Buffer2} -> + {Data, Req2} = stream_multipart(Req, Opts, headers), + read_part(<< Buffer2/binary, Data/binary >>, Opts, Req2); + {ok, Headers0, Rest} -> + Headers = maps:from_list(Headers0), + %% Reject multipart content containing duplicate headers. + true = map_size(Headers) =:= length(Headers0), + {ok, Headers, Req#{multipart => {Boundary, Rest}}}; + %% Ignore epilogue. + {done, _} -> + {done, Req#{multipart => done}} + catch _:_:Stacktrace -> + erlang:raise(exit, {request_error, {multipart, headers}, + 'Malformed body; multipart expected.' + }, Stacktrace) + end. + +-spec read_part_body(Req) + -> {ok, binary(), Req} | {more, binary(), Req} + when Req::req(). +read_part_body(Req) -> + read_part_body(Req, #{}). + +-spec read_part_body(Req, read_body_opts()) + -> {ok, binary(), Req} | {more, binary(), Req} + when Req::req(). +read_part_body(Req, Opts) -> + case maps:is_key(multipart, Req) of + true -> + read_part_body(<<>>, Opts, Req, <<>>); + false -> + read_part_body(init_multipart(Req), Opts) + end. + +read_part_body(Buffer, Opts, Req=#{multipart := {Boundary, _}}, Acc) -> + Length = maps:get(length, Opts, 8000000), + case byte_size(Acc) > Length of + true -> + {more, Acc, Req#{multipart => {Boundary, Buffer}}}; + false -> + {Data, Req2} = stream_multipart(Req, Opts, body), + case cow_multipart:parse_body(<< Buffer/binary, Data/binary >>, Boundary) of + {ok, Body} -> + read_part_body(<<>>, Opts, Req2, << Acc/binary, Body/binary >>); + {ok, Body, Rest} -> + read_part_body(Rest, Opts, Req2, << Acc/binary, Body/binary >>); + done -> + {ok, Acc, Req2}; + {done, Body} -> + {ok, << Acc/binary, Body/binary >>, Req2}; + {done, Body, Rest} -> + {ok, << Acc/binary, Body/binary >>, + Req2#{multipart => {Boundary, Rest}}} + end + end. + +init_multipart(Req) -> + {<<"multipart">>, _, Params} = parse_header(<<"content-type">>, Req), + case lists:keyfind(<<"boundary">>, 1, Params) of + {_, Boundary} -> + Req#{multipart => {Boundary, <<>>}}; + false -> + exit({request_error, {multipart, boundary}, + 'Missing boundary parameter for multipart media type.'}) + end. + +stream_multipart(Req=#{multipart := done}, _, _) -> + {<<>>, Req}; +stream_multipart(Req=#{multipart := {_, <<>>}}, Opts, Type) -> + case read_body(Req, Opts) of + {more, Data, Req2} -> + {Data, Req2}; + %% We crash when the data ends unexpectedly. 
+ {ok, <<>>, _} -> + exit({request_error, {multipart, Type}, + 'Malformed body; multipart expected.'}); + {ok, Data, Req2} -> + {Data, Req2} + end; +stream_multipart(Req=#{multipart := {Boundary, Buffer}}, _, _) -> + {Buffer, Req#{multipart => {Boundary, <<>>}}}. + +%% Response. + +-spec set_resp_cookie(iodata(), iodata(), Req) + -> Req when Req::req(). +set_resp_cookie(Name, Value, Req) -> + set_resp_cookie(Name, Value, Req, #{}). + +%% The cookie name cannot contain any of the following characters: +%% =,;\s\t\r\n\013\014 +%% +%% The cookie value cannot contain any of the following characters: +%% ,; \t\r\n\013\014 +-spec set_resp_cookie(binary(), iodata(), Req, cow_cookie:cookie_opts()) + -> Req when Req::req(). +set_resp_cookie(Name, Value, Req, Opts) -> + Cookie = cow_cookie:setcookie(Name, Value, Opts), + RespCookies = maps:get(resp_cookies, Req, #{}), + Req#{resp_cookies => RespCookies#{Name => Cookie}}. + +%% @todo We could add has_resp_cookie and delete_resp_cookie now. + +-spec set_resp_header(binary(), iodata(), Req) + -> Req when Req::req(). +set_resp_header(Name, Value, Req=#{resp_headers := RespHeaders}) -> + Req#{resp_headers => RespHeaders#{Name => Value}}; +set_resp_header(Name,Value, Req) -> + Req#{resp_headers => #{Name => Value}}. + +-spec set_resp_headers(cowboy:http_headers(), Req) + -> Req when Req::req(). +set_resp_headers(Headers, Req=#{resp_headers := RespHeaders}) -> + Req#{resp_headers => maps:merge(RespHeaders, Headers)}; +set_resp_headers(Headers, Req) -> + Req#{resp_headers => Headers}. + +-spec resp_header(binary(), req()) -> binary() | undefined. +resp_header(Name, Req) -> + resp_header(Name, Req, undefined). + +-spec resp_header(binary(), req(), Default) + -> binary() | Default when Default::any(). +resp_header(Name, #{resp_headers := Headers}, Default) -> + maps:get(Name, Headers, Default); +resp_header(_, #{}, Default) -> + Default. + +-spec resp_headers(req()) -> cowboy:http_headers(). +resp_headers(#{resp_headers := RespHeaders}) -> + RespHeaders; +resp_headers(#{}) -> + #{}. + +-spec set_resp_body(resp_body(), Req) -> Req when Req::req(). +set_resp_body(Body, Req) -> + Req#{resp_body => Body}. + +-spec has_resp_header(binary(), req()) -> boolean(). +has_resp_header(Name, #{resp_headers := RespHeaders}) -> + maps:is_key(Name, RespHeaders); +has_resp_header(_, _) -> + false. + +-spec has_resp_body(req()) -> boolean(). +has_resp_body(#{resp_body := {sendfile, _, _, _}}) -> + true; +has_resp_body(#{resp_body := RespBody}) -> + iolist_size(RespBody) > 0; +has_resp_body(_) -> + false. + +-spec delete_resp_header(binary(), Req) + -> Req when Req::req(). +delete_resp_header(Name, Req=#{resp_headers := RespHeaders}) -> + Req#{resp_headers => maps:remove(Name, RespHeaders)}; +%% There are no resp headers so we have nothing to delete. +delete_resp_header(_, Req) -> + Req. + +-spec inform(cowboy:http_status(), req()) -> ok. +inform(Status, Req) -> + inform(Status, #{}, Req). + +-spec inform(cowboy:http_status(), cowboy:http_headers(), req()) -> ok. +inform(_, _, #{has_sent_resp := _}) -> + error(function_clause); %% @todo Better error message. +inform(Status, Headers, Req) when is_integer(Status); is_binary(Status) -> + cast({inform, Status, Headers}, Req). + +-spec reply(cowboy:http_status(), Req) -> Req when Req::req(). +reply(Status, Req) -> + reply(Status, #{}, Req). + +-spec reply(cowboy:http_status(), cowboy:http_headers(), Req) + -> Req when Req::req(). 
+reply(Status, Headers, Req=#{resp_body := Body}) -> + reply(Status, Headers, Body, Req); +reply(Status, Headers, Req) -> + reply(Status, Headers, <<>>, Req). + +-spec reply(cowboy:http_status(), cowboy:http_headers(), resp_body(), Req) + -> Req when Req::req(). +reply(_, _, _, #{has_sent_resp := _}) -> + error(function_clause); %% @todo Better error message. +reply(Status, Headers, {sendfile, _, 0, _}, Req) + when is_integer(Status); is_binary(Status) -> + do_reply(Status, Headers#{ + <<"content-length">> => <<"0">> + }, <<>>, Req); +reply(Status, Headers, SendFile = {sendfile, _, Len, _}, Req) + when is_integer(Status); is_binary(Status) -> + do_reply(Status, Headers#{ + <<"content-length">> => integer_to_binary(Len) + }, SendFile, Req); +%% 204 responses must not include content-length. 304 responses may +%% but only when set explicitly. (RFC7230 3.3.1, RFC7230 3.3.2) +%% Neither status code must include a response body. (RFC7230 3.3) +reply(Status, Headers, Body, Req) + when Status =:= 204; Status =:= 304 -> + 0 = iolist_size(Body), + do_reply(Status, Headers, Body, Req); +reply(Status = <<"204",_/bits>>, Headers, Body, Req) -> + 0 = iolist_size(Body), + do_reply(Status, Headers, Body, Req); +reply(Status = <<"304",_/bits>>, Headers, Body, Req) -> + 0 = iolist_size(Body), + do_reply(Status, Headers, Body, Req); +reply(Status, Headers, Body, Req) + when is_integer(Status); is_binary(Status) -> + do_reply(Status, Headers#{ + <<"content-length">> => integer_to_binary(iolist_size(Body)) + }, Body, Req). + +%% Don't send any body for HEAD responses. While the protocol code is +%% supposed to enforce this rule, we prefer to avoid copying too much +%% data around if we can avoid it. +do_reply(Status, Headers, _, Req=#{method := <<"HEAD">>}) -> + cast({response, Status, response_headers(Headers, Req), <<>>}, Req), + done_replying(Req, true); +do_reply(Status, Headers, Body, Req) -> + cast({response, Status, response_headers(Headers, Req), Body}, Req), + done_replying(Req, true). + +done_replying(Req, HasSentResp) -> + maps:without([resp_cookies, resp_headers, resp_body], Req#{has_sent_resp => HasSentResp}). + +-spec stream_reply(cowboy:http_status(), Req) -> Req when Req::req(). +stream_reply(Status, Req) -> + stream_reply(Status, #{}, Req). + +-spec stream_reply(cowboy:http_status(), cowboy:http_headers(), Req) + -> Req when Req::req(). +stream_reply(_, _, #{has_sent_resp := _}) -> + error(function_clause); +%% 204 and 304 responses must NOT send a body. We therefore +%% transform the call to a full response and expect the user +%% to NOT call stream_body/3 afterwards. (RFC7230 3.3) +stream_reply(Status = 204, Headers=#{}, Req) -> + reply(Status, Headers, <<>>, Req); +stream_reply(Status = <<"204",_/bits>>, Headers=#{}, Req) -> + reply(Status, Headers, <<>>, Req); +stream_reply(Status = 304, Headers=#{}, Req) -> + reply(Status, Headers, <<>>, Req); +stream_reply(Status = <<"304",_/bits>>, Headers=#{}, Req) -> + reply(Status, Headers, <<>>, Req); +stream_reply(Status, Headers=#{}, Req) when is_integer(Status); is_binary(Status) -> + cast({headers, Status, response_headers(Headers, Req)}, Req), + done_replying(Req, headers). + +-spec stream_body(resp_body(), fin | nofin, req()) -> ok. +%% Error out if headers were not sent. +%% Don't send any body for HEAD responses. +stream_body(_, _, #{method := <<"HEAD">>, has_sent_resp := headers}) -> + ok; +%% Don't send a message if the data is empty, except for the +%% very last message with IsFin=fin. 
When using sendfile this +%% is converted to a data tuple, however. +stream_body({sendfile, _, 0, _}, nofin, _) -> + ok; +stream_body({sendfile, _, 0, _}, IsFin=fin, Req=#{has_sent_resp := headers}) -> + stream_body({data, self(), IsFin, <<>>}, Req); +stream_body({sendfile, O, B, P}, IsFin, Req=#{has_sent_resp := headers}) + when is_integer(O), O >= 0, is_integer(B), B > 0 -> + stream_body({data, self(), IsFin, {sendfile, O, B, P}}, Req); +stream_body(Data, IsFin=nofin, Req=#{has_sent_resp := headers}) + when not is_tuple(Data) -> + case iolist_size(Data) of + 0 -> ok; + _ -> stream_body({data, self(), IsFin, Data}, Req) + end; +stream_body(Data, IsFin, Req=#{has_sent_resp := headers}) + when not is_tuple(Data) -> + stream_body({data, self(), IsFin, Data}, Req). + +%% @todo Do we need a timeout? +stream_body(Msg, Req=#{pid := Pid}) -> + cast(Msg, Req), + receive {data_ack, Pid} -> ok end. + +-spec stream_events(cow_sse:event() | [cow_sse:event()], fin | nofin, req()) -> ok. +stream_events(Event, IsFin, Req) when is_map(Event) -> + stream_events([Event], IsFin, Req); +stream_events(Events, IsFin, Req=#{has_sent_resp := headers}) -> + stream_body({data, self(), IsFin, cow_sse:events(Events)}, Req). + +-spec stream_trailers(cowboy:http_headers(), req()) -> ok. +stream_trailers(Trailers, Req=#{has_sent_resp := headers}) -> + cast({trailers, Trailers}, Req). + +-spec push(iodata(), cowboy:http_headers(), req()) -> ok. +push(Path, Headers, Req) -> + push(Path, Headers, Req, #{}). + +%% @todo Optimization: don't send anything at all for HTTP/1.0 and HTTP/1.1. +%% @todo Path, Headers, Opts, everything should be in proper binary, +%% or normalized when creating the Req object. +-spec push(iodata(), cowboy:http_headers(), req(), push_opts()) -> ok. +push(Path, Headers, Req=#{scheme := Scheme0, host := Host0, port := Port0}, Opts) -> + Method = maps:get(method, Opts, <<"GET">>), + Scheme = maps:get(scheme, Opts, Scheme0), + Host = maps:get(host, Opts, Host0), + Port = maps:get(port, Opts, Port0), + Qs = maps:get(qs, Opts, <<>>), + cast({push, Method, Scheme, Host, Port, Path, Qs, Headers}, Req). + +%% Stream handlers. + +-spec cast(any(), req()) -> ok. +cast(Msg, #{pid := Pid, streamid := StreamID}) -> + Pid ! {{Pid, StreamID}, Msg}, + ok. + +%% Internal. + +%% @todo What about set-cookie headers set through set_resp_header or reply? +-spec response_headers(Headers, req()) -> Headers when Headers::cowboy:http_headers(). +response_headers(Headers0, Req) -> + RespHeaders = maps:get(resp_headers, Req, #{}), + Headers = maps:merge(#{ + <<"date">> => cowboy_clock:rfc1123(), + <<"server">> => <<"Cowboy">> + }, maps:merge(RespHeaders, Headers0)), + %% The set-cookie header is special; we can only send one cookie per header. + %% We send the list of values for many cookies in one key of the map, + %% and let the protocols deal with it directly. + case maps:get(resp_cookies, Req, undefined) of + undefined -> Headers; + RespCookies -> Headers#{<<"set-cookie">> => maps:values(RespCookies)} + end. + +%% Create map, convert keys to atoms and group duplicate keys into lists. +%% Keys that are not found in the user provided list are entirely skipped. +%% @todo Can probably be done directly while parsing. +kvlist_to_map(Fields, KvList) -> + Keys = [case K of + {Key, _} -> Key; + {Key, _, _} -> Key; + Key -> Key + end || K <- Fields], + kvlist_to_map(Keys, KvList, #{}). 
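%% A sketch tying together the streaming response API above: send the
%% headers, stream two chunks, then finish with trailers. It is not part of
%% this module; the trailer name and body contents are illustrative only.
example_streamed_reply(Req0) ->
	Req = cowboy_req:stream_reply(200, #{
		<<"content-type">> => <<"text/plain">>,
		<<"trailer">> => <<"x-example-checksum">>
	}, Req0),
	ok = cowboy_req:stream_body(<<"chunk 1\n">>, nofin, Req),
	ok = cowboy_req:stream_body(<<"chunk 2\n">>, nofin, Req),
	ok = cowboy_req:stream_trailers(#{<<"x-example-checksum">> => <<"dummy">>}, Req),
	Req.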
+ +kvlist_to_map(_, [], Map) -> + Map; +kvlist_to_map(Keys, [{Key, Value}|Tail], Map) -> + try binary_to_existing_atom(Key, utf8) of + Atom -> + case lists:member(Atom, Keys) of + true -> + case maps:find(Atom, Map) of + {ok, MapValue} when is_list(MapValue) -> + kvlist_to_map(Keys, Tail, + Map#{Atom => [Value|MapValue]}); + {ok, MapValue} -> + kvlist_to_map(Keys, Tail, + Map#{Atom => [Value, MapValue]}); + error -> + kvlist_to_map(Keys, Tail, + Map#{Atom => Value}) + end; + false -> + kvlist_to_map(Keys, Tail, Map) + end + catch error:badarg -> + kvlist_to_map(Keys, Tail, Map) + end. + +filter(Fields, Map0) -> + filter(Fields, Map0, #{}). + +%% Loop through fields, if value is missing and no default, +%% record the error; else if value is missing and has a +%% default, set default; otherwise apply constraints. If +%% constraint fails, record the error. +%% +%% When there is an error at the end, crash. +filter([], Map, Errors) -> + case maps:size(Errors) of + 0 -> {ok, Map}; + _ -> {error, Errors} + end; +filter([{Key, Constraints}|Tail], Map, Errors) -> + filter_constraints(Tail, Map, Errors, Key, maps:get(Key, Map), Constraints); +filter([{Key, Constraints, Default}|Tail], Map, Errors) -> + case maps:find(Key, Map) of + {ok, Value} -> + filter_constraints(Tail, Map, Errors, Key, Value, Constraints); + error -> + filter(Tail, Map#{Key => Default}, Errors) + end; +filter([Key|Tail], Map, Errors) -> + case maps:is_key(Key, Map) of + true -> + filter(Tail, Map, Errors); + false -> + filter(Tail, Map, Errors#{Key => required}) + end. + +filter_constraints(Tail, Map, Errors, Key, Value0, Constraints) -> + case cowboy_constraints:validate(Value0, Constraints) of + {ok, Value} -> + filter(Tail, Map#{Key => Value}, Errors); + {error, Reason} -> + filter(Tail, Map, Errors#{Key => Reason}) + end. diff --git a/src/wsSrv/cowboy_rest.erl b/src/wsSrv/cowboy_rest.erl new file mode 100644 index 0000000..7d0fe80 --- /dev/null +++ b/src/wsSrv/cowboy_rest.erl @@ -0,0 +1,1637 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Originally based on the Webmachine Diagram from Alan Dean and +%% Justin Sheehy. +-module(cowboy_rest). +-behaviour(cowboy_sub_protocol). + +-export([upgrade/4]). +-export([upgrade/5]). + +-type switch_handler() :: {switch_handler, module()} + | {switch_handler, module(), any()}. + +%% Common handler callbacks. + +-callback init(Req, any()) + -> {ok | module(), Req, any()} + | {module(), Req, any(), any()} + when Req::cowboy_req:req(). + +-callback terminate(any(), cowboy_req:req(), any()) -> ok. +-optional_callbacks([terminate/3]). + +%% REST handler callbacks. + +-callback allowed_methods(Req, State) + -> {[binary()], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). 
+-optional_callbacks([allowed_methods/2]). + +-callback allow_missing_post(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([allow_missing_post/2]). + +-callback charsets_provided(Req, State) + -> {[binary()], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([charsets_provided/2]). + +-callback content_types_accepted(Req, State) + -> {[{'*' | binary() | {binary(), binary(), '*' | [{binary(), binary()}]}, atom()}], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([content_types_accepted/2]). + +-callback content_types_provided(Req, State) + -> {[{binary() | {binary(), binary(), '*' | [{binary(), binary()}]}, atom()}], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([content_types_provided/2]). + +-callback delete_completed(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([delete_completed/2]). + +-callback delete_resource(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([delete_resource/2]). + +-callback expires(Req, State) + -> {calendar:datetime() | binary() | undefined, Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([expires/2]). + +-callback forbidden(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([forbidden/2]). + +-callback generate_etag(Req, State) + -> {binary() | {weak | strong, binary()}, Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([generate_etag/2]). + +-callback is_authorized(Req, State) + -> {true | {false, iodata()}, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([is_authorized/2]). + +-callback is_conflict(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([is_conflict/2]). + +-callback known_methods(Req, State) + -> {[binary()], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([known_methods/2]). + +-callback languages_provided(Req, State) + -> {[binary()], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([languages_provided/2]). + +-callback last_modified(Req, State) + -> {calendar:datetime(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([last_modified/2]). + +-callback malformed_request(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([malformed_request/2]). + +-callback moved_permanently(Req, State) + -> {{true, iodata()} | false, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([moved_permanently/2]). 
+ +-callback moved_temporarily(Req, State) + -> {{true, iodata()} | false, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([moved_temporarily/2]). + +-callback multiple_choices(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([multiple_choices/2]). + +-callback options(Req, State) + -> {ok, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([options/2]). + +-callback previously_existed(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([previously_existed/2]). + +-callback range_satisfiable(Req, State) + -> {boolean() | {false, non_neg_integer() | iodata()}, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([range_satisfiable/2]). + +-callback ranges_provided(Req, State) + -> {[{binary(), atom()}], Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([ranges_provided/2]). + +-callback rate_limited(Req, State) + -> {{true, non_neg_integer() | calendar:datetime()} | false, Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([rate_limited/2]). + +-callback resource_exists(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([resource_exists/2]). + +-callback service_available(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([service_available/2]). + +-callback uri_too_long(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([uri_too_long/2]). + +-callback valid_content_headers(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([valid_content_headers/2]). + +-callback valid_entity_length(Req, State) + -> {boolean(), Req, State} + | {stop, Req, State} + | {switch_handler(), Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([valid_entity_length/2]). + +-callback variances(Req, State) + -> {[binary()], Req, State} + when Req::cowboy_req:req(), State::any(). +-optional_callbacks([variances/2]). + +%% End of REST callbacks. Whew! + +-record(state, { + method = undefined :: binary(), + + %% Handler. + handler :: atom(), + handler_state :: any(), + + %% Allowed methods. Only used for OPTIONS requests. + allowed_methods :: [binary()] | undefined, + + %% Media type. + content_types_p = [] :: + [{binary() | {binary(), binary(), [{binary(), binary()}] | '*'}, + atom()}], + content_type_a :: undefined + | {binary() | {binary(), binary(), [{binary(), binary()}] | '*'}, + atom()}, + + %% Language. + languages_p = [] :: [binary()], + language_a :: undefined | binary(), + + %% Charset. + charsets_p = undefined :: undefined | [binary()], + charset_a :: undefined | binary(), + + %% Range units. 
+ ranges_a = [] :: [{binary(), atom()}], + + %% Whether the resource exists. + exists = false :: boolean(), + + %% Cached resource calls. + etag :: undefined | no_call | {strong | weak, binary()}, + last_modified :: undefined | no_call | calendar:datetime(), + expires :: undefined | no_call | calendar:datetime() | binary() +}). + +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req0, Env, Handler, HandlerState0) -> + Method = cowboy_req:method(Req0), + case service_available(Req0, #state{method=Method, + handler=Handler, handler_state=HandlerState0}) of + {ok, Req, Result} -> + {ok, Req, Env#{result => Result}}; + {Mod, Req, HandlerState} -> + Mod:upgrade(Req, Env, Handler, HandlerState); + {Mod, Req, HandlerState, Opts} -> + Mod:upgrade(Req, Env, Handler, HandlerState, Opts) + end. + +-spec upgrade(Req, Env, module(), any(), any()) + -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +%% cowboy_rest takes no options. +upgrade(Req, Env, Handler, HandlerState, _Opts) -> + upgrade(Req, Env, Handler, HandlerState). + +service_available(Req, State) -> + expect(Req, State, service_available, true, fun known_methods/2, 503). + +%% known_methods/2 should return a list of binary methods. +known_methods(Req, State=#state{method=Method}) -> + case call(Req, State, known_methods) of + no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">>; + Method =:= <<"POST">>; Method =:= <<"PUT">>; + Method =:= <<"PATCH">>; Method =:= <<"DELETE">>; + Method =:= <<"OPTIONS">> -> + next(Req, State, fun uri_too_long/2); + no_call -> + next(Req, State, 501); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {List, Req2, State2} -> + case lists:member(Method, List) of + true -> next(Req2, State2, fun uri_too_long/2); + false -> next(Req2, State2, 501) + end + end. + +uri_too_long(Req, State) -> + expect(Req, State, uri_too_long, false, fun allowed_methods/2, 414). + +%% allowed_methods/2 should return a list of binary methods. +allowed_methods(Req, State=#state{method=Method}) -> + case call(Req, State, allowed_methods) of + no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">> -> + next(Req, State, fun malformed_request/2); + no_call when Method =:= <<"OPTIONS">> -> + next(Req, State#state{allowed_methods= + [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]}, + fun malformed_request/2); + no_call -> + method_not_allowed(Req, State, + [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {List, Req2, State2} -> + case lists:member(Method, List) of + true when Method =:= <<"OPTIONS">> -> + next(Req2, State2#state{allowed_methods=List}, + fun malformed_request/2); + true -> + next(Req2, State2, fun malformed_request/2); + false -> + method_not_allowed(Req2, State2, List) + end + end. + +method_not_allowed(Req, State, []) -> + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), + respond(Req2, State, 405); +method_not_allowed(Req, State, Methods) -> + << ", ", Allow/binary >> = << << ", ", M/binary >> || M <- Methods >>, + Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), + respond(Req2, State, 405). + +malformed_request(Req, State) -> + expect(Req, State, malformed_request, false, fun is_authorized/2, 400). 
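+
+%% As an illustration of the optional callbacks driving the decisions
+%% above, a handler might export allowed_methods/2 as follows (the
+%% handler module and its state are hypothetical, not part of this file):
+%%
+%%   allowed_methods(Req, State) ->
+%%       {[<<"GET">>, <<"HEAD">>, <<"OPTIONS">>, <<"PUT">>], Req, State}.
+%%
+%% A request using a method missing from that list is rejected above
+%% with a 405 reply carrying the corresponding allow header.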
+ +%% is_authorized/2 should return true or {false, WwwAuthenticateHeader}. +is_authorized(Req, State) -> + case call(Req, State, is_authorized) of + no_call -> + forbidden(Req, State); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {true, Req2, State2} -> + forbidden(Req2, State2); + {{false, AuthHead}, Req2, State2} -> + Req3 = cowboy_req:set_resp_header( + <<"www-authenticate">>, AuthHead, Req2), + respond(Req3, State2, 401) + end. + +forbidden(Req, State) -> + expect(Req, State, forbidden, false, fun rate_limited/2, 403). + +rate_limited(Req, State) -> + case call(Req, State, rate_limited) of + no_call -> + valid_content_headers(Req, State); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {false, Req2, State2} -> + valid_content_headers(Req2, State2); + {{true, RetryAfter0}, Req2, State2} -> + RetryAfter = if + is_integer(RetryAfter0), RetryAfter0 >= 0 -> + integer_to_binary(RetryAfter0); + is_tuple(RetryAfter0) -> + cowboy_clock:rfc1123(RetryAfter0) + end, + Req3 = cowboy_req:set_resp_header(<<"retry-after">>, RetryAfter, Req2), + respond(Req3, State2, 429) + end. + +valid_content_headers(Req, State) -> + expect(Req, State, valid_content_headers, true, + fun valid_entity_length/2, 501). + +valid_entity_length(Req, State) -> + expect(Req, State, valid_entity_length, true, fun options/2, 413). + +%% If you need to add additional headers to the response at this point, +%% you should do it directly in the options/2 call using set_resp_headers. +options(Req, State=#state{allowed_methods=Methods, method= <<"OPTIONS">>}) -> + case call(Req, State, options) of + no_call when Methods =:= [] -> + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), + respond(Req2, State, 200); + no_call -> + << ", ", Allow/binary >> + = << << ", ", M/binary >> || M <- Methods >>, + Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), + respond(Req2, State, 200); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {ok, Req2, State2} -> + respond(Req2, State2, 200) + end; +options(Req, State) -> + content_types_provided(Req, State). + +%% content_types_provided/2 should return a list of content types and their +%% associated callback function as a tuple: {{Type, SubType, Params}, Fun}. +%% Type and SubType are the media type as binary. Params is a list of +%% Key/Value tuple, with Key and Value a binary. Fun is the name of the +%% callback that will be used to return the content of the response. It is +%% given as an atom. +%% +%% An example of such return value would be: +%% {{<<"text">>, <<"html">>, []}, to_html} +%% +%% Note that it is also possible to return a binary content type that will +%% then be parsed by Cowboy. However note that while this may make your +%% resources a little more readable, this is a lot less efficient. 
+%% +%% An example of such return value would be: +%% {<<"text/html">>, to_html} +content_types_provided(Req, State) -> + case call(Req, State, content_types_provided) of + no_call -> + State2 = State#state{ + content_types_p=[{{<<"text">>, <<"html">>, '*'}, to_html}]}, + try cowboy_req:parse_header(<<"accept">>, Req) of + undefined -> + languages_provided( + Req#{media_type => {<<"text">>, <<"html">>, []}}, + State2#state{content_type_a={{<<"text">>, <<"html">>, []}, to_html}}); + Accept -> + choose_media_type(Req, State2, prioritize_accept(Accept)) + catch _:_ -> + respond(Req, State2, 400) + end; + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {[], Req2, State2} -> + not_acceptable(Req2, State2); + {CTP, Req2, State2} -> + CTP2 = [normalize_content_types(P) || P <- CTP], + State3 = State2#state{content_types_p=CTP2}, + try cowboy_req:parse_header(<<"accept">>, Req2) of + undefined -> + {PMT0, _Fun} = HeadCTP = hd(CTP2), + %% We replace the wildcard by an empty list of parameters. + PMT = case PMT0 of + {Type, SubType, '*'} -> {Type, SubType, []}; + _ -> PMT0 + end, + languages_provided( + Req2#{media_type => PMT}, + State3#state{content_type_a=HeadCTP}); + Accept -> + choose_media_type(Req2, State3, prioritize_accept(Accept)) + catch _:_ -> + respond(Req2, State3, 400) + end + end. + +normalize_content_types({ContentType, Callback}) + when is_binary(ContentType) -> + {cow_http_hd:parse_content_type(ContentType), Callback}; +normalize_content_types(Normalized) -> + Normalized. + +prioritize_accept(Accept) -> + lists:sort( + fun ({MediaTypeA, Quality, _AcceptParamsA}, + {MediaTypeB, Quality, _AcceptParamsB}) -> + %% Same quality, check precedence in more details. + prioritize_mediatype(MediaTypeA, MediaTypeB); + ({_MediaTypeA, QualityA, _AcceptParamsA}, + {_MediaTypeB, QualityB, _AcceptParamsB}) -> + %% Just compare the quality. + QualityA > QualityB + end, Accept). + +%% Media ranges can be overridden by more specific media ranges or +%% specific media types. If more than one media range applies to a given +%% type, the most specific reference has precedence. +%% +%% We always choose B over A when we can't decide between the two. +prioritize_mediatype({TypeA, SubTypeA, ParamsA}, {TypeB, SubTypeB, ParamsB}) -> + case TypeB of + TypeA -> + case SubTypeB of + SubTypeA -> length(ParamsA) > length(ParamsB); + <<"*">> -> true; + _Any -> false + end; + <<"*">> -> true; + _Any -> false + end. + +%% Ignoring the rare AcceptParams. Not sure what should be done about them. +choose_media_type(Req, State, []) -> + not_acceptable(Req, State); +choose_media_type(Req, State=#state{content_types_p=CTP}, + [MediaType|Tail]) -> + match_media_type(Req, State, Tail, CTP, MediaType). + +match_media_type(Req, State, Accept, [], _MediaType) -> + choose_media_type(Req, State, Accept); +match_media_type(Req, State, Accept, CTP, + MediaType = {{<<"*">>, <<"*">>, _Params_A}, _QA, _APA}) -> + match_media_type_params(Req, State, Accept, CTP, MediaType); +match_media_type(Req, State, Accept, + CTP = [{{Type, SubType_P, _PP}, _Fun}|_Tail], + MediaType = {{Type, SubType_A, _PA}, _QA, _APA}) + when SubType_P =:= SubType_A; SubType_A =:= <<"*">> -> + match_media_type_params(Req, State, Accept, CTP, MediaType); +match_media_type(Req, State, Accept, [_Any|Tail], MediaType) -> + match_media_type(Req, State, Accept, Tail, MediaType). 
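+
+%% To make the negotiation above concrete, a handler could for example
+%% export the following pair of callbacks (the to_html/2 name and the
+%% body are illustrative only):
+%%
+%%   content_types_provided(Req, State) ->
+%%       {[{{<<"text">>, <<"html">>, '*'}, to_html}], Req, State}.
+%%
+%%   to_html(Req, State) ->
+%%       {<<"<h1>Hello!</h1>">>, Req, State}.
+%%
+%% The to_html/2 callback itself is only invoked much later, once the
+%% decision flow has succeeded for a GET or HEAD request.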
+
+match_media_type_params(Req, State, Accept,
+		[Provided = {{TP, STP, '*'}, _Fun}|Tail],
+		MediaType = {{TA, _STA, Params_A0}, _QA, _APA}) ->
+	case lists:keytake(<<"charset">>, 1, Params_A0) of
+		{value, {_, Charset}, Params_A} when TA =:= <<"text">> ->
+			%% When we match against a wildcard and the media type is text
+			%% with a charset parameter, we call charsets_provided and
+			%% check that the charset is provided. If the callback is not
+			%% exported, we accept unconditionally but ignore the given
+			%% charset so as not to send a wrong value back.
+			case call(Req, State, charsets_provided) of
+				no_call ->
+					languages_provided(Req#{media_type => {TP, STP, Params_A0}},
+						State#state{content_type_a=Provided});
+				{stop, Req2, State2} ->
+					terminate(Req2, State2);
+				{Switch, Req2, State2} when element(1, Switch) =:= switch_handler ->
+					switch_handler(Switch, Req2, State2);
+				{CP, Req2, State2} ->
+					State3 = State2#state{charsets_p=CP},
+					case lists:member(Charset, CP) of
+						false ->
+							match_media_type(Req2, State3, Accept, Tail, MediaType);
+						true ->
+							languages_provided(Req2#{media_type => {TP, STP, Params_A}},
+								State3#state{content_type_a=Provided,
+									charset_a=Charset})
+					end
+			end;
+		_ ->
+			languages_provided(Req#{media_type => {TP, STP, Params_A0}},
+				State#state{content_type_a=Provided})
+	end;
+match_media_type_params(Req, State, Accept,
+		[Provided = {PMT = {TP, STP, Params_P0}, Fun}|Tail],
+		MediaType = {{_TA, _STA, Params_A}, _QA, _APA}) ->
+	case lists:sort(Params_P0) =:= lists:sort(Params_A) of
+		true when TP =:= <<"text">> ->
+			%% When a charset was provided explicitly in both the accept header
+			%% and the media types provided and the negotiation is successful,
+			%% we keep the charset and don't call charsets_provided. This only
+			%% applies to text media types, however.
+			{Charset, Params_P} = case lists:keytake(<<"charset">>, 1, Params_P0) of
+				false -> {undefined, Params_P0};
+				{value, {_, Charset0}, Params_P1} -> {Charset0, Params_P1}
+			end,
+			languages_provided(Req#{media_type => {TP, STP, Params_P}},
+				State#state{content_type_a={{TP, STP, Params_P}, Fun},
+					charset_a=Charset});
+		true ->
+			languages_provided(Req#{media_type => PMT},
+				State#state{content_type_a=Provided});
+		false ->
+			match_media_type(Req, State, Accept, Tail, MediaType)
+	end.
+
+%% languages_provided should return a list of binary values indicating
+%% which languages are accepted by the resource.
+%%
+%% @todo I suppose we should also ask the resource if it wants to
+%% set a language itself or if it wants it to be automatically chosen.
+languages_provided(Req, State) ->
+	case call(Req, State, languages_provided) of
+		no_call ->
+			charsets_provided(Req, State);
+		{stop, Req2, State2} ->
+			terminate(Req2, State2);
+		{Switch, Req2, State2} when element(1, Switch) =:= switch_handler ->
+			switch_handler(Switch, Req2, State2);
+		{[], Req2, State2} ->
+			not_acceptable(Req2, State2);
+		{LP, Req2, State2} ->
+			State3 = State2#state{languages_p=LP},
+			case cowboy_req:parse_header(<<"accept-language">>, Req2) of
+				undefined ->
+					set_language(Req2, State3#state{language_a=hd(LP)});
+				AcceptLanguage ->
+					AcceptLanguage2 = prioritize_languages(AcceptLanguage),
+					choose_language(Req2, State3, AcceptLanguage2)
+			end
+	end.
+
+%% A language-range matches a language-tag if it exactly equals the tag,
+%% or if it exactly equals a prefix of the tag such that the first tag
+%% character following the prefix is "-".
The special range "*", if +%% present in the Accept-Language field, matches every tag not matched +%% by any other range present in the Accept-Language field. +%% +%% @todo The last sentence probably means we should always put '*' +%% at the end of the list. +prioritize_languages(AcceptLanguages) -> + lists:sort( + fun ({_TagA, QualityA}, {_TagB, QualityB}) -> + QualityA > QualityB + end, AcceptLanguages). + +choose_language(Req, State, []) -> + not_acceptable(Req, State); +choose_language(Req, State=#state{languages_p=LP}, [Language|Tail]) -> + match_language(Req, State, Tail, LP, Language). + +match_language(Req, State, Accept, [], _Language) -> + choose_language(Req, State, Accept); +match_language(Req, State, _Accept, [Provided|_Tail], {'*', _Quality}) -> + set_language(Req, State#state{language_a=Provided}); +match_language(Req, State, _Accept, [Provided|_Tail], {Provided, _Quality}) -> + set_language(Req, State#state{language_a=Provided}); +match_language(Req, State, Accept, [Provided|Tail], + Language = {Tag, _Quality}) -> + Length = byte_size(Tag), + case Provided of + << Tag:Length/binary, $-, _Any/bits >> -> + set_language(Req, State#state{language_a=Provided}); + _Any -> + match_language(Req, State, Accept, Tail, Language) + end. + +set_language(Req, State=#state{language_a=Language}) -> + Req2 = cowboy_req:set_resp_header(<<"content-language">>, Language, Req), + charsets_provided(Req2#{language => Language}, State). + +%% charsets_provided should return a list of binary values indicating +%% which charsets are accepted by the resource. +%% +%% A charset may have been selected while negotiating the accept header. +%% There's no need to select one again. +charsets_provided(Req, State=#state{charset_a=Charset}) + when Charset =/= undefined -> + set_content_type(Req, State); +%% If charsets_p is defined, use it instead of calling charsets_provided +%% again. We also call this clause during normal execution to avoid +%% duplicating code. +charsets_provided(Req, State=#state{charsets_p=[]}) -> + not_acceptable(Req, State); +charsets_provided(Req, State=#state{charsets_p=CP}) + when CP =/= undefined -> + case cowboy_req:parse_header(<<"accept-charset">>, Req) of + undefined -> + set_content_type(Req, State#state{charset_a=hd(CP)}); + AcceptCharset0 -> + AcceptCharset = prioritize_charsets(AcceptCharset0), + choose_charset(Req, State, AcceptCharset) + end; +charsets_provided(Req, State) -> + case call(Req, State, charsets_provided) of + no_call -> + set_content_type(Req, State); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {CP, Req2, State2} -> + charsets_provided(Req2, State2#state{charsets_p=CP}) + end. + +prioritize_charsets(AcceptCharsets) -> + lists:sort( + fun ({_CharsetA, QualityA}, {_CharsetB, QualityB}) -> + QualityA > QualityB + end, AcceptCharsets). + +choose_charset(Req, State, []) -> + not_acceptable(Req, State); +%% A q-value of 0 means not acceptable. +choose_charset(Req, State, [{_, 0}|Tail]) -> + choose_charset(Req, State, Tail); +choose_charset(Req, State=#state{charsets_p=CP}, [Charset|Tail]) -> + match_charset(Req, State, Tail, CP, Charset). 
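+
+%% A resource negotiating both language and charset could, for instance,
+%% return the following values (purely illustrative):
+%%
+%%   languages_provided(Req, State) ->
+%%       {[<<"en">>, <<"fr">>], Req, State}.
+%%
+%%   charsets_provided(Req, State) ->
+%%       {[<<"utf-8">>], Req, State}.
+%%
+%% The selected language ends up in the content-language header and the
+%% selected charset, for text media types, in the charset parameter of
+%% the content-type header.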
+ +match_charset(Req, State, Accept, [], _Charset) -> + choose_charset(Req, State, Accept); +match_charset(Req, State, _Accept, [Provided|_], {<<"*">>, _}) -> + set_content_type(Req, State#state{charset_a=Provided}); +match_charset(Req, State, _Accept, [Provided|_], {Provided, _}) -> + set_content_type(Req, State#state{charset_a=Provided}); +match_charset(Req, State, Accept, [_|Tail], Charset) -> + match_charset(Req, State, Accept, Tail, Charset). + +set_content_type(Req, State=#state{ + content_type_a={{Type, SubType, Params}, _Fun}, + charset_a=Charset}) -> + ParamsBin = set_content_type_build_params(Params, []), + ContentType = [Type, <<"/">>, SubType, ParamsBin], + ContentType2 = case {Type, Charset} of + {<<"text">>, Charset} when Charset =/= undefined -> + [ContentType, <<"; charset=">>, Charset]; + _ -> + ContentType + end, + Req2 = cowboy_req:set_resp_header(<<"content-type">>, ContentType2, Req), + encodings_provided(Req2#{charset => Charset}, State). + +set_content_type_build_params('*', []) -> + <<>>; +set_content_type_build_params([], []) -> + <<>>; +set_content_type_build_params([], Acc) -> + lists:reverse(Acc); +set_content_type_build_params([{Attr, Value}|Tail], Acc) -> + set_content_type_build_params(Tail, [[Attr, <<"=">>, Value], <<";">>|Acc]). + +%% @todo Match for identity as we provide nothing else for now. +%% @todo Don't forget to set the Content-Encoding header when we reply a body +%% and the found encoding is something other than identity. +encodings_provided(Req, State) -> + ranges_provided(Req, State). + +not_acceptable(Req, State) -> + respond(Req, State, 406). + +ranges_provided(Req, State) -> + case call(Req, State, ranges_provided) of + no_call -> + variances(Req, State); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {[], Req2, State2} -> + Req3 = cowboy_req:set_resp_header(<<"accept-ranges">>, <<"none">>, Req2), + variances(Req3, State2#state{ranges_a=[]}); + {RP, Req2, State2} -> + <<", ", AcceptRanges/binary>> = <<<<", ", R/binary>> || {R, _} <- RP>>, + Req3 = cowboy_req:set_resp_header(<<"accept-ranges">>, AcceptRanges, Req2), + variances(Req3, State2#state{ranges_a=RP}) + end. + +%% variances/2 should return a list of headers that will be added +%% to the Vary response header. The Accept, Accept-Language, +%% Accept-Charset and Accept-Encoding headers do not need to be +%% specified. +%% +%% @todo Do Accept-Encoding too when we handle it. +%% @todo Does the order matter? +variances(Req, State=#state{content_types_p=CTP, + languages_p=LP, charsets_p=CP}) -> + Variances = case CTP of + [] -> []; + [_] -> []; + [_|_] -> [<<"accept">>] + end, + Variances2 = case LP of + [] -> Variances; + [_] -> Variances; + [_|_] -> [<<"accept-language">>|Variances] + end, + Variances3 = case CP of + undefined -> Variances2; + [] -> Variances2; + [_] -> Variances2; + [_|_] -> [<<"accept-charset">>|Variances2] + end, + try variances(Req, State, Variances3) of + {Variances4, Req2, State2} -> + case [[<<", ">>, V] || V <- Variances4] of + [] -> + resource_exists(Req2, State2); + [[<<", ">>, H]|Variances5] -> + Req3 = cowboy_req:set_resp_header( + <<"vary">>, [H|Variances5], Req2), + resource_exists(Req3, State2) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. 
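+
+%% Range support is opt-in. A handler that wants ranged responses to be
+%% produced automatically for bytes ranges could, for example, return:
+%%
+%%   ranges_provided(Req, State) ->
+%%       {[{<<"bytes">>, auto}], Req, State}.
+%%
+%% This results in an "accept-ranges: bytes" response header and enables
+%% the ranged-body handling implemented further down this module.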
+ +variances(Req, State, Variances) -> + case unsafe_call(Req, State, variances) of + no_call -> + {Variances, Req, State}; + {HandlerVariances, Req2, State2} -> + {Variances ++ HandlerVariances, Req2, State2} + end. + +resource_exists(Req, State) -> + expect(Req, State, resource_exists, true, + fun if_match_exists/2, fun if_match_must_not_exist/2). + +if_match_exists(Req, State) -> + State2 = State#state{exists=true}, + case cowboy_req:parse_header(<<"if-match">>, Req) of + undefined -> + if_unmodified_since_exists(Req, State2); + '*' -> + if_unmodified_since_exists(Req, State2); + ETagsList -> + if_match(Req, State2, ETagsList) + end. + +if_match(Req, State, EtagsList) -> + try generate_etag(Req, State) of + %% Strong Etag comparison: weak Etag never matches. + {{weak, _}, Req2, State2} -> + precondition_failed(Req2, State2); + {Etag, Req2, State2} -> + case lists:member(Etag, EtagsList) of + true -> if_none_match_exists(Req2, State2); + %% Etag may be `undefined' which cannot be a member. + false -> precondition_failed(Req2, State2) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +if_match_must_not_exist(Req, State) -> + case cowboy_req:header(<<"if-match">>, Req) of + undefined -> is_put_to_missing_resource(Req, State); + _ -> precondition_failed(Req, State) + end. + +if_unmodified_since_exists(Req, State) -> + try cowboy_req:parse_header(<<"if-unmodified-since">>, Req) of + undefined -> + if_none_match_exists(Req, State); + IfUnmodifiedSince -> + if_unmodified_since(Req, State, IfUnmodifiedSince) + catch _:_ -> + if_none_match_exists(Req, State) + end. + +%% If LastModified is the atom 'no_call', we continue. +if_unmodified_since(Req, State, IfUnmodifiedSince) -> + try last_modified(Req, State) of + {LastModified, Req2, State2} -> + case LastModified > IfUnmodifiedSince of + true -> precondition_failed(Req2, State2); + false -> if_none_match_exists(Req2, State2) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +if_none_match_exists(Req, State) -> + case cowboy_req:parse_header(<<"if-none-match">>, Req) of + undefined -> + if_modified_since_exists(Req, State); + '*' -> + precondition_is_head_get(Req, State); + EtagsList -> + if_none_match(Req, State, EtagsList) + end. + +if_none_match(Req, State, EtagsList) -> + try generate_etag(Req, State) of + {Etag, Req2, State2} -> + case Etag of + undefined -> + precondition_failed(Req2, State2); + Etag -> + case is_weak_match(Etag, EtagsList) of + true -> precondition_is_head_get(Req2, State2); + false -> method(Req2, State2) + end + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +%% Weak Etag comparison: only check the opaque tag. +is_weak_match(_, []) -> + false; +is_weak_match({_, Tag}, [{_, Tag}|_]) -> + true; +is_weak_match(Etag, [_|Tail]) -> + is_weak_match(Etag, Tail). + +precondition_is_head_get(Req, State=#state{method=Method}) + when Method =:= <<"HEAD">>; Method =:= <<"GET">> -> + not_modified(Req, State); +precondition_is_head_get(Req, State) -> + precondition_failed(Req, State). + +if_modified_since_exists(Req, State) -> + try cowboy_req:parse_header(<<"if-modified-since">>, Req) of + undefined -> + method(Req, State); + IfModifiedSince -> + if_modified_since_now(Req, State, IfModifiedSince) + catch _:_ -> + method(Req, State) + end. 
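+
+%% The conditional logic above relies on the optional generate_etag/2 and
+%% last_modified/2 callbacks. A sketch of possible return values:
+%%
+%%   generate_etag(Req, State) ->
+%%       {{strong, <<"v1">>}, Req, State}.
+%%
+%%   last_modified(Req, State) ->
+%%       {{{2021, 12, 20}, {0, 0, 0}}, Req, State}.
+%%
+%% With these, revalidation requests carrying if-none-match or
+%% if-modified-since can be answered with 304 without producing the body.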
+ +if_modified_since_now(Req, State, IfModifiedSince) -> + case IfModifiedSince > erlang:universaltime() of + true -> method(Req, State); + false -> if_modified_since(Req, State, IfModifiedSince) + end. + +if_modified_since(Req, State, IfModifiedSince) -> + try last_modified(Req, State) of + {undefined, Req2, State2} -> + method(Req2, State2); + {LastModified, Req2, State2} -> + case LastModified > IfModifiedSince of + true -> method(Req2, State2); + false -> not_modified(Req2, State2) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +not_modified(Req, State) -> + Req2 = cowboy_req:delete_resp_header(<<"content-type">>, Req), + try set_resp_etag(Req2, State) of + {Req3, State2} -> + try set_resp_expires(Req3, State2) of + {Req4, State3} -> + respond(Req4, State3, 304) + catch Class:Reason:Stacktrace -> + error_terminate(Req, State2, Class, Reason, Stacktrace) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +precondition_failed(Req, State) -> + respond(Req, State, 412). + +is_put_to_missing_resource(Req, State=#state{method= <<"PUT">>}) -> + moved_permanently(Req, State, fun is_conflict/2); +is_put_to_missing_resource(Req, State) -> + previously_existed(Req, State). + +%% moved_permanently/2 should return either false or {true, Location} +%% with Location the full new URI of the resource. +moved_permanently(Req, State, OnFalse) -> + case call(Req, State, moved_permanently) of + {{true, Location}, Req2, State2} -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, Location, Req2), + respond(Req3, State2, 301); + {false, Req2, State2} -> + OnFalse(Req2, State2); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + no_call -> + OnFalse(Req, State) + end. + +previously_existed(Req, State) -> + expect(Req, State, previously_existed, false, + fun (R, S) -> is_post_to_missing_resource(R, S, 404) end, + fun (R, S) -> moved_permanently(R, S, fun moved_temporarily/2) end). + +%% moved_temporarily/2 should return either false or {true, Location} +%% with Location the full new URI of the resource. +moved_temporarily(Req, State) -> + case call(Req, State, moved_temporarily) of + {{true, Location}, Req2, State2} -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, Location, Req2), + respond(Req3, State2, 307); + {false, Req2, State2} -> + is_post_to_missing_resource(Req2, State2, 410); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + no_call -> + is_post_to_missing_resource(Req, State, 410) + end. + +is_post_to_missing_resource(Req, State=#state{method= <<"POST">>}, OnFalse) -> + allow_missing_post(Req, State, OnFalse); +is_post_to_missing_resource(Req, State, OnFalse) -> + respond(Req, State, OnFalse). + +allow_missing_post(Req, State, OnFalse) -> + expect(Req, State, allow_missing_post, true, fun accept_resource/2, OnFalse). 
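+
+%% For resources that have moved, the handler may return the new location
+%% directly, for example (the URI is purely illustrative):
+%%
+%%   moved_permanently(Req, State) ->
+%%       {{true, <<"/new/location">>}, Req, State}.
+%%
+%% which results in a 301 reply with a location header; moved_temporarily/2
+%% works the same way but results in a 307.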
+ +method(Req, State=#state{method= <<"DELETE">>}) -> + delete_resource(Req, State); +method(Req, State=#state{method= <<"PUT">>}) -> + is_conflict(Req, State); +method(Req, State=#state{method=Method}) + when Method =:= <<"POST">>; Method =:= <<"PATCH">> -> + accept_resource(Req, State); +method(Req, State=#state{method=Method}) + when Method =:= <<"GET">>; Method =:= <<"HEAD">> -> + set_resp_body_etag(Req, State); +method(Req, State) -> + multiple_choices(Req, State). + +%% delete_resource/2 should start deleting the resource and return. +delete_resource(Req, State) -> + expect(Req, State, delete_resource, false, 500, fun delete_completed/2). + +%% delete_completed/2 indicates whether the resource has been deleted yet. +delete_completed(Req, State) -> + expect(Req, State, delete_completed, true, fun has_resp_body/2, 202). + +is_conflict(Req, State) -> + expect(Req, State, is_conflict, false, fun accept_resource/2, 409). + +%% content_types_accepted should return a list of media types and their +%% associated callback functions in the same format as content_types_provided. +%% +%% The callback will then be called and is expected to process the content +%% pushed to the resource in the request body. +%% +%% content_types_accepted SHOULD return a different list +%% for each HTTP method. +accept_resource(Req, State) -> + case call(Req, State, content_types_accepted) of + no_call -> + respond(Req, State, 415); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {CTA, Req2, State2} -> + CTA2 = [normalize_content_types(P) || P <- CTA], + try cowboy_req:parse_header(<<"content-type">>, Req2) of + %% We do not match against the boundary parameter for multipart. + {Type = <<"multipart">>, SubType, Params} -> + ContentType = {Type, SubType, lists:keydelete(<<"boundary">>, 1, Params)}, + choose_content_type(Req2, State2, ContentType, CTA2); + ContentType -> + choose_content_type(Req2, State2, ContentType, CTA2) + catch _:_ -> + respond(Req2, State2, 415) + end + end. + +%% The special content type '*' will always match. It can be used as a +%% catch-all content type for accepting any kind of request content. +%% Note that because it will always match, it should be the last of the +%% list of content types, otherwise it'll shadow the ones following. +choose_content_type(Req, State, _ContentType, []) -> + respond(Req, State, 415); +choose_content_type(Req, State, ContentType, [{Accepted, Fun}|_Tail]) + when Accepted =:= '*'; Accepted =:= ContentType -> + process_content_type(Req, State, Fun); +%% The special parameter '*' will always match any kind of content type +%% parameters. +%% Note that because it will always match, it should be the last of the +%% list for specific content type, otherwise it'll shadow the ones following. +choose_content_type(Req, State, {Type, SubType, Param}, + [{{Type, SubType, AcceptedParam}, Fun}|_Tail]) + when AcceptedParam =:= '*'; AcceptedParam =:= Param -> + process_content_type(Req, State, Fun); +choose_content_type(Req, State, ContentType, [_Any|Tail]) -> + choose_content_type(Req, State, ContentType, Tail). 
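+
+%% Tying the above together, a handler accepting JSON request bodies could
+%% export something along these lines (from_json/2 is an arbitrary name
+%% and the body handling is only a sketch):
+%%
+%%   content_types_accepted(Req, State) ->
+%%       {[{{<<"application">>, <<"json">>, '*'}, from_json}], Req, State}.
+%%
+%%   from_json(Req0, State) ->
+%%       {ok, _Body, Req} = cowboy_req:read_body(Req0),
+%%       {true, Req, State}.
+%%
+%% Returning true from the accept callback leads to a 201, 204 or 200
+%% reply depending on whether the resource existed and whether a location
+%% header or response body was set.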
+ +process_content_type(Req, State=#state{method=Method, exists=Exists}, Fun) -> + try case call(Req, State, Fun) of + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {true, Req2, State2} when Exists -> + next(Req2, State2, fun has_resp_body/2); + {true, Req2, State2} -> + next(Req2, State2, fun maybe_created/2); + {false, Req2, State2} -> + respond(Req2, State2, 400); + {{created, ResURL}, Req2, State2} when Method =:= <<"POST">> -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + respond(Req3, State2, 201); + {{see_other, ResURL}, Req2, State2} when Method =:= <<"POST">> -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + respond(Req3, State2, 303); + {{true, ResURL}, Req2, State2} when Method =:= <<"POST">> -> + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + if + Exists -> respond(Req3, State2, 303); + true -> respond(Req3, State2, 201) + end + end catch Class:Reason = {case_clause, no_call}:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +%% If PUT was used then the resource has been created at the current URL. +%% Otherwise, if a location header has been set then the resource has been +%% created at a new URL. If not, send a 200 or 204 as expected from a +%% POST or PATCH request. +maybe_created(Req, State=#state{method= <<"PUT">>}) -> + respond(Req, State, 201); +maybe_created(Req, State) -> + case cowboy_req:has_resp_header(<<"location">>, Req) of + true -> respond(Req, State, 201); + false -> has_resp_body(Req, State) + end. + +has_resp_body(Req, State) -> + case cowboy_req:has_resp_body(Req) of + true -> multiple_choices(Req, State); + false -> respond(Req, State, 204) + end. + +%% Set the Etag header if any for the response provided. +set_resp_body_etag(Req, State) -> + try set_resp_etag(Req, State) of + {Req2, State2} -> + set_resp_body_last_modified(Req2, State2) + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +%% Set the Last-Modified header if any for the response provided. +set_resp_body_last_modified(Req, State) -> + try last_modified(Req, State) of + {LastModified, Req2, State2} -> + case LastModified of + LastModified when is_atom(LastModified) -> + set_resp_body_expires(Req2, State2); + LastModified -> + LastModifiedBin = cowboy_clock:rfc1123(LastModified), + Req3 = cowboy_req:set_resp_header( + <<"last-modified">>, LastModifiedBin, Req2), + set_resp_body_expires(Req3, State2) + end + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +%% Set the Expires header if any for the response provided. +set_resp_body_expires(Req, State) -> + try set_resp_expires(Req, State) of + {Req2, State2} -> + if_range(Req2, State2) + catch Class:Reason:Stacktrace -> + error_terminate(Req, State, Class, Reason, Stacktrace) + end. + +%% When both the if-range and range headers are set, we perform +%% a strong comparison. If it fails, we send a full response. +if_range(Req=#{headers := #{<<"if-range">> := _, <<"range">> := _}}, + State=#state{etag=Etag}) -> + try cowboy_req:parse_header(<<"if-range">>, Req) of + %% Strong etag comparison is an exact match with the generate_etag result. 
+ Etag={strong, _} -> + range(Req, State); + %% We cannot do a strong date comparison because we have + %% no way of knowing whether the representation changed + %% twice during the second covered by the presented + %% validator. (RFC7232 2.2.2) + _ -> + set_resp_body(Req, State) + catch _:_ -> + set_resp_body(Req, State) + end; +if_range(Req, State) -> + range(Req, State). + +range(Req, State=#state{ranges_a=[]}) -> + set_resp_body(Req, State); +range(Req, State) -> + try cowboy_req:parse_header(<<"range">>, Req) of + undefined -> + set_resp_body(Req, State); + %% @todo Maybe change parse_header to return <<"bytes">> in 3.0. + {bytes, BytesRange} -> + choose_range(Req, State, {<<"bytes">>, BytesRange}); + Range -> + choose_range(Req, State, Range) + catch _:_ -> + %% We send a 416 response back when we can't parse the + %% range header at all. I'm not sure this is the right + %% way to go but at least this can help clients identify + %% what went wrong when their range requests never work. + range_not_satisfiable(Req, State, undefined) + end. + +choose_range(Req, State=#state{ranges_a=RangesAccepted}, Range={RangeUnit, _}) -> + case lists:keyfind(RangeUnit, 1, RangesAccepted) of + {_, Callback} -> + %% We pass the selected range onward in the Req. + range_satisfiable(Req#{range => Range}, State, Callback); + false -> + set_resp_body(Req, State) + end. + +range_satisfiable(Req, State, Callback) -> + case call(Req, State, range_satisfiable) of + no_call -> + set_ranged_body(Req, State, Callback); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {true, Req2, State2} -> + set_ranged_body(Req2, State2, Callback); + {false, Req2, State2} -> + range_not_satisfiable(Req2, State2, undefined); + {{false, Int}, Req2, State2} when is_integer(Int) -> + range_not_satisfiable(Req2, State2, [<<"*/">>, integer_to_binary(Int)]); + {{false, Iodata}, Req2, State2} when is_binary(Iodata); is_list(Iodata) -> + range_not_satisfiable(Req2, State2, Iodata) + end. + +%% When the callback selected is 'auto' and the range unit +%% is bytes, we call the normal provide callback and split +%% the content automatically. +set_ranged_body(Req=#{range := {<<"bytes">>, _}}, State, auto) -> + set_ranged_body_auto(Req, State); +set_ranged_body(Req, State, Callback) -> + set_ranged_body_callback(Req, State, Callback). + +set_ranged_body_auto(Req, State=#state{handler=Handler, content_type_a={_, Callback}}) -> + try case call(Req, State, Callback) of + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {Body, Req2, State2} -> + maybe_set_ranged_body_auto(Req2, State2, Body) + end catch Class:{case_clause, no_call}:Stacktrace -> + error_terminate(Req, State, Class, {error, {missing_callback, {Handler, Callback, 2}}, + 'A callback specified in content_types_provided/2 is not exported.'}, + Stacktrace) + end. 
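+
+%% In the auto case the normal ProvideCallback is called and its result is
+%% split below. Both iodata and sendfile bodies are supported; as a sketch,
+%% a provide callback could return (file path and callback name are
+%% illustrative):
+%%
+%%   provide_file(Req, State) ->
+%%       Size = filelib:file_size("priv/large.bin"),
+%%       {{sendfile, 0, Size, "priv/large.bin"}, Req, State}.
+%%
+%% The functions below then produce either a single 206 part or a
+%% multipart/byteranges response.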
+ +maybe_set_ranged_body_auto(Req=#{range := {_, Ranges}}, State, Body) -> + Size = case Body of + {sendfile, _, Bytes, _} -> Bytes; + _ -> iolist_size(Body) + end, + Checks = [case Range of + {From, infinity} -> From < Size; + {From, To} -> (From < Size) andalso (From =< To) andalso (To =< Size); + Neg -> (Neg =/= 0) andalso (-Neg < Size) + end || Range <- Ranges], + case lists:usort(Checks) of + [true] -> set_ranged_body_auto(Req, State, Body); + _ -> range_not_satisfiable(Req, State, [<<"*/">>, integer_to_binary(Size)]) + end. + +%% We might also want to have some checks about range order, +%% number of ranges, and perhaps also join ranges that are +%% too close into one contiguous range. Some of these can +%% be done before calling the ProvideCallback. + +set_ranged_body_auto(Req=#{range := {_, Ranges}}, State, Body) -> + Parts = [ranged_partition(Range, Body) || Range <- Ranges], + case Parts of + [OnePart] -> set_one_ranged_body(Req, State, OnePart); + _ when is_tuple(Body) -> send_multipart_ranged_body(Req, State, Parts); + _ -> set_multipart_ranged_body(Req, State, Parts) + end. + +ranged_partition(Range, {sendfile, Offset0, Bytes0, Path}) -> + {From, To, Offset, Bytes} = case Range of + {From0, infinity} -> {From0, Bytes0 - 1, Offset0 + From0, Bytes0 - From0}; + {From0, To0} -> {From0, To0, Offset0 + From0, 1 + To0 - From0}; + Neg -> {Bytes0 + Neg, Bytes0 - 1, Offset0 + Bytes0 + Neg, -Neg} + end, + {{From, To, Bytes0}, {sendfile, Offset, Bytes, Path}}; +ranged_partition(Range, Data0) -> + Total = iolist_size(Data0), + {From, To, Data} = case Range of + {From0, infinity} -> + {_, Data1} = cow_iolists:split(From0, Data0), + {From0, Total - 1, Data1}; + {From0, To0} -> + {_, Data1} = cow_iolists:split(From0, Data0), + {Data2, _} = cow_iolists:split(To0 - From0 + 1, Data1), + {From0, To0, Data2}; + Neg -> + {_, Data1} = cow_iolists:split(Total + Neg, Data0), + {Total + Neg, Total - 1, Data1} + end, + {{From, To, Total}, Data}. + +-ifdef(TEST). +ranged_partition_test_() -> + Tests = [ + %% Sendfile with open-ended range. + {{0, infinity}, {sendfile, 0, 12, "t"}, {{0, 11, 12}, {sendfile, 0, 12, "t"}}}, + {{6, infinity}, {sendfile, 0, 12, "t"}, {{6, 11, 12}, {sendfile, 6, 6, "t"}}}, + {{11, infinity}, {sendfile, 0, 12, "t"}, {{11, 11, 12}, {sendfile, 11, 1, "t"}}}, + %% Sendfile with open-ended range. Sendfile tuple has an offset originally. + {{0, infinity}, {sendfile, 3, 12, "t"}, {{0, 11, 12}, {sendfile, 3, 12, "t"}}}, + {{6, infinity}, {sendfile, 3, 12, "t"}, {{6, 11, 12}, {sendfile, 9, 6, "t"}}}, + {{11, infinity}, {sendfile, 3, 12, "t"}, {{11, 11, 12}, {sendfile, 14, 1, "t"}}}, + %% Sendfile with a specific range. + {{0, 11}, {sendfile, 0, 12, "t"}, {{0, 11, 12}, {sendfile, 0, 12, "t"}}}, + {{6, 11}, {sendfile, 0, 12, "t"}, {{6, 11, 12}, {sendfile, 6, 6, "t"}}}, + {{11, 11}, {sendfile, 0, 12, "t"}, {{11, 11, 12}, {sendfile, 11, 1, "t"}}}, + {{1, 10}, {sendfile, 0, 12, "t"}, {{1, 10, 12}, {sendfile, 1, 10, "t"}}}, + %% Sendfile with a specific range. Sendfile tuple has an offset originally. + {{0, 11}, {sendfile, 3, 12, "t"}, {{0, 11, 12}, {sendfile, 3, 12, "t"}}}, + {{6, 11}, {sendfile, 3, 12, "t"}, {{6, 11, 12}, {sendfile, 9, 6, "t"}}}, + {{11, 11}, {sendfile, 3, 12, "t"}, {{11, 11, 12}, {sendfile, 14, 1, "t"}}}, + {{1, 10}, {sendfile, 3, 12, "t"}, {{1, 10, 12}, {sendfile, 4, 10, "t"}}}, + %% Sendfile with negative range. 
+ {-12, {sendfile, 0, 12, "t"}, {{0, 11, 12}, {sendfile, 0, 12, "t"}}}, + {-6, {sendfile, 0, 12, "t"}, {{6, 11, 12}, {sendfile, 6, 6, "t"}}}, + {-1, {sendfile, 0, 12, "t"}, {{11, 11, 12}, {sendfile, 11, 1, "t"}}}, + %% Sendfile with negative range. Sendfile tuple has an offset originally. + {-12, {sendfile, 3, 12, "t"}, {{0, 11, 12}, {sendfile, 3, 12, "t"}}}, + {-6, {sendfile, 3, 12, "t"}, {{6, 11, 12}, {sendfile, 9, 6, "t"}}}, + {-1, {sendfile, 3, 12, "t"}, {{11, 11, 12}, {sendfile, 14, 1, "t"}}}, + %% Iodata with open-ended range. + {{0, infinity}, <<"Hello world!">>, {{0, 11, 12}, <<"Hello world!">>}}, + {{6, infinity}, <<"Hello world!">>, {{6, 11, 12}, <<"world!">>}}, + {{11, infinity}, <<"Hello world!">>, {{11, 11, 12}, <<"!">>}}, + %% Iodata with a specific range. The resulting data is + %% wrapped in a list because of how cow_iolists:split/2 works. + {{0, 11}, <<"Hello world!">>, {{0, 11, 12}, [<<"Hello world!">>]}}, + {{6, 11}, <<"Hello world!">>, {{6, 11, 12}, [<<"world!">>]}}, + {{11, 11}, <<"Hello world!">>, {{11, 11, 12}, [<<"!">>]}}, + {{1, 10}, <<"Hello world!">>, {{1, 10, 12}, [<<"ello world">>]}}, + %% Iodata with negative range. + {-12, <<"Hello world!">>, {{0, 11, 12}, <<"Hello world!">>}}, + {-6, <<"Hello world!">>, {{6, 11, 12}, <<"world!">>}}, + {-1, <<"Hello world!">>, {{11, 11, 12}, <<"!">>}} + ], + [{iolist_to_binary(io_lib:format("range ~p data ~p", [VR, VD])), + fun() -> R = ranged_partition(VR, VD) end} || {VR, VD, R} <- Tests]. +-endif. + +set_ranged_body_callback(Req, State=#state{handler=Handler}, Callback) -> + try case call(Req, State, Callback) of + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + %% When we receive a single range, we send it directly. + {[OneRange], Req2, State2} -> + set_one_ranged_body(Req2, State2, OneRange); + %% When we receive multiple ranges we have to send them as multipart/byteranges. + %% This also applies to non-bytes units. (RFC7233 A) If users don't want to use + %% this for non-bytes units they can always return a single range with a binary + %% content-range information. + {Ranges, Req2, State2} when length(Ranges) > 1 -> + %% We have to check whether there are sendfile tuples in the + %% ranges to be sent. If there are we must use stream_reply. + HasSendfile = [] =/= [true || {_, {sendfile, _, _, _}} <- Ranges], + case HasSendfile of + true -> send_multipart_ranged_body(Req2, State2, Ranges); + false -> set_multipart_ranged_body(Req2, State2, Ranges) + end + end catch Class:{case_clause, no_call}:Stacktrace -> + error_terminate(Req, State, Class, {error, {missing_callback, {Handler, Callback, 2}}, + 'A callback specified in ranges_provided/2 is not exported.'}, + Stacktrace) + end. + +set_one_ranged_body(Req0, State, OneRange) -> + {ContentRange, Body} = prepare_range(Req0, OneRange), + Req1 = cowboy_req:set_resp_header(<<"content-range">>, ContentRange, Req0), + Req = cowboy_req:set_resp_body(Body, Req1), + respond(Req, State, 206). 
+ +set_multipart_ranged_body(Req, State, [FirstRange|MoreRanges]) -> + Boundary = cow_multipart:boundary(), + ContentType = cowboy_req:resp_header(<<"content-type">>, Req), + {FirstContentRange, FirstPartBody} = prepare_range(Req, FirstRange), + FirstPartHead = cow_multipart:first_part(Boundary, [ + {<<"content-type">>, ContentType}, + {<<"content-range">>, FirstContentRange} + ]), + MoreParts = [begin + {NextContentRange, NextPartBody} = prepare_range(Req, NextRange), + NextPartHead = cow_multipart:part(Boundary, [ + {<<"content-type">>, ContentType}, + {<<"content-range">>, NextContentRange} + ]), + [NextPartHead, NextPartBody] + end || NextRange <- MoreRanges], + Body = [FirstPartHead, FirstPartBody, MoreParts, cow_multipart:close(Boundary)], + Req2 = cowboy_req:set_resp_header(<<"content-type">>, + [<<"multipart/byteranges; boundary=">>, Boundary], Req), + Req3 = cowboy_req:set_resp_body(Body, Req2), + respond(Req3, State, 206). + +%% Similar to set_multipart_ranged_body except we have to stream +%% the data because the parts contain sendfile tuples. +send_multipart_ranged_body(Req, State, [FirstRange|MoreRanges]) -> + Boundary = cow_multipart:boundary(), + ContentType = cowboy_req:resp_header(<<"content-type">>, Req), + Req2 = cowboy_req:set_resp_header(<<"content-type">>, + [<<"multipart/byteranges; boundary=">>, Boundary], Req), + Req3 = cowboy_req:stream_reply(206, Req2), + {FirstContentRange, FirstPartBody} = prepare_range(Req, FirstRange), + FirstPartHead = cow_multipart:first_part(Boundary, [ + {<<"content-type">>, ContentType}, + {<<"content-range">>, FirstContentRange} + ]), + cowboy_req:stream_body(FirstPartHead, nofin, Req3), + cowboy_req:stream_body(FirstPartBody, nofin, Req3), + _ = [begin + {NextContentRange, NextPartBody} = prepare_range(Req, NextRange), + NextPartHead = cow_multipart:part(Boundary, [ + {<<"content-type">>, ContentType}, + {<<"content-range">>, NextContentRange} + ]), + cowboy_req:stream_body(NextPartHead, nofin, Req3), + cowboy_req:stream_body(NextPartBody, nofin, Req3), + [NextPartHead, NextPartBody] + end || NextRange <- MoreRanges], + cowboy_req:stream_body(cow_multipart:close(Boundary), fin, Req3), + terminate(Req3, State). + +prepare_range(#{range := {RangeUnit, _}}, {{From, To, Total0}, Body}) -> + Total = case Total0 of + '*' -> <<"*">>; + _ -> integer_to_binary(Total0) + end, + ContentRange = [RangeUnit, $\s, integer_to_binary(From), + $-, integer_to_binary(To), $/, Total], + {ContentRange, Body}; +prepare_range(#{range := {RangeUnit, _}}, {RangeData, Body}) -> + {[RangeUnit, $\s, RangeData], Body}. + +%% We send the content-range header when we can on error. +range_not_satisfiable(Req, State, undefined) -> + respond(Req, State, 416); +range_not_satisfiable(Req0=#{range := {RangeUnit, _}}, State, RangeData) -> + Req = cowboy_req:set_resp_header(<<"content-range">>, + [RangeUnit, $\s, RangeData], Req0), + respond(Req, State, 416). + +%% Set the response headers and call the callback found using +%% content_types_provided/2 to obtain the request body and add +%% it to the response. 
+set_resp_body(Req, State=#state{handler=Handler, content_type_a={_, Callback}}) -> + try case call(Req, State, Callback) of + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {Body, Req2, State2} -> + Req3 = cowboy_req:set_resp_body(Body, Req2), + multiple_choices(Req3, State2) + end catch Class:{case_clause, no_call}:Stacktrace -> + error_terminate(Req, State, Class, {error, {missing_callback, {Handler, Callback, 2}}, + 'A callback specified in content_types_provided/2 is not exported.'}, + Stacktrace) + end. + +multiple_choices(Req, State) -> + expect(Req, State, multiple_choices, false, 200, 300). + +%% Response utility functions. + +set_resp_etag(Req, State) -> + {Etag, Req2, State2} = generate_etag(Req, State), + case Etag of + undefined -> + {Req2, State2}; + Etag -> + Req3 = cowboy_req:set_resp_header( + <<"etag">>, encode_etag(Etag), Req2), + {Req3, State2} + end. + +-spec encode_etag({strong | weak, binary()}) -> iolist(). +encode_etag({strong, Etag}) -> [$",Etag,$"]; +encode_etag({weak, Etag}) -> ["W/\"",Etag,$"]. + +set_resp_expires(Req, State) -> + {Expires, Req2, State2} = expires(Req, State), + case Expires of + Expires when is_atom(Expires) -> + {Req2, State2}; + Expires when is_binary(Expires) -> + Req3 = cowboy_req:set_resp_header( + <<"expires">>, Expires, Req2), + {Req3, State2}; + Expires -> + ExpiresBin = cowboy_clock:rfc1123(Expires), + Req3 = cowboy_req:set_resp_header( + <<"expires">>, ExpiresBin, Req2), + {Req3, State2} + end. + +%% Info retrieval. No logic. + +generate_etag(Req, State=#state{etag=no_call}) -> + {undefined, Req, State}; +generate_etag(Req, State=#state{etag=undefined}) -> + case unsafe_call(Req, State, generate_etag) of + no_call -> + {undefined, Req, State#state{etag=no_call}}; + {Etag, Req2, State2} when is_binary(Etag) -> + Etag2 = cow_http_hd:parse_etag(Etag), + {Etag2, Req2, State2#state{etag=Etag2}}; + {Etag, Req2, State2} -> + {Etag, Req2, State2#state{etag=Etag}} + end; +generate_etag(Req, State=#state{etag=Etag}) -> + {Etag, Req, State}. + +last_modified(Req, State=#state{last_modified=no_call}) -> + {undefined, Req, State}; +last_modified(Req, State=#state{last_modified=undefined}) -> + case unsafe_call(Req, State, last_modified) of + no_call -> + {undefined, Req, State#state{last_modified=no_call}}; + {LastModified, Req2, State2} -> + {LastModified, Req2, State2#state{last_modified=LastModified}} + end; +last_modified(Req, State=#state{last_modified=LastModified}) -> + {LastModified, Req, State}. + +expires(Req, State=#state{expires=no_call}) -> + {undefined, Req, State}; +expires(Req, State=#state{expires=undefined}) -> + case unsafe_call(Req, State, expires) of + no_call -> + {undefined, Req, State#state{expires=no_call}}; + {Expires, Req2, State2} -> + {Expires, Req2, State2#state{expires=Expires}} + end; +expires(Req, State=#state{expires=Expires}) -> + {Expires, Req, State}. + +%% REST primitives. + +expect(Req, State, Callback, Expected, OnTrue, OnFalse) -> + case call(Req, State, Callback) of + no_call -> + next(Req, State, OnTrue); + {stop, Req2, State2} -> + terminate(Req2, State2); + {Switch, Req2, State2} when element(1, Switch) =:= switch_handler -> + switch_handler(Switch, Req2, State2); + {Expected, Req2, State2} -> + next(Req2, State2, OnTrue); + {_Unexpected, Req2, State2} -> + next(Req2, State2, OnFalse) + end. 
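+
+%% Among the info retrieval functions above, expires/2 may return either a
+%% datetime or a pre-formatted binary, for example (value is illustrative):
+%%
+%%   expires(Req, State) ->
+%%       {{{2022, 1, 1}, {0, 0, 0}}, Req, State}.
+%%
+%% A binary return value is written to the expires header as-is, while a
+%% datetime is formatted with cowboy_clock:rfc1123/1.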
+ +call(Req0, State=#state{handler=Handler, + handler_state=HandlerState0}, Callback) -> + case erlang:function_exported(Handler, Callback, 2) of + true -> + try Handler:Callback(Req0, HandlerState0) of + no_call -> + no_call; + {Result, Req, HandlerState} -> + {Result, Req, State#state{handler_state=HandlerState}} + catch Class:Reason:Stacktrace -> + error_terminate(Req0, State, Class, Reason, Stacktrace) + end; + false -> + no_call + end. + +unsafe_call(Req0, State=#state{handler=Handler, + handler_state=HandlerState0}, Callback) -> + case erlang:function_exported(Handler, Callback, 2) of + false -> + no_call; + true -> + case Handler:Callback(Req0, HandlerState0) of + no_call -> + no_call; + {Result, Req, HandlerState} -> + {Result, Req, State#state{handler_state=HandlerState}} + end + end. + +next(Req, State, Next) when is_function(Next) -> + Next(Req, State); +next(Req, State, StatusCode) when is_integer(StatusCode) -> + respond(Req, State, StatusCode). + +respond(Req0, State, StatusCode) -> + %% We remove the content-type header when there is no body, + %% except when the status code is 200 because it might have + %% been intended (for example sending an empty file). + Req = case cowboy_req:has_resp_body(Req0) of + true when StatusCode =:= 200 -> Req0; + true -> Req0; + false -> cowboy_req:delete_resp_header(<<"content-type">>, Req0) + end, + terminate(cowboy_req:reply(StatusCode, Req), State). + +switch_handler({switch_handler, Mod}, Req, #state{handler_state=HandlerState}) -> + {Mod, Req, HandlerState}; +switch_handler({switch_handler, Mod, Opts}, Req, #state{handler_state=HandlerState}) -> + {Mod, Req, HandlerState, Opts}. + +-spec error_terminate(cowboy_req:req(), #state{}, atom(), any(), any()) -> no_return(). +error_terminate(Req, #state{handler=Handler, handler_state=HandlerState}, Class, Reason, Stacktrace) -> + cowboy_handler:terminate({crash, Class, Reason}, Req, HandlerState, Handler), + erlang:raise(Class, Reason, Stacktrace). + +terminate(Req, #state{handler=Handler, handler_state=HandlerState}) -> + Result = cowboy_handler:terminate(normal, Req, HandlerState, Handler), + {ok, Req, Result}. diff --git a/src/wsSrv/cowboy_router.erl b/src/wsSrv/cowboy_router.erl new file mode 100644 index 0000000..0b7fe41 --- /dev/null +++ b/src/wsSrv/cowboy_router.erl @@ -0,0 +1,603 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Routing middleware. +%% +%% Resolve the handler to be used for the request based on the +%% routing information found in the dispatch environment value. +%% When found, the handler module and associated data are added to +%% the environment as the handler and handler_opts values +%% respectively. +%% +%% If the route cannot be found, processing stops with either +%% a 400 or a 404 reply. +-module(cowboy_router). 
+-behaviour(cowboy_middleware). + +-export([compile/1]). +-export([execute/2]). + +-type bindings() :: #{atom() => any()}. +-type tokens() :: [binary()]. +-export_type([bindings/0]). +-export_type([tokens/0]). + +-type route_match() :: '_' | iodata(). +-type route_path() :: {Path::route_match(), Handler::module(), Opts::any()} + | {Path::route_match(), cowboy:fields(), Handler::module(), Opts::any()}. +-type route_rule() :: {Host::route_match(), Paths::[route_path()]} + | {Host::route_match(), cowboy:fields(), Paths::[route_path()]}. +-type routes() :: [route_rule()]. +-export_type([routes/0]). + +-type dispatch_match() :: '_' | <<_:8>> | [binary() | '_' | '...' | atom()]. +-type dispatch_path() :: {dispatch_match(), cowboy:fields(), module(), any()}. +-type dispatch_rule() :: {Host::dispatch_match(), cowboy:fields(), Paths::[dispatch_path()]}. +-opaque dispatch_rules() :: [dispatch_rule()]. +-export_type([dispatch_rules/0]). + +-spec compile(routes()) -> dispatch_rules(). +compile(Routes) -> + compile(Routes, []). + +compile([], Acc) -> + lists:reverse(Acc); +compile([{Host, Paths}|Tail], Acc) -> + compile([{Host, [], Paths}|Tail], Acc); +compile([{HostMatch, Fields, Paths}|Tail], Acc) -> + HostRules = case HostMatch of + '_' -> '_'; + _ -> compile_host(HostMatch) + end, + PathRules = compile_paths(Paths, []), + Hosts = case HostRules of + '_' -> [{'_', Fields, PathRules}]; + _ -> [{R, Fields, PathRules} || R <- HostRules] + end, + compile(Tail, Hosts ++ Acc). + +compile_host(HostMatch) when is_list(HostMatch) -> + compile_host(list_to_binary(HostMatch)); +compile_host(HostMatch) when is_binary(HostMatch) -> + compile_rules(HostMatch, $., [], [], <<>>). + +compile_paths([], Acc) -> + lists:reverse(Acc); +compile_paths([{PathMatch, Handler, Opts}|Tail], Acc) -> + compile_paths([{PathMatch, [], Handler, Opts}|Tail], Acc); +compile_paths([{PathMatch, Fields, Handler, Opts}|Tail], Acc) + when is_list(PathMatch) -> + compile_paths([{iolist_to_binary(PathMatch), + Fields, Handler, Opts}|Tail], Acc); +compile_paths([{'_', Fields, Handler, Opts}|Tail], Acc) -> + compile_paths(Tail, [{'_', Fields, Handler, Opts}] ++ Acc); +compile_paths([{<<"*">>, Fields, Handler, Opts}|Tail], Acc) -> + compile_paths(Tail, [{<<"*">>, Fields, Handler, Opts}|Acc]); +compile_paths([{<< $/, PathMatch/bits >>, Fields, Handler, Opts}|Tail], + Acc) -> + PathRules = compile_rules(PathMatch, $/, [], [], <<>>), + Paths = [{lists:reverse(R), Fields, Handler, Opts} || R <- PathRules], + compile_paths(Tail, Paths ++ Acc); +compile_paths([{PathMatch, _, _, _}|_], _) -> + error({badarg, "The following route MUST begin with a slash: " + ++ binary_to_list(PathMatch)}). + +compile_rules(<<>>, _, Segments, Rules, <<>>) -> + [Segments|Rules]; +compile_rules(<<>>, _, Segments, Rules, Acc) -> + [[Acc|Segments]|Rules]; +compile_rules(<< S, Rest/bits >>, S, Segments, Rules, <<>>) -> + compile_rules(Rest, S, Segments, Rules, <<>>); +compile_rules(<< S, Rest/bits >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, [Acc|Segments], Rules, <<>>); +%% Colon on path segment start is special, otherwise allow. 
+compile_rules(<< $:, Rest/bits >>, S, Segments, Rules, <<>>) -> + {NameBin, Rest2} = compile_binding(Rest, S, <<>>), + Name = binary_to_atom(NameBin, utf8), + compile_rules(Rest2, S, Segments, Rules, Name); +compile_rules(<< $[, $., $., $., $], Rest/bits >>, S, Segments, Rules, Acc) + when Acc =:= <<>> -> + compile_rules(Rest, S, ['...'|Segments], Rules, Acc); +compile_rules(<< $[, $., $., $., $], Rest/bits >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, ['...', Acc|Segments], Rules, Acc); +compile_rules(<< $[, S, Rest/bits >>, S, Segments, Rules, Acc) -> + compile_brackets(Rest, S, [Acc|Segments], Rules); +compile_rules(<< $[, Rest/bits >>, S, Segments, Rules, <<>>) -> + compile_brackets(Rest, S, Segments, Rules); +%% Open bracket in the middle of a segment. +compile_rules(<< $[, _/bits >>, _, _, _, _) -> + error(badarg); +%% Missing an open bracket. +compile_rules(<< $], _/bits >>, _, _, _, _) -> + error(badarg); +compile_rules(<< C, Rest/bits >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, Segments, Rules, << Acc/binary, C >>). + +%% Everything past $: until the segment separator ($. for hosts, +%% $/ for paths) or $[ or $] or end of binary is the binding name. +compile_binding(<<>>, _, <<>>) -> + error(badarg); +compile_binding(Rest = <<>>, _, Acc) -> + {Acc, Rest}; +compile_binding(Rest = << C, _/bits >>, S, Acc) + when C =:= S; C =:= $[; C =:= $] -> + {Acc, Rest}; +compile_binding(<< C, Rest/bits >>, S, Acc) -> + compile_binding(Rest, S, << Acc/binary, C >>). + +compile_brackets(Rest, S, Segments, Rules) -> + {Bracket, Rest2} = compile_brackets_split(Rest, <<>>, 0), + Rules1 = compile_rules(Rest2, S, Segments, [], <<>>), + Rules2 = compile_rules(<< Bracket/binary, Rest2/binary >>, + S, Segments, [], <<>>), + Rules ++ Rules2 ++ Rules1. + +%% Missing a close bracket. +compile_brackets_split(<<>>, _, _) -> + error(badarg); +%% Make sure we don't confuse the closing bracket we're looking for. +compile_brackets_split(<< C, Rest/bits >>, Acc, N) when C =:= $[ -> + compile_brackets_split(Rest, << Acc/binary, C >>, N + 1); +compile_brackets_split(<< C, Rest/bits >>, Acc, N) when C =:= $], N > 0 -> + compile_brackets_split(Rest, << Acc/binary, C >>, N - 1); +%% That's the right one. +compile_brackets_split(<< $], Rest/bits >>, Acc, 0) -> + {Acc, Rest}; +compile_brackets_split(<< C, Rest/bits >>, Acc, N) -> + compile_brackets_split(Rest, << Acc/binary, C >>, N). + +-spec execute(Req, Env) + -> {ok, Req, Env} | {stop, Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +execute(Req=#{host := Host, path := Path}, Env=#{dispatch := Dispatch0}) -> + Dispatch = case Dispatch0 of + {persistent_term, Key} -> persistent_term:get(Key); + _ -> Dispatch0 + end, + case match(Dispatch, Host, Path) of + {ok, Handler, HandlerOpts, Bindings, HostInfo, PathInfo} -> + {ok, Req#{ + host_info => HostInfo, + path_info => PathInfo, + bindings => Bindings + }, Env#{ + handler => Handler, + handler_opts => HandlerOpts + }}; + {error, notfound, host} -> + {stop, cowboy_req:reply(400, Req)}; + {error, badrequest, path} -> + {stop, cowboy_req:reply(400, Req)}; + {error, notfound, path} -> + {stop, cowboy_req:reply(404, Req)} + end. + +%% Internal. + +%% Match hostname tokens and path tokens against dispatch rules. +%% +%% It is typically used for matching tokens for the hostname and path of +%% the request against a global dispatch rule for your listener. +%% +%% Dispatch rules are a list of {Hostname, PathRules} tuples, with +%% PathRules being a list of {Path, HandlerMod, HandlerOpts}. 
+%%
+%% Hostname and Path are match rules and can be either the
+%% atom '_', which matches everything, `<<"*">>', which matches the
+%% wildcard path, or a list of tokens.
+%%
+%% Each token can be either a binary, the atom '_',
+%% the atom '...' or a named atom. A binary token must match exactly,
+%% '_' matches everything for a single token, '...' matches
+%% everything for the rest of the tokens and a named atom will bind the
+%% corresponding token value and return it.
+%%
+%% The list of hostname tokens is reversed before matching. For example, if
+%% we were to match "www.ninenines.eu", we would first match "eu", then
+%% "ninenines", then "www". This means that in the context of hostnames,
+%% the '...' atom properly matches the lower levels of the domain,
+%% as would be expected.
+%%
+%% When a result is found, this function will return the handler module and
+%% options found in the dispatch list, a map of bindings and
+%% the tokens that were matched by the '...' atom for both the
+%% hostname and path. A short usage sketch follows match_path/4 below.
+-spec match(dispatch_rules(), Host::binary() | tokens(), Path::binary())
+	-> {ok, module(), any(), bindings(),
+		HostInfo::undefined | tokens(),
+		PathInfo::undefined | tokens()}
+	| {error, notfound, host} | {error, notfound, path}
+	| {error, badrequest, path}.
+match([], _, _) ->
+	{error, notfound, host};
+%% If the host is '_' then there can be no constraints.
+match([{'_', [], PathMatchs}|_Tail], _, Path) ->
+	match_path(PathMatchs, undefined, Path, #{});
+match([{HostMatch, Fields, PathMatchs}|Tail], Tokens, Path)
+		when is_list(Tokens) ->
+	case list_match(Tokens, HostMatch, #{}) of
+		false ->
+			match(Tail, Tokens, Path);
+		{true, Bindings, HostInfo} ->
+			HostInfo2 = case HostInfo of
+				undefined -> undefined;
+				_ -> lists:reverse(HostInfo)
+			end,
+			case check_constraints(Fields, Bindings) of
+				{ok, Bindings2} ->
+					match_path(PathMatchs, HostInfo2, Path, Bindings2);
+				nomatch ->
+					match(Tail, Tokens, Path)
+			end
+	end;
+match(Dispatch, Host, Path) ->
+	match(Dispatch, split_host(Host), Path).
+
+-spec match_path([dispatch_path()],
+	HostInfo::undefined | tokens(), binary() | tokens(), bindings())
+	-> {ok, module(), any(), bindings(),
+		HostInfo::undefined | tokens(),
+		PathInfo::undefined | tokens()}
+	| {error, notfound, path} | {error, badrequest, path}.
+match_path([], _, _, _) ->
+	{error, notfound, path};
+%% If the path is '_' then there can be no constraints.
+match_path([{'_', [], Handler, Opts}|_Tail], HostInfo, _, Bindings) ->
+	{ok, Handler, Opts, Bindings, HostInfo, undefined};
+match_path([{<<"*">>, _, Handler, Opts}|_Tail], HostInfo, <<"*">>, Bindings) ->
+	{ok, Handler, Opts, Bindings, HostInfo, undefined};
+match_path([_|Tail], HostInfo, <<"*">>, Bindings) ->
+	match_path(Tail, HostInfo, <<"*">>, Bindings);
+match_path([{PathMatch, Fields, Handler, Opts}|Tail], HostInfo, Tokens,
+		Bindings) when is_list(Tokens) ->
+	case list_match(Tokens, PathMatch, Bindings) of
+		false ->
+			match_path(Tail, HostInfo, Tokens, Bindings);
+		{true, PathBinds, PathInfo} ->
+			case check_constraints(Fields, PathBinds) of
+				{ok, PathBinds2} ->
+					{ok, Handler, Opts, PathBinds2, HostInfo, PathInfo};
+				nomatch ->
+					match_path(Tail, HostInfo, Tokens, Bindings)
+			end
+	end;
+match_path(_Dispatch, _HostInfo, badrequest, _Bindings) ->
+	{error, badrequest, path};
+match_path(Dispatch, HostInfo, Path, Bindings) ->
+	match_path(Dispatch, HostInfo, split_path(Path), Bindings).
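As a quick illustration of the compilation and matching described above, here is a minimal sketch; the host, handler and binding names are made up, and it assumes the calls run where compile/1 and match/3 are visible, for example from this module's test suite:

    %% Hypothetical routes: an optional "www." host prefix, a path binding
    %% and a '...' catch-all suffix.
    routing_sketch() ->
        Dispatch = compile([
            {"[www.]example.org", [
                {"/users/:id/friends", users_h, []},
                {"/static/[...]", static_h, []}
            ]}
        ]),
        %% Host tokens are matched in reverse order: "org", "example", "www".
        {ok, users_h, [], #{id := <<"42">>}, undefined, undefined} =
            match(Dispatch, <<"www.example.org">>, <<"/users/42/friends">>),
        %% Tokens consumed by '...' are returned as PathInfo.
        {ok, static_h, [], _, undefined, [<<"css">>, <<"site.css">>]} =
            match(Dispatch, <<"example.org">>, <<"/static/css/site.css">>),
        ok.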
+ +check_constraints([], Bindings) -> + {ok, Bindings}; +check_constraints([Field|Tail], Bindings) when is_atom(Field) -> + check_constraints(Tail, Bindings); +check_constraints([Field|Tail], Bindings) -> + Name = element(1, Field), + case Bindings of + #{Name := Value0} -> + Constraints = element(2, Field), + case cowboy_constraints:validate(Value0, Constraints) of + {ok, Value} -> + check_constraints(Tail, Bindings#{Name => Value}); + {error, _} -> + nomatch + end; + _ -> + check_constraints(Tail, Bindings) + end. + +-spec split_host(binary()) -> tokens(). +split_host(Host) -> + split_host(Host, []). + +split_host(Host, Acc) -> + case binary:match(Host, <<".">>) of + nomatch when Host =:= <<>> -> + Acc; + nomatch -> + [Host|Acc]; + {Pos, _} -> + << Segment:Pos/binary, _:8, Rest/bits >> = Host, + false = byte_size(Segment) == 0, + split_host(Rest, [Segment|Acc]) + end. + +%% Following RFC2396, this function may return path segments containing any +%% character, including / if, and only if, a / was escaped +%% and part of a path segment. +-spec split_path(binary()) -> tokens() | badrequest. +split_path(<< $/, Path/bits >>) -> + split_path(Path, []); +split_path(_) -> + badrequest. + +split_path(Path, Acc) -> + try + case binary:match(Path, <<"/">>) of + nomatch when Path =:= <<>> -> + remove_dot_segments(lists:reverse([cow_uri:urldecode(S) || S <- Acc]), []); + nomatch -> + remove_dot_segments(lists:reverse([cow_uri:urldecode(S) || S <- [Path|Acc]]), []); + {Pos, _} -> + << Segment:Pos/binary, _:8, Rest/bits >> = Path, + split_path(Rest, [Segment|Acc]) + end + catch error:_ -> + badrequest + end. + +remove_dot_segments([], Acc) -> + lists:reverse(Acc); +remove_dot_segments([<<".">>|Segments], Acc) -> + remove_dot_segments(Segments, Acc); +remove_dot_segments([<<"..">>|Segments], Acc=[]) -> + remove_dot_segments(Segments, Acc); +remove_dot_segments([<<"..">>|Segments], [_|Acc]) -> + remove_dot_segments(Segments, Acc); +remove_dot_segments([S|Segments], Acc) -> + remove_dot_segments(Segments, [S|Acc]). + +-ifdef(TEST). +remove_dot_segments_test_() -> + Tests = [ + {[<<"a">>, <<"b">>, <<"c">>, <<".">>, <<"..">>, <<"..">>, <<"g">>], [<<"a">>, <<"g">>]}, + {[<<"mid">>, <<"content=5">>, <<"..">>, <<"6">>], [<<"mid">>, <<"6">>]}, + {[<<"..">>, <<"a">>], [<<"a">>]} + ], + [fun() -> R = remove_dot_segments(S, []) end || {S, R} <- Tests]. +-endif. + +-spec list_match(tokens(), dispatch_match(), bindings()) + -> {true, bindings(), undefined | tokens()} | false. +%% Atom '...' matches any trailing path, stop right now. +list_match(List, ['...'], Binds) -> + {true, Binds, List}; +%% Atom '_' matches anything, continue. +list_match([_E|Tail], ['_'|TailMatch], Binds) -> + list_match(Tail, TailMatch, Binds); +%% Both values match, continue. +list_match([E|Tail], [E|TailMatch], Binds) -> + list_match(Tail, TailMatch, Binds); +%% Bind E to the variable name V and continue, +%% unless V was already defined and E isn't identical to the previous value. +list_match([E|Tail], [V|TailMatch], Binds) when is_atom(V) -> + case Binds of + %% @todo This isn't right, the constraint must be applied FIRST + %% otherwise we can't check for example ints in both host/path. + #{V := E} -> + list_match(Tail, TailMatch, Binds); + #{V := _} -> + false; + _ -> + list_match(Tail, TailMatch, Binds#{V => E}) + end; +%% Match complete. +list_match([], [], Binds) -> + {true, Binds, undefined}; +%% Values don't match, stop. +list_match(_List, _Match, _Binds) -> + false. + +%% Tests. + +-ifdef(TEST). 
+compile_test_() -> + Tests = [ + %% Match any host and path. + {[{'_', [{'_', h, o}]}], + [{'_', [], [{'_', [], h, o}]}]}, + {[{"cowboy.example.org", + [{"/", ha, oa}, {"/path/to/resource", hb, ob}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [ + {[], [], ha, oa}, + {[<<"path">>, <<"to">>, <<"resource">>], [], hb, ob}]}]}, + {[{'_', [{"/path/to/resource/", h, o}]}], + [{'_', [], [{[<<"path">>, <<"to">>, <<"resource">>], [], h, o}]}]}, + % Cyrillic from a latin1 encoded file. + {[{'_', [{[47,208,191,209,131,209,130,209,140,47,208,186,47,209,128, + 208,181,209,129,209,131,209,128,209,129,209,131,47], h, o}]}], + [{'_', [], [{[<<208,191,209,131,209,130,209,140>>, <<208,186>>, + <<209,128,208,181,209,129,209,131,209,128,209,129,209,131>>], + [], h, o}]}]}, + {[{"cowboy.example.org.", [{'_', h, o}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]}, + {[{".cowboy.example.org", [{'_', h, o}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]}, + % Cyrillic from a latin1 encoded file. + {[{[208,189,208,181,208,186,208,184,208,185,46,209,129,208,176, + 208,185,209,130,46,209,128,209,132,46], [{'_', h, o}]}], + [{[<<209,128,209,132>>, <<209,129,208,176,208,185,209,130>>, + <<208,189,208,181,208,186,208,184,208,185>>], + [], [{'_', [], h, o}]}]}, + {[{":subdomain.example.org", [{"/hats/:name/prices", h, o}]}], + [{[<<"org">>, <<"example">>, subdomain], [], [ + {[<<"hats">>, name, <<"prices">>], [], h, o}]}]}, + {[{"ninenines.:_", [{"/hats/:_", h, o}]}], + [{['_', <<"ninenines">>], [], [{[<<"hats">>, '_'], [], h, o}]}]}, + {[{"[www.]ninenines.eu", + [{"/horses", h, o}, {"/hats/[page/:number]", h, o}]}], [ + {[<<"eu">>, <<"ninenines">>], [], [ + {[<<"horses">>], [], h, o}, + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}, + {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [ + {[<<"horses">>], [], h, o}, + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}]}, + {[{'_', [{"/hats/:page/:number", h, o}]}], [{'_', [], [ + {[<<"hats">>, page, number], [], h, o}]}]}, + {[{'_', [{"/hats/[page/[:number]]", h, o}]}], [{'_', [], [ + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}]}, + {[{"[...]ninenines.eu", [{"/hats/[...]", h, o}]}], + [{[<<"eu">>, <<"ninenines">>, '...'], [], [ + {[<<"hats">>, '...'], [], h, o}]}]}, + %% Path segment containing a colon. + {[{'_', [{"/foo/bar:blah", h, o}]}], [{'_', [], [ + {[<<"foo">>, <<"bar:blah">>], [], h, o}]}]} + ], + [{lists:flatten(io_lib:format("~p", [Rt])), + fun() -> Rs = compile(Rt) end} || {Rt, Rs} <- Tests]. + +split_host_test_() -> + Tests = [ + {<<"">>, []}, + {<<"*">>, [<<"*">>]}, + {<<"cowboy.ninenines.eu">>, + [<<"eu">>, <<"ninenines">>, <<"cowboy">>]}, + {<<"ninenines.eu">>, + [<<"eu">>, <<"ninenines">>]}, + {<<"ninenines.eu.">>, + [<<"eu">>, <<"ninenines">>]}, + {<<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>, + [<<"z">>, <<"y">>, <<"x">>, <<"w">>, <<"v">>, <<"u">>, <<"t">>, + <<"s">>, <<"r">>, <<"q">>, <<"p">>, <<"o">>, <<"n">>, <<"m">>, + <<"l">>, <<"k">>, <<"j">>, <<"i">>, <<"h">>, <<"g">>, <<"f">>, + <<"e">>, <<"d">>, <<"c">>, <<"b">>, <<"a">>]} + ], + [{H, fun() -> R = split_host(H) end} || {H, R} <- Tests]. 
+ +split_path_test_() -> + Tests = [ + {<<"/">>, []}, + {<<"/extend//cowboy">>, [<<"extend">>, <<>>, <<"cowboy">>]}, + {<<"/users">>, [<<"users">>]}, + {<<"/users/42/friends">>, [<<"users">>, <<"42">>, <<"friends">>]}, + {<<"/users/a%20b/c%21d">>, [<<"users">>, <<"a b">>, <<"c!d">>]} + ], + [{P, fun() -> R = split_path(P) end} || {P, R} <- Tests]. + +match_test_() -> + Dispatch = [ + {[<<"eu">>, <<"ninenines">>, '_', <<"www">>], [], [ + {[<<"users">>, '_', <<"mails">>], [], match_any_subdomain_users, []} + ]}, + {[<<"eu">>, <<"ninenines">>], [], [ + {[<<"users">>, id, <<"friends">>], [], match_extend_users_friends, []}, + {'_', [], match_extend, []} + ]}, + {[var, <<"ninenines">>], [], [ + {[<<"threads">>, var], [], match_duplicate_vars, + [we, {expect, two}, var, here]} + ]}, + {[ext, <<"erlang">>], [], [ + {'_', [], match_erlang_ext, []} + ]}, + {'_', [], [ + {[<<"users">>, id, <<"friends">>], [], match_users_friends, []}, + {'_', [], match_any, []} + ]} + ], + Tests = [ + {<<"any">>, <<"/">>, {ok, match_any, [], #{}}}, + {<<"www.any.ninenines.eu">>, <<"/users/42/mails">>, + {ok, match_any_subdomain_users, [], #{}}}, + {<<"www.ninenines.eu">>, <<"/users/42/mails">>, + {ok, match_any, [], #{}}}, + {<<"www.ninenines.eu">>, <<"/">>, + {ok, match_any, [], #{}}}, + {<<"www.any.ninenines.eu">>, <<"/not_users/42/mails">>, + {error, notfound, path}}, + {<<"ninenines.eu">>, <<"/">>, + {ok, match_extend, [], #{}}}, + {<<"ninenines.eu">>, <<"/users/42/friends">>, + {ok, match_extend_users_friends, [], #{id => <<"42">>}}}, + {<<"erlang.fr">>, '_', + {ok, match_erlang_ext, [], #{ext => <<"fr">>}}}, + {<<"any">>, <<"/users/444/friends">>, + {ok, match_users_friends, [], #{id => <<"444">>}}}, + {<<"any">>, <<"/users//friends">>, + {ok, match_users_friends, [], #{id => <<>>}}} + ], + [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> + {ok, Handler, Opts, Binds, undefined, undefined} + = match(Dispatch, H, P) + end} || {H, P, {ok, Handler, Opts, Binds}} <- Tests]. + +match_info_test_() -> + Dispatch = [ + {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [ + {[<<"pathinfo">>, <<"is">>, <<"next">>, '...'], [], match_path, []} + ]}, + {[<<"eu">>, <<"ninenines">>, '...'], [], [ + {'_', [], match_any, []} + ]} + ], + Tests = [ + {<<"ninenines.eu">>, <<"/">>, + {ok, match_any, [], #{}, [], undefined}}, + {<<"bugs.ninenines.eu">>, <<"/">>, + {ok, match_any, [], #{}, [<<"bugs">>], undefined}}, + {<<"cowboy.bugs.ninenines.eu">>, <<"/">>, + {ok, match_any, [], #{}, [<<"cowboy">>, <<"bugs">>], undefined}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next">>, + {ok, match_path, [], #{}, undefined, []}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/path_info">>, + {ok, match_path, [], #{}, undefined, [<<"path_info">>]}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/foo/bar">>, + {ok, match_path, [], #{}, undefined, [<<"foo">>, <<"bar">>]}} + ], + [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> + R = match(Dispatch, H, P) + end} || {H, P, R} <- Tests]. 
+ +match_constraints_test() -> + Dispatch0 = [{'_', [], + [{[<<"path">>, value], [{value, int}], match, []}]}], + {ok, _, [], #{value := 123}, _, _} = match(Dispatch0, + <<"ninenines.eu">>, <<"/path/123">>), + {ok, _, [], #{value := 123}, _, _} = match(Dispatch0, + <<"ninenines.eu">>, <<"/path/123/">>), + {error, notfound, path} = match(Dispatch0, + <<"ninenines.eu">>, <<"/path/NaN/">>), + Dispatch1 = [{'_', [], + [{[<<"path">>, value, <<"more">>], [{value, nonempty}], match, []}]}], + {ok, _, [], #{value := <<"something">>}, _, _} = match(Dispatch1, + <<"ninenines.eu">>, <<"/path/something/more">>), + {error, notfound, path} = match(Dispatch1, + <<"ninenines.eu">>, <<"/path//more">>), + Dispatch2 = [{'_', [], [{[<<"path">>, username], + [{username, fun(_, Value) -> + case cowboy_bstr:to_lower(Value) of + Value -> {ok, Value}; + _ -> {error, not_lowercase} + end end}], + match, []}]}], + {ok, _, [], #{username := <<"essen">>}, _, _} = match(Dispatch2, + <<"ninenines.eu">>, <<"/path/essen">>), + {error, notfound, path} = match(Dispatch2, + <<"ninenines.eu">>, <<"/path/ESSEN">>), + ok. + +match_same_bindings_test() -> + Dispatch = [{[same, same], [], [{'_', [], match, []}]}], + {ok, _, [], #{same := <<"eu">>}, _, _} = match(Dispatch, + <<"eu.eu">>, <<"/">>), + {error, notfound, host} = match(Dispatch, + <<"ninenines.eu">>, <<"/">>), + Dispatch2 = [{[<<"eu">>, <<"ninenines">>, user], [], + [{[<<"path">>, user], [], match, []}]}], + {ok, _, [], #{user := <<"essen">>}, _, _} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/essen">>), + {ok, _, [], #{user := <<"essen">>}, _, _} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/essen/">>), + {error, notfound, path} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/notessen">>), + Dispatch3 = [{'_', [], [{[same, same], [], match, []}]}], + {ok, _, [], #{same := <<"path">>}, _, _} = match(Dispatch3, + <<"ninenines.eu">>, <<"/path/path">>), + {error, notfound, path} = match(Dispatch3, + <<"ninenines.eu">>, <<"/path/to">>), + ok. +-endif. diff --git a/src/wsSrv/cowboy_static.erl b/src/wsSrv/cowboy_static.erl new file mode 100644 index 0000000..b0cf146 --- /dev/null +++ b/src/wsSrv/cowboy_static.erl @@ -0,0 +1,418 @@ +%% Copyright (c) 2013-2017, Loïc Hoguin +%% Copyright (c) 2011, Magnus Klaar +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_static). + +-export([init/2]). +-export([malformed_request/2]). +-export([forbidden/2]). +-export([content_types_provided/2]). +-export([charsets_provided/2]). +-export([ranges_provided/2]). +-export([resource_exists/2]). +-export([last_modified/2]). +-export([generate_etag/2]). +-export([get_file/2]). + +-type extra_charset() :: {charset, module(), function()} | {charset, binary()}. +-type extra_etag() :: {etag, module(), function()} | {etag, false}. 
+-type extra_mimetypes() :: {mimetypes, module(), function()} + | {mimetypes, binary() | {binary(), binary(), [{binary(), binary()}]}}. +-type extra() :: [extra_charset() | extra_etag() | extra_mimetypes()]. +-type opts() :: {file | dir, string() | binary()} + | {file | dir, string() | binary(), extra()} + | {priv_file | priv_dir, atom(), string() | binary()} + | {priv_file | priv_dir, atom(), string() | binary(), extra()}. +-export_type([opts/0]). + +-include_lib("kernel/include/file.hrl"). + +-type state() :: {binary(), {direct | archive, #file_info{}} + | {error, atom()}, extra()}. + +%% Resolve the file that will be sent and get its file information. +%% If the handler is configured to manage a directory, check that the +%% requested file is inside the configured directory. + +-spec init(Req, opts()) -> {cowboy_rest, Req, error | state()} when Req::cowboy_req:req(). +init(Req, {Name, Path}) -> + init_opts(Req, {Name, Path, []}); +init(Req, {Name, App, Path}) + when Name =:= priv_file; Name =:= priv_dir -> + init_opts(Req, {Name, App, Path, []}); +init(Req, Opts) -> + init_opts(Req, Opts). + +init_opts(Req, {priv_file, App, Path, Extra}) -> + {PrivPath, HowToAccess} = priv_path(App, Path), + init_info(Req, absname(PrivPath), HowToAccess, Extra); +init_opts(Req, {file, Path, Extra}) -> + init_info(Req, absname(Path), direct, Extra); +init_opts(Req, {priv_dir, App, Path, Extra}) -> + {PrivPath, HowToAccess} = priv_path(App, Path), + init_dir(Req, PrivPath, HowToAccess, Extra); +init_opts(Req, {dir, Path, Extra}) -> + init_dir(Req, Path, direct, Extra). + +priv_path(App, Path) -> + case code:priv_dir(App) of + {error, bad_name} -> + error({badarg, "Can't resolve the priv_dir of application " + ++ atom_to_list(App)}); + PrivDir when is_list(Path) -> + { + PrivDir ++ "/" ++ Path, + how_to_access_app_priv(PrivDir) + }; + PrivDir when is_binary(Path) -> + { + << (list_to_binary(PrivDir))/binary, $/, Path/binary >>, + how_to_access_app_priv(PrivDir) + } + end. + +how_to_access_app_priv(PrivDir) -> + %% If the priv directory is not a directory, it must be + %% inside an Erlang application .ez archive. We call + %% how_to_access_app_priv1() to find the corresponding archive. + case filelib:is_dir(PrivDir) of + true -> direct; + false -> how_to_access_app_priv1(PrivDir) + end. + +how_to_access_app_priv1(Dir) -> + %% We go "up" by one path component at a time and look for a + %% regular file. + Archive = filename:dirname(Dir), + case Archive of + Dir -> + %% filename:dirname() returned its argument: + %% we reach the root directory. We found no + %% archive so we return 'direct': the given priv + %% directory doesn't exist. + direct; + _ -> + case filelib:is_regular(Archive) of + true -> {archive, Archive}; + false -> how_to_access_app_priv1(Archive) + end + end. + +absname(Path) when is_list(Path) -> + filename:absname(list_to_binary(Path)); +absname(Path) when is_binary(Path) -> + filename:absname(Path). + +init_dir(Req, Path, HowToAccess, Extra) when is_list(Path) -> + init_dir(Req, list_to_binary(Path), HowToAccess, Extra); +init_dir(Req, Path, HowToAccess, Extra) -> + Dir = fullpath(filename:absname(Path)), + case cowboy_req:path_info(Req) of + %% When dir/priv_dir are used and there is no path_info + %% this is a configuration error and we abort immediately. 
+ undefined -> + {ok, cowboy_req:reply(500, Req), error}; + PathInfo -> + case validate_reserved(PathInfo) of + error -> + {cowboy_rest, Req, error}; + ok -> + Filepath = filename:join([Dir|PathInfo]), + Len = byte_size(Dir), + case fullpath(Filepath) of + << Dir:Len/binary, $/, _/binary >> -> + init_info(Req, Filepath, HowToAccess, Extra); + << Dir:Len/binary >> -> + init_info(Req, Filepath, HowToAccess, Extra); + _ -> + {cowboy_rest, Req, error} + end + end + end. + +validate_reserved([]) -> + ok; +validate_reserved([P|Tail]) -> + case validate_reserved1(P) of + ok -> validate_reserved(Tail); + error -> error + end. + +%% We always reject forward slash, backward slash and NUL as +%% those have special meanings across the supported platforms. +%% We could support the backward slash on some platforms but +%% for the sake of consistency and simplicity we don't. +validate_reserved1(<<>>) -> + ok; +validate_reserved1(<<$/, _/bits>>) -> + error; +validate_reserved1(<<$\\, _/bits>>) -> + error; +validate_reserved1(<<0, _/bits>>) -> + error; +validate_reserved1(<<_, Rest/bits>>) -> + validate_reserved1(Rest). + +fullpath(Path) -> + fullpath(filename:split(Path), []). +fullpath([], Acc) -> + filename:join(lists:reverse(Acc)); +fullpath([<<".">>|Tail], Acc) -> + fullpath(Tail, Acc); +fullpath([<<"..">>|Tail], Acc=[_]) -> + fullpath(Tail, Acc); +fullpath([<<"..">>|Tail], [_|Acc]) -> + fullpath(Tail, Acc); +fullpath([Segment|Tail], Acc) -> + fullpath(Tail, [Segment|Acc]). + +init_info(Req, Path, HowToAccess, Extra) -> + Info = read_file_info(Path, HowToAccess), + {cowboy_rest, Req, {Path, Info, Extra}}. + +read_file_info(Path, direct) -> + case file:read_file_info(Path, [{time, universal}]) of + {ok, Info} -> {direct, Info}; + Error -> Error + end; +read_file_info(Path, {archive, Archive}) -> + case file:read_file_info(Archive, [{time, universal}]) of + {ok, ArchiveInfo} -> + %% The Erlang application archive is fine. + %% Now check if the requested file is in that + %% archive. We also need the file_info to merge + %% them with the archive's one. + PathS = binary_to_list(Path), + case erl_prim_loader:read_file_info(PathS) of + {ok, ContainedFileInfo} -> + Info = fix_archived_file_info( + ArchiveInfo, + ContainedFileInfo), + {archive, Info}; + error -> + {error, enoent} + end; + Error -> + Error + end. + +fix_archived_file_info(ArchiveInfo, ContainedFileInfo) -> + %% We merge the archive and content #file_info because we are + %% interested by the timestamps of the archive, but the type and + %% size of the contained file/directory. + %% + %% We reset the access to 'read', because we won't rewrite the + %% archive. + ArchiveInfo#file_info{ + size = ContainedFileInfo#file_info.size, + type = ContainedFileInfo#file_info.type, + access = read + }. + +-ifdef(TEST). +fullpath_test_() -> + Tests = [ + {<<"/home/cowboy">>, <<"/home/cowboy">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/./">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/./././././.">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/..">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/../">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/./../.">>}, + {<<"/">>, <<"/home/cowboy/../../../../../..">>}, + {<<"/etc/passwd">>, <<"/home/cowboy/../../etc/passwd">>} + ], + [{P, fun() -> R = fullpath(P) end} || {R, P} <- Tests]. 
+ +good_path_check_test_() -> + Tests = [ + <<"/home/cowboy/file">>, + <<"/home/cowboy/file/">>, + <<"/home/cowboy/./file">>, + <<"/home/cowboy/././././././file">>, + <<"/home/cowboy/abc/../file">>, + <<"/home/cowboy/abc/../file">>, + <<"/home/cowboy/abc/./.././file">> + ], + [{P, fun() -> + case fullpath(P) of + << "/home/cowboy/", _/bits >> -> ok + end + end} || P <- Tests]. + +bad_path_check_test_() -> + Tests = [ + <<"/home/cowboy/../../../../../../file">>, + <<"/home/cowboy/../../etc/passwd">> + ], + [{P, fun() -> + error = case fullpath(P) of + << "/home/cowboy/", _/bits >> -> ok; + _ -> error + end + end} || P <- Tests]. + +good_path_win32_check_test_() -> + Tests = case os:type() of + {unix, _} -> + []; + {win32, _} -> + [ + <<"c:/home/cowboy/file">>, + <<"c:/home/cowboy/file/">>, + <<"c:/home/cowboy/./file">>, + <<"c:/home/cowboy/././././././file">>, + <<"c:/home/cowboy/abc/../file">>, + <<"c:/home/cowboy/abc/../file">>, + <<"c:/home/cowboy/abc/./.././file">> + ] + end, + [{P, fun() -> + case fullpath(P) of + << "c:/home/cowboy/", _/bits >> -> ok + end + end} || P <- Tests]. + +bad_path_win32_check_test_() -> + Tests = case os:type() of + {unix, _} -> + []; + {win32, _} -> + [ + <<"c:/home/cowboy/../../secretfile.bat">>, + <<"c:/home/cowboy/c:/secretfile.bat">>, + <<"c:/home/cowboy/..\\..\\secretfile.bat">>, + <<"c:/home/cowboy/c:\\secretfile.bat">> + ] + end, + [{P, fun() -> + error = case fullpath(P) of + << "c:/home/cowboy/", _/bits >> -> ok; + _ -> error + end + end} || P <- Tests]. +-endif. + +%% Reject requests that tried to access a file outside +%% the target directory, or used reserved characters. + +-spec malformed_request(Req, State) + -> {boolean(), Req, State}. +malformed_request(Req, State) -> + {State =:= error, Req, State}. + +%% Directories, files that can't be accessed at all and +%% files with no read flag are forbidden. + +-spec forbidden(Req, State) + -> {boolean(), Req, State} + when State::state(). +forbidden(Req, State={_, {_, #file_info{type=directory}}, _}) -> + {true, Req, State}; +forbidden(Req, State={_, {error, eacces}, _}) -> + {true, Req, State}; +forbidden(Req, State={_, {_, #file_info{access=Access}}, _}) + when Access =:= write; Access =:= none -> + {true, Req, State}; +forbidden(Req, State) -> + {false, Req, State}. + +%% Detect the mimetype of the file. + +-spec content_types_provided(Req, State) + -> {[{binary(), get_file}], Req, State} + when State::state(). +content_types_provided(Req, State={Path, _, Extra}) when is_list(Extra) -> + case lists:keyfind(mimetypes, 1, Extra) of + false -> + {[{cow_mimetypes:web(Path), get_file}], Req, State}; + {mimetypes, Module, Function} -> + {[{Module:Function(Path), get_file}], Req, State}; + {mimetypes, Type} -> + {[{Type, get_file}], Req, State} + end. + +%% Detect the charset of the file. + +-spec charsets_provided(Req, State) + -> {[binary()], Req, State} + when State::state(). +charsets_provided(Req, State={Path, _, Extra}) -> + case lists:keyfind(charset, 1, Extra) of + %% We simulate the callback not being exported. + false -> + no_call; + {charset, Module, Function} -> + {[Module:Function(Path)], Req, State}; + {charset, Charset} when is_binary(Charset) -> + {[Charset], Req, State} + end. + +%% Enable support for range requests. + +-spec ranges_provided(Req, State) + -> {[{binary(), auto}], Req, State} + when State::state(). +ranges_provided(Req, State) -> + {[{<<"bytes">>, auto}], Req, State}. + +%% Assume the resource doesn't exist if it's not a regular file. 
+ +-spec resource_exists(Req, State) + -> {boolean(), Req, State} + when State::state(). +resource_exists(Req, State={_, {_, #file_info{type=regular}}, _}) -> + {true, Req, State}; +resource_exists(Req, State) -> + {false, Req, State}. + +%% Generate an etag for the file. + +-spec generate_etag(Req, State) + -> {{strong | weak, binary()}, Req, State} + when State::state(). +generate_etag(Req, State={Path, {_, #file_info{size=Size, mtime=Mtime}}, + Extra}) -> + case lists:keyfind(etag, 1, Extra) of + false -> + {generate_default_etag(Size, Mtime), Req, State}; + {etag, Module, Function} -> + {Module:Function(Path, Size, Mtime), Req, State}; + {etag, false} -> + {undefined, Req, State} + end. + +generate_default_etag(Size, Mtime) -> + {strong, integer_to_binary(erlang:phash2({Size, Mtime}, 16#ffffffff))}. + +%% Return the time of last modification of the file. + +-spec last_modified(Req, State) + -> {calendar:datetime(), Req, State} + when State::state(). +last_modified(Req, State={_, {_, #file_info{mtime=Modified}}, _}) -> + {Modified, Req, State}. + +%% Stream the file. + +-spec get_file(Req, State) + -> {{sendfile, 0, non_neg_integer(), binary()}, Req, State} + when State::state(). +get_file(Req, State={Path, {direct, #file_info{size=Size}}, _}) -> + {{sendfile, 0, Size, Path}, Req, State}; +get_file(Req, State={Path, {archive, _}, _}) -> + PathS = binary_to_list(Path), + {ok, Bin, _} = erl_prim_loader:get_file(PathS), + {Bin, Req, State}. diff --git a/src/wsSrv/cowboy_stream.erl b/src/wsSrv/cowboy_stream.erl new file mode 100644 index 0000000..2dad6d0 --- /dev/null +++ b/src/wsSrv/cowboy_stream.erl @@ -0,0 +1,193 @@ +%% Copyright (c) 2015-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_stream). + +-type state() :: any(). +-type human_reason() :: atom(). + +-type streamid() :: any(). +-export_type([streamid/0]). + +-type fin() :: fin | nofin. +-export_type([fin/0]). + +%% @todo Perhaps it makes more sense to have resp_body in this module? + +-type resp_command() + :: {response, cowboy:http_status(), cowboy:http_headers(), cowboy_req:resp_body()}. +-export_type([resp_command/0]). + +-type commands() :: [{inform, cowboy:http_status(), cowboy:http_headers()} + | resp_command() + | {headers, cowboy:http_status(), cowboy:http_headers()} + | {data, fin(), cowboy_req:resp_body()} + | {trailers, cowboy:http_headers()} + | {push, binary(), binary(), binary(), inet:port_number(), + binary(), binary(), cowboy:http_headers()} + | {flow, pos_integer()} + | {spawn, pid(), timeout()} + | {error_response, cowboy:http_status(), cowboy:http_headers(), iodata()} + | {switch_protocol, cowboy:http_headers(), module(), state()} + | {internal_error, any(), human_reason()} + | {set_options, map()} + | {log, logger:level(), io:format(), list()} + | stop]. +-export_type([commands/0]). 
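To give a concrete feel for the commands() protocol above, a stream handler callback might return a list along these lines; the status, header and body values are purely illustrative:

    %% Hypothetical callback result: send response headers, stream one
    %% final chunk of body data, then stop the stream.
    example_commands() ->
        [{headers, 200, #{<<"content-type">> => <<"text/plain">>}},
         {data, fin, <<"Hello world!">>},
         stop].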
+ +-type reason() :: normal | switch_protocol + | {internal_error, timeout | {error | exit | throw, any()}, human_reason()} + | {socket_error, closed | atom(), human_reason()} + | {stream_error, cow_http2:error(), human_reason()} + | {connection_error, cow_http2:error(), human_reason()} + | {stop, cow_http2:frame() | {exit, any()}, human_reason()}. +-export_type([reason/0]). + +-type partial_req() :: map(). %% @todo Take what's in cowboy_req with everything? optional. +-export_type([partial_req/0]). + +-callback init(streamid(), cowboy_req:req(), cowboy:opts()) -> {commands(), state()}. +-callback data(streamid(), fin(), binary(), State) -> {commands(), State} when State::state(). +-callback info(streamid(), any(), State) -> {commands(), State} when State::state(). +-callback terminate(streamid(), reason(), state()) -> any(). +-callback early_error(streamid(), reason(), partial_req(), Resp, cowboy:opts()) + -> Resp when Resp::resp_command(). + +%% @todo To optimize the number of active timers we could have a command +%% that enables a timeout that is called in the absence of any other call, +%% similar to what gen_server does. However the nice thing about this is +%% that the connection process can keep a single timer around (the same +%% one that would be used to detect half-closed sockets) and use this +%% timer and other events to trigger the timeout in streams at their +%% intended time. +%% +%% This same timer can be used to try and send PING frames to help detect +%% that the connection is indeed unresponsive. + +-export([init/3]). +-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). +-export([make_error_log/5]). + +%% Note that this and other functions in this module do NOT catch +%% exceptions. We want the exception to go all the way down to the +%% protocol code. +%% +%% OK the failure scenario is not so clear. The problem is +%% that the failure at any point in init/3 will result in the +%% corresponding state being lost. I am unfortunately not +%% confident we can do anything about this. If the crashing +%% handler just created a process, we'll never know about it. +%% Therefore at this time I choose to leave all failure handling +%% to the protocol process. +%% +%% Note that a failure in init/3 will result in terminate/3 +%% NOT being called. This is because the state is not available. + +-spec init(streamid(), cowboy_req:req(), cowboy:opts()) + -> {commands(), {module(), state()} | undefined}. +init(StreamID, Req, Opts) -> + case maps:get(stream_handlers, Opts, [cowboy_stream_h]) of + [] -> + {[], undefined}; + [Handler|Tail] -> + %% We call the next handler and remove it from the list of + %% stream handlers. This means that handlers that run after + %% it have no knowledge it exists. Should user require this + %% knowledge they can just define a separate option that will + %% be left untouched. + {Commands, State} = Handler:init(StreamID, Req, Opts#{stream_handlers => Tail}), + {Commands, {Handler, State}} + end. + +-spec data(streamid(), fin(), binary(), {Handler, State} | undefined) + -> {commands(), {Handler, State} | undefined} + when Handler::module(), State::state(). +data(_, _, _, undefined) -> + {[], undefined}; +data(StreamID, IsFin, Data, {Handler, State0}) -> + {Commands, State} = Handler:data(StreamID, IsFin, Data, State0), + {Commands, {Handler, State}}. + +-spec info(streamid(), any(), {Handler, State} | undefined) + -> {commands(), {Handler, State} | undefined} + when Handler::module(), State::state(). 
+info(_, _, undefined) -> + {[], undefined}; +info(StreamID, Info, {Handler, State0}) -> + {Commands, State} = Handler:info(StreamID, Info, State0), + {Commands, {Handler, State}}. + +-spec terminate(streamid(), reason(), {module(), state()} | undefined) -> ok. +terminate(_, _, undefined) -> + ok; +terminate(StreamID, Reason, {Handler, State}) -> + _ = Handler:terminate(StreamID, Reason, State), + ok. + +-spec early_error(streamid(), reason(), partial_req(), Resp, cowboy:opts()) + -> Resp when Resp::resp_command(). +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> + case maps:get(stream_handlers, Opts, [cowboy_stream_h]) of + [] -> + Resp; + [Handler|Tail] -> + %% This is the same behavior as in init/3. + Handler:early_error(StreamID, Reason, + PartialReq, Resp, Opts#{stream_handlers => Tail}) + end. + +-spec make_error_log(init | data | info | terminate | early_error, + list(), error | exit | throw, any(), list()) + -> {log, error, string(), list()}. +make_error_log(init, [StreamID, Req, Opts], Class, Exception, Stacktrace) -> + {log, error, + "Unhandled exception ~p:~p in cowboy_stream:init(~p, Req, Opts)~n" + "Stacktrace: ~p~n" + "Req: ~p~n" + "Opts: ~p~n", + [Class, Exception, StreamID, Stacktrace, Req, Opts]}; +make_error_log(data, [StreamID, IsFin, Data, State], Class, Exception, Stacktrace) -> + {log, error, + "Unhandled exception ~p:~p in cowboy_stream:data(~p, ~p, Data, State)~n" + "Stacktrace: ~p~n" + "Data: ~p~n" + "State: ~p~n", + [Class, Exception, StreamID, IsFin, Stacktrace, Data, State]}; +make_error_log(info, [StreamID, Msg, State], Class, Exception, Stacktrace) -> + {log, error, + "Unhandled exception ~p:~p in cowboy_stream:info(~p, Msg, State)~n" + "Stacktrace: ~p~n" + "Msg: ~p~n" + "State: ~p~n", + [Class, Exception, StreamID, Stacktrace, Msg, State]}; +make_error_log(terminate, [StreamID, Reason, State], Class, Exception, Stacktrace) -> + {log, error, + "Unhandled exception ~p:~p in cowboy_stream:terminate(~p, Reason, State)~n" + "Stacktrace: ~p~n" + "Reason: ~p~n" + "State: ~p~n", + [Class, Exception, StreamID, Stacktrace, Reason, State]}; +make_error_log(early_error, [StreamID, Reason, PartialReq, Resp, Opts], + Class, Exception, Stacktrace) -> + {log, error, + "Unhandled exception ~p:~p in cowboy_stream:early_error(~p, Reason, PartialReq, Resp, Opts)~n" + "Stacktrace: ~p~n" + "Reason: ~p~n" + "PartialReq: ~p~n" + "Resp: ~p~n" + "Opts: ~p~n", + [Class, Exception, StreamID, Stacktrace, Reason, PartialReq, Resp, Opts]}. diff --git a/src/wsSrv/cowboy_stream_h.erl b/src/wsSrv/cowboy_stream_h.erl new file mode 100644 index 0000000..f516f3d --- /dev/null +++ b/src/wsSrv/cowboy_stream_h.erl @@ -0,0 +1,324 @@ +%% Copyright (c) 2016-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_stream_h). +-behavior(cowboy_stream). + +-export([init/3]). 
+-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). + +-export([request_process/3]). +-export([resume/5]). + +-record(state, { + next :: any(), + ref = undefined :: ranch:ref(), + pid = undefined :: pid(), + expect = undefined :: undefined | continue, + read_body_pid = undefined :: pid() | undefined, + read_body_ref = undefined :: reference() | undefined, + read_body_timer_ref = undefined :: reference() | undefined, + read_body_length = 0 :: non_neg_integer() | infinity | auto, + read_body_is_fin = nofin :: nofin | {fin, non_neg_integer()}, + read_body_buffer = <<>> :: binary(), + body_length = 0 :: non_neg_integer(), + stream_body_pid = undefined :: pid() | undefined, + stream_body_status = normal :: normal | blocking | blocked +}). + +-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) + -> {[{spawn, pid(), timeout()}], #state{}}. +init(StreamID, Req=#{ref := Ref}, Opts) -> + Env = maps:get(env, Opts, #{}), + Middlewares = maps:get(middlewares, Opts, [cowboy_router, cowboy_handler]), + Shutdown = maps:get(shutdown_timeout, Opts, 5000), + Pid = proc_lib:spawn_link(?MODULE, request_process, [Req, Env, Middlewares]), + Expect = expect(Req), + {Commands, Next} = cowboy_stream:init(StreamID, Req, Opts), + {[{spawn, Pid, Shutdown}|Commands], + #state{next=Next, ref=Ref, pid=Pid, expect=Expect}}. + +%% Ignore the expect header in HTTP/1.0. +expect(#{version := 'HTTP/1.0'}) -> + undefined; +expect(Req) -> + try cowboy_req:parse_header(<<"expect">>, Req) of + Expect -> + Expect + catch _:_ -> + undefined + end. + +%% If we receive data and stream is waiting for data: +%% If we accumulated enough data or IsFin=fin, send it. +%% If we are in auto mode, send it and update flow control. +%% If not, buffer it. +%% If not, buffer it. +%% +%% We always reset the expect field when we receive data, +%% since the client started sending the request body before +%% we could send a 100 continue response. + +-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) + -> {cowboy_stream:commands(), State} when State::#state{}. +%% Stream isn't waiting for data. +data(StreamID, IsFin, Data, State=#state{ + read_body_ref=undefined, read_body_buffer=Buffer, body_length=BodyLen}) -> + do_data(StreamID, IsFin, Data, [], State#state{ + expect=undefined, + read_body_is_fin=IsFin, + read_body_buffer= << Buffer/binary, Data/binary >>, + body_length=BodyLen + byte_size(Data) + }); +%% Stream is waiting for data using auto mode. +%% +%% There is no buffering done in auto mode. +data(StreamID, IsFin, Data, State=#state{read_body_pid=Pid, read_body_ref=Ref, + read_body_length=auto, body_length=BodyLen}) -> + send_request_body(Pid, Ref, IsFin, BodyLen, Data), + do_data(StreamID, IsFin, Data, [{flow, byte_size(Data)}], State#state{ + read_body_ref=undefined, + %% @todo This is wrong, it's missing byte_size(Data). + body_length=BodyLen + }); +%% Stream is waiting for data but we didn't receive enough to send yet. +data(StreamID, IsFin=nofin, Data, State=#state{ + read_body_length=ReadLen, read_body_buffer=Buffer, body_length=BodyLen}) + when byte_size(Data) + byte_size(Buffer) < ReadLen -> + do_data(StreamID, IsFin, Data, [], State#state{ + expect=undefined, + read_body_buffer= << Buffer/binary, Data/binary >>, + body_length=BodyLen + byte_size(Data) + }); +%% Stream is waiting for data and we received enough to send. 
+data(StreamID, IsFin, Data, State=#state{read_body_pid=Pid, read_body_ref=Ref,
+	read_body_timer_ref=TRef, read_body_buffer=Buffer, body_length=BodyLen0}) ->
+	BodyLen = BodyLen0 + byte_size(Data),
+	ok = erlang:cancel_timer(TRef, [{async, true}, {info, false}]),
+	send_request_body(Pid, Ref, IsFin, BodyLen, << Buffer/binary, Data/binary >>),
+	do_data(StreamID, IsFin, Data, [], State#state{
+		expect=undefined,
+		read_body_ref=undefined,
+		read_body_timer_ref=undefined,
+		read_body_buffer= <<>>,
+		body_length=BodyLen
+	}).
+
+do_data(StreamID, IsFin, Data, Commands1, State=#state{next=Next0}) ->
+	{Commands2, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0),
+	{Commands1 ++ Commands2, State#state{next=Next}}.
+
+-spec info(cowboy_stream:streamid(), any(), State)
+	-> {cowboy_stream:commands(), State} when State::#state{}.
+info(StreamID, Info={'EXIT', Pid, normal}, State=#state{pid=Pid}) ->
+	do_info(StreamID, Info, [stop], State);
+info(StreamID, Info={'EXIT', Pid, {{request_error, Reason, _HumanReadable}, _}},
+		State=#state{pid=Pid}) ->
+	Status = case Reason of
+		timeout -> 408;
+		payload_too_large -> 413;
+		_ -> 400
+	end,
+	%% @todo Headers? Details in body? Log the crash? More stuff in debug only?
+	do_info(StreamID, Info, [
+		{error_response, Status, #{<<"content-length">> => <<"0">>}, <<>>},
+		stop
+	], State);
+info(StreamID, Exit={'EXIT', Pid, {Reason, Stacktrace}}, State=#state{ref=Ref, pid=Pid}) ->
+	Commands0 = [{internal_error, Exit, 'Stream process crashed.'}],
+	Commands = case Reason of
+		normal -> Commands0;
+		shutdown -> Commands0;
+		{shutdown, _} -> Commands0;
+		_ -> [{log, error,
+			"Ranch listener ~p, connection process ~p, stream ~p "
+			"had its request process ~p exit with reason "
+			"~999999p and stacktrace ~999999p~n",
+			[Ref, self(), StreamID, Pid, Reason, Stacktrace]}
+			|Commands0]
+	end,
+	do_info(StreamID, Exit, [
+		{error_response, 500, #{<<"content-length">> => <<"0">>}, <<>>}
+		|Commands], State);
+%% Request body, auto mode, no body buffered.
+info(StreamID, Info={read_body, Pid, Ref, auto, infinity}, State=#state{read_body_buffer= <<>>}) ->
+	do_info(StreamID, Info, [], State#state{
+		read_body_pid=Pid,
+		read_body_ref=Ref,
+		read_body_length=auto
+	});
+%% Request body, auto mode, body buffered or complete.
+info(StreamID, Info={read_body, Pid, Ref, auto, infinity}, State=#state{
+		read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) ->
+	send_request_body(Pid, Ref, IsFin, BodyLen, Buffer),
+	do_info(StreamID, Info, [{flow, byte_size(Buffer)}],
+		State#state{read_body_buffer= <<>>});
+%% Request body, body buffered large enough or complete.
+%%
+%% We do not send a 100 continue response if the client
+%% already started sending the body.
+info(StreamID, Info={read_body, Pid, Ref, Length, _}, State=#state{
+		read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen})
+		when IsFin =:= fin; byte_size(Buffer) >= Length ->
+	send_request_body(Pid, Ref, IsFin, BodyLen, Buffer),
+	do_info(StreamID, Info, [], State#state{read_body_buffer= <<>>});
+%% Request body, not enough to send yet.
+info(StreamID, Info={read_body, Pid, Ref, Length, Period}, State=#state{expect=Expect}) -> + Commands = case Expect of + continue -> [{inform, 100, #{}}, {flow, Length}]; + undefined -> [{flow, Length}] + end, + TRef = erlang:send_after(Period, self(), {{self(), StreamID}, {read_body_timeout, Ref}}), + do_info(StreamID, Info, Commands, State#state{ + read_body_pid=Pid, + read_body_ref=Ref, + read_body_timer_ref=TRef, + read_body_length=Length + }); +%% Request body reading timeout; send what we got. +info(StreamID, Info={read_body_timeout, Ref}, State=#state{read_body_pid=Pid, read_body_ref=Ref, + read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) -> + send_request_body(Pid, Ref, IsFin, BodyLen, Buffer), + do_info(StreamID, Info, [], State#state{ + read_body_ref=undefined, + read_body_timer_ref=undefined, + read_body_buffer= <<>> + }); +info(StreamID, Info={read_body_timeout, _}, State) -> + do_info(StreamID, Info, [], State); +%% Response. +%% +%% We reset the expect field when a 100 continue response +%% is sent or when any final response is sent. +info(StreamID, Inform={inform, Status, _}, State0) -> + State = case cow_http:status_to_integer(Status) of + 100 -> State0#state{expect=undefined}; + _ -> State0 + end, + do_info(StreamID, Inform, [Inform], State); +info(StreamID, Response={response, _, _, _}, State) -> + do_info(StreamID, Response, [Response], State#state{expect=undefined}); +info(StreamID, Headers={headers, _, _}, State) -> + do_info(StreamID, Headers, [Headers], State#state{expect=undefined}); +%% Sending data involves the data message, the stream_buffer_full alarm +%% and the connection_buffer_full alarm. We stop sending acks when an alarm is on. +%% +%% We only apply backpressure when the message includes a pid. Otherwise +%% it is a message from Cowboy, or the user circumventing the backpressure. +%% +%% We currently do not support sending data from multiple processes concurrently. +info(StreamID, Data={data, _, _}, State) -> + do_info(StreamID, Data, [Data], State); +info(StreamID, Data0={data, Pid, _, _}, State0=#state{stream_body_status=Status}) -> + State = case Status of + normal -> + Pid ! {data_ack, self()}, + State0; + blocking -> + State0#state{stream_body_pid=Pid, stream_body_status=blocked}; + blocked -> + State0 + end, + Data = erlang:delete_element(2, Data0), + do_info(StreamID, Data, [Data], State); +info(StreamID, Alarm={alarm, Name, on}, State0=#state{stream_body_status=Status}) + when Name =:= connection_buffer_full; Name =:= stream_buffer_full -> + State = case Status of + normal -> State0#state{stream_body_status=blocking}; + _ -> State0 + end, + do_info(StreamID, Alarm, [], State); +info(StreamID, Alarm={alarm, Name, off}, State=#state{stream_body_pid=Pid, stream_body_status=Status}) + when Name =:= connection_buffer_full; Name =:= stream_buffer_full -> + _ = case Status of + normal -> ok; + blocking -> ok; + blocked -> Pid ! {data_ack, self()} + end, + do_info(StreamID, Alarm, [], State#state{stream_body_pid=undefined, stream_body_status=normal}); +info(StreamID, Trailers={trailers, _}, State) -> + do_info(StreamID, Trailers, [Trailers], State); +info(StreamID, Push={push, _, _, _, _, _, _, _}, State) -> + do_info(StreamID, Push, [Push], State); +info(StreamID, SwitchProtocol={switch_protocol, _, _, _}, State) -> + do_info(StreamID, SwitchProtocol, [SwitchProtocol], State#state{expect=undefined}); +%% Convert the set_options message to a command. 
+info(StreamID, SetOptions={set_options, _}, State) -> + do_info(StreamID, SetOptions, [SetOptions], State); +%% Unknown message, either stray or meant for a handler down the line. +info(StreamID, Info, State) -> + do_info(StreamID, Info, [], State). + +do_info(StreamID, Info, Commands1, State0=#state{next=Next0}) -> + {Commands2, Next} = cowboy_stream:info(StreamID, Info, Next0), + {Commands1 ++ Commands2, State0#state{next=Next}}. + +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> ok. +terminate(StreamID, Reason, #state{next=Next}) -> + cowboy_stream:terminate(StreamID, Reason, Next). + +-spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(), + cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp + when Resp::cowboy_stream:resp_command(). +early_error(StreamID, Reason, PartialReq, Resp, Opts) -> + cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts). + +send_request_body(Pid, Ref, nofin, _, Data) -> + Pid ! {request_body, Ref, nofin, Data}, + ok; +send_request_body(Pid, Ref, fin, BodyLen, Data) -> + Pid ! {request_body, Ref, fin, BodyLen, Data}, + ok. + +%% Request process. + +%% We add the stacktrace to exit exceptions here in order +%% to simplify the debugging of errors. The proc_lib library +%% already adds the stacktrace to other types of exceptions. +-spec request_process(cowboy_req:req(), cowboy_middleware:env(), [module()]) -> ok. +request_process(Req, Env, Middlewares) -> + try + execute(Req, Env, Middlewares) + catch + exit:Reason={shutdown, _}:Stacktrace -> + erlang:raise(exit, Reason, Stacktrace); + exit:Reason:Stacktrace when Reason =/= normal, Reason =/= shutdown -> + erlang:raise(exit, {Reason, Stacktrace}, Stacktrace) + end. + +execute(_, _, []) -> + ok; +execute(Req, Env, [Middleware|Tail]) -> + case Middleware:execute(Req, Env) of + {ok, Req2, Env2} -> + execute(Req2, Env2, Tail); + {suspend, Module, Function, Args} -> + proc_lib:hibernate(?MODULE, resume, [Env, Tail, Module, Function, Args]); + {stop, _Req2} -> + ok + end. + +-spec resume(cowboy_middleware:env(), [module()], module(), atom(), [any()]) -> ok. +resume(Env, Tail, Module, Function, Args) -> + case apply(Module, Function, Args) of + {ok, Req2, Env2} -> + execute(Req2, Env2, Tail); + {suspend, Module2, Function2, Args2} -> + proc_lib:hibernate(?MODULE, resume, [Env, Tail, Module2, Function2, Args2]); + {stop, _Req2} -> + ok + end. diff --git a/src/wsSrv/cowboy_sub_protocol.erl b/src/wsSrv/cowboy_sub_protocol.erl new file mode 100644 index 0000000..6714289 --- /dev/null +++ b/src/wsSrv/cowboy_sub_protocol.erl @@ -0,0 +1,24 @@ +%% Copyright (c) 2013-2017, Loïc Hoguin +%% Copyright (c) 2013, James Fish +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_sub_protocol). 
+ +-callback upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} | {suspend, module(), atom(), [any()]} | {stop, Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). + +-callback upgrade(Req, Env, module(), any(), any()) + -> {ok, Req, Env} | {suspend, module(), atom(), [any()]} | {stop, Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). diff --git a/src/wsSrv/cowboy_sup.erl b/src/wsSrv/cowboy_sup.erl new file mode 100644 index 0000000..d3ac3b0 --- /dev/null +++ b/src/wsSrv/cowboy_sup.erl @@ -0,0 +1,30 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_sup). +-behaviour(supervisor). + +-export([start_link/0]). +-export([init/1]). + +-spec start_link() -> {ok, pid()}. +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +-spec init([]) + -> {ok, {{supervisor:strategy(), 10, 10}, [supervisor:child_spec()]}}. +init([]) -> + Procs = [{cowboy_clock, {cowboy_clock, start_link, []}, + permanent, 5000, worker, [cowboy_clock]}], + {ok, {{one_for_one, 10, 10}, Procs}}. diff --git a/src/wsSrv/cowboy_tls.erl b/src/wsSrv/cowboy_tls.erl new file mode 100644 index 0000000..c049ecb --- /dev/null +++ b/src/wsSrv/cowboy_tls.erl @@ -0,0 +1,56 @@ +%% Copyright (c) 2015-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_tls). +-behavior(ranch_protocol). + +-export([start_link/3]). +-export([start_link/4]). +-export([connection_process/4]). + +%% Ranch 1. +-spec start_link(ranch:ref(), ssl:sslsocket(), module(), cowboy:opts()) -> {ok, pid()}. +start_link(Ref, _Socket, Transport, Opts) -> + start_link(Ref, Transport, Opts). + +%% Ranch 2. +-spec start_link(ranch:ref(), module(), cowboy:opts()) -> {ok, pid()}. +start_link(Ref, Transport, Opts) -> + Pid = proc_lib:spawn_link(?MODULE, connection_process, + [self(), Ref, Transport, Opts]), + {ok, Pid}. + +-spec connection_process(pid(), ranch:ref(), module(), cowboy:opts()) -> ok. 
+connection_process(Parent, Ref, Transport, Opts) -> + ProxyInfo = case maps:get(proxy_header, Opts, false) of + true -> + {ok, ProxyInfo0} = ranch:recv_proxy_header(Ref, 1000), + ProxyInfo0; + false -> + undefined + end, + {ok, Socket} = ranch:handshake(Ref), + case ssl:negotiated_protocol(Socket) of + {ok, <<"h2">>} -> + init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, cowboy_http2); + _ -> %% http/1.1 or no protocol negotiated. + init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, cowboy_http) + end. + +init(Parent, Ref, Socket, Transport, ProxyInfo, Opts, Protocol) -> + _ = case maps:get(connection_type, Opts, supervisor) of + worker -> ok; + supervisor -> process_flag(trap_exit, true) + end, + Protocol:init(Parent, Ref, Socket, Transport, ProxyInfo, Opts). diff --git a/src/wsSrv/cowboy_tracer_h.erl b/src/wsSrv/cowboy_tracer_h.erl new file mode 100644 index 0000000..9a19ae1 --- /dev/null +++ b/src/wsSrv/cowboy_tracer_h.erl @@ -0,0 +1,192 @@ +%% Copyright (c) 2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_tracer_h). +-behavior(cowboy_stream). + +-export([init/3]). +-export([data/4]). +-export([info/3]). +-export([terminate/3]). +-export([early_error/5]). + +-export([set_trace_patterns/0]). + +-export([tracer_process/3]). +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-type match_predicate() + :: fun((cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) -> boolean()). + +-type tracer_match_specs() :: [match_predicate() + | {method, binary()} + | {host, binary()} + | {path, binary()} + | {path_start, binary()} + | {header, binary()} + | {header, binary(), binary()} + | {peer_ip, inet:ip_address()} +]. +-export_type([tracer_match_specs/0]). + +-type tracer_callback() :: fun((init | terminate | tuple(), any()) -> any()). +-export_type([tracer_callback/0]). + +-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts()) + -> {cowboy_stream:commands(), any()}. +init(StreamID, Req, Opts) -> + init_tracer(StreamID, Req, Opts), + cowboy_stream:init(StreamID, Req, Opts). + +-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) + -> {cowboy_stream:commands(), State} when State::any(). +data(StreamID, IsFin, Data, Next) -> + cowboy_stream:data(StreamID, IsFin, Data, Next). + +-spec info(cowboy_stream:streamid(), any(), State) + -> {cowboy_stream:commands(), State} when State::any(). +info(StreamID, Info, Next) -> + cowboy_stream:info(StreamID, Info, Next). + +-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), any()) -> any(). +terminate(StreamID, Reason, Next) -> + cowboy_stream:terminate(StreamID, Reason, Next). 
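To show how the tracer_match_specs() and tracer_callback() options defined above might be wired together, here is a hedged sketch of protocol options; the match specs, handler order and callback are illustrative choices, not defaults of this code:

    %% Hypothetical options: trace only GET requests whose path starts
    %% with /debug, accumulating trace messages in a list.
    tracer_opts() ->
        #{
            stream_handlers => [cowboy_tracer_h, cowboy_stream_h],
            tracer_match_specs => [{method, <<"GET">>}, {path_start, <<"/debug">>}],
            tracer_callback =>
                fun (init, {_StreamID, _Req, _Opts}) -> [];
                    (terminate, FinalState) -> FinalState;
                    (TraceMsg, State) -> [TraceMsg|State]
                end
        }.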
+
+-spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(),
+    cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp
+    when Resp::cowboy_stream:resp_command().
+early_error(StreamID, Reason, PartialReq, Resp, Opts) ->
+    cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts).
+
+%% API.
+
+%% These trace patterns are most likely not suitable for production.
+-spec set_trace_patterns() -> ok.
+set_trace_patterns() ->
+    erlang:trace_pattern({'_', '_', '_'}, [{'_', [], [{return_trace}]}], [local]),
+    erlang:trace_pattern(on_load, [{'_', [], [{return_trace}]}], [local]),
+    ok.
+
+%% Internal.
+
+init_tracer(StreamID, Req, Opts=#{tracer_match_specs := List, tracer_callback := _}) ->
+    case match(List, StreamID, Req, Opts) of
+        false ->
+            ok;
+        true ->
+            start_tracer(StreamID, Req, Opts)
+    end;
+%% When the options tracer_match_specs or tracer_callback
+%% are not provided we do not enable tracing.
+init_tracer(_, _, _) ->
+    ok.
+
+match([], _, _, _) ->
+    true;
+match([Predicate|Tail], StreamID, Req, Opts) when is_function(Predicate) ->
+    case Predicate(StreamID, Req, Opts) of
+        true -> match(Tail, StreamID, Req, Opts);
+        false -> false
+    end;
+match([{method, Value}|Tail], StreamID, Req=#{method := Value}, Opts) ->
+    match(Tail, StreamID, Req, Opts);
+match([{host, Value}|Tail], StreamID, Req=#{host := Value}, Opts) ->
+    match(Tail, StreamID, Req, Opts);
+match([{path, Value}|Tail], StreamID, Req=#{path := Value}, Opts) ->
+    match(Tail, StreamID, Req, Opts);
+match([{path_start, PathStart}|Tail], StreamID, Req=#{path := Path}, Opts) ->
+    Len = byte_size(PathStart),
+    case Path of
+        <<PathStart:Len/binary, _/bits>> -> match(Tail, StreamID, Req, Opts);
+        _ -> false
+    end;
+match([{header, Name}|Tail], StreamID, Req=#{headers := Headers}, Opts) ->
+    case Headers of
+        #{Name := _} -> match(Tail, StreamID, Req, Opts);
+        _ -> false
+    end;
+match([{header, Name, Value}|Tail], StreamID, Req=#{headers := Headers}, Opts) ->
+    case Headers of
+        #{Name := Value} -> match(Tail, StreamID, Req, Opts);
+        _ -> false
+    end;
+match([{peer_ip, IP}|Tail], StreamID, Req=#{peer := {IP, _}}, Opts) ->
+    match(Tail, StreamID, Req, Opts);
+match(_, _, _, _) ->
+    false.
+
+%% We only start the tracer if one wasn't started before.
+start_tracer(StreamID, Req, Opts) ->
+    case erlang:trace_info(self(), tracer) of
+        {tracer, []} ->
+            TracerPid = proc_lib:spawn_link(?MODULE, tracer_process, [StreamID, Req, Opts]),
+            %% The default flags are probably not suitable for production.
+            Flags = maps:get(tracer_flags, Opts, [
+                send, 'receive', call, return_to,
+                procs, ports, monotonic_timestamp,
+                %% The set_on_spawn flag is necessary to catch events
+                %% from request processes.
+                set_on_spawn
+            ]),
+            erlang:trace(self(), true, [{tracer, TracerPid}|Flags]),
+            ok;
+        _ ->
+            ok
+    end.
+
+%% Tracer process.
+
+-spec tracer_process(_, _, _) -> no_return().
+tracer_process(StreamID, Req=#{pid := Parent}, Opts=#{tracer_callback := Fun}) ->
+    %% This is necessary because otherwise the tracer could stop
+    %% before it has finished processing the events in its queue.
+    process_flag(trap_exit, true),
+    State = Fun(init, {StreamID, Req, Opts}),
+    tracer_loop(Parent, Opts, State).
+ +tracer_loop(Parent, Opts=#{tracer_callback := Fun}, State0) -> + receive + Msg when element(1, Msg) =:= trace; element(1, Msg) =:= trace_ts -> + State = Fun(Msg, State0), + tracer_loop(Parent, Opts, State); + {'EXIT', Parent, Reason} -> + tracer_terminate(Reason, Opts, State0); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], {Opts, State0}); + Msg -> + cowboy:log(warning, "~p: Tracer process received stray message ~9999p~n", + [?MODULE, Msg], Opts), + tracer_loop(Parent, Opts, State0) + end. + +-spec tracer_terminate(_, _, _) -> no_return(). +tracer_terminate(Reason, #{tracer_callback := Fun}, State) -> + _ = Fun(terminate, State), + exit(Reason). + +%% System callbacks. + +-spec system_continue(pid(), _, {cowboy:opts(), any()}) -> no_return(). +system_continue(Parent, _, {Opts, State}) -> + tracer_loop(Parent, Opts, State). + +-spec system_terminate(any(), _, _, _) -> no_return(). +system_terminate(Reason, _, _, {Opts, State}) -> + tracer_terminate(Reason, Opts, State). + +-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::any(). +system_code_change(Misc, _, _, _) -> + {ok, Misc}. diff --git a/src/wsSrv/cowboy_websocket.erl b/src/wsSrv/cowboy_websocket.erl new file mode 100644 index 0000000..e7d8f31 --- /dev/null +++ b/src/wsSrv/cowboy_websocket.erl @@ -0,0 +1,707 @@ +%% Copyright (c) 2011-2017, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Cowboy supports versions 7 through 17 of the Websocket drafts. +%% It also supports RFC6455, the proposed standard for Websocket. +-module(cowboy_websocket). +-behaviour(cowboy_sub_protocol). + +-export([is_upgrade_request/1]). +-export([upgrade/4]). +-export([upgrade/5]). +-export([takeover/7]). +-export([loop/3]). + +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +-type commands() :: [cow_ws:frame() + | {active, boolean()} + | {deflate, boolean()} + | {set_options, map()} + | {shutdown_reason, any()} +]. +-export_type([commands/0]). + +-type call_result(State) :: {commands(), State} | {commands(), State, hibernate}. + +-type deprecated_call_result(State) :: {ok, State} + | {ok, State, hibernate} + | {reply, cow_ws:frame() | [cow_ws:frame()], State} + | {reply, cow_ws:frame() | [cow_ws:frame()], State, hibernate} + | {stop, State}. + +-type terminate_reason() :: normal | stop | timeout + | remote | {remote, cow_ws:close_code(), binary()} + | {error, badencoding | badframe | closed | atom()} + | {crash, error | exit | throw, any()}. + +-callback init(Req, any()) + -> {ok | module(), Req, any()} + | {module(), Req, any(), any()} + when Req::cowboy_req:req(). + +-callback websocket_init(State) + -> call_result(State) | deprecated_call_result(State) when State::any(). +-optional_callbacks([websocket_init/1]). 
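The init/2 and websocket_init/1 callbacks declared above (and websocket_handle/2, websocket_info/2 declared just below) are easiest to read next to a concrete handler. Here is a minimal sketch assuming a hypothetical my_ws_handler module; the options map follows the opts() type defined further below, and all return values are commands() lists.

%% Hypothetical echo handler (module name and messages are assumptions).
-module(my_ws_handler).
-behaviour(cowboy_websocket).

-export([init/2]).
-export([websocket_init/1]).
-export([websocket_handle/2]).
-export([websocket_info/2]).

%% Runs in the plain HTTP request process: request the upgrade.
init(Req, State) ->
    {cowboy_websocket, Req, State, #{idle_timeout => 30000, compress => true}}.

%% Runs in the connection (or stream) process once upgraded.
websocket_init(State) ->
    {[{text, <<"hello">>}], State}.

%% Echo text frames back; ignore everything else.
websocket_handle({text, Msg}, State) ->
    {[{text, Msg}], State};
websocket_handle(_Frame, State) ->
    {[], State}.

%% Forward Erlang messages to the peer as text frames.
websocket_info({send, Text}, State) ->
    {[{text, Text}], State};
websocket_info(_Info, State) ->
    {[], State}.

Returning the cowboy_websocket tuple from init/2 is what requests the upgrade handled by upgrade/5 below.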
+ +-callback websocket_handle(ping | pong | {text | binary | ping | pong, binary()}, State) + -> call_result(State) | deprecated_call_result(State) when State::any(). +-callback websocket_info(any(), State) + -> call_result(State) | deprecated_call_result(State) when State::any(). + +-callback terminate(any(), cowboy_req:req(), any()) -> ok. +-optional_callbacks([terminate/3]). + +-type opts() :: #{ + active_n => pos_integer(), + compress => boolean(), + deflate_opts => cow_ws:deflate_opts(), + idle_timeout => timeout(), + max_frame_size => non_neg_integer() | infinity, + req_filter => fun((cowboy_req:req()) -> map()), + validate_utf8 => boolean() +}. +-export_type([opts/0]). + +-record(state, { + parent :: undefined | pid(), + ref :: ranch:ref(), + socket = undefined :: inet:socket() | {pid(), cowboy_stream:streamid()} | undefined, + transport = undefined :: module() | undefined, + opts = #{} :: opts(), + active = true :: boolean(), + handler :: module(), + key = undefined :: undefined | binary(), + timeout_ref = undefined :: undefined | reference(), + messages = undefined :: undefined | {atom(), atom(), atom()} + | {atom(), atom(), atom(), atom()}, + hibernate = false :: boolean(), + frag_state = undefined :: cow_ws:frag_state(), + frag_buffer = <<>> :: binary(), + utf8_state :: cow_ws:utf8_state(), + deflate = true :: boolean(), + extensions = #{} :: map(), + req = #{} :: map(), + shutdown_reason = normal :: any() +}). + +%% Because the HTTP/1.1 and HTTP/2 handshakes are so different, +%% this function is necessary to figure out whether a request +%% is trying to upgrade to the Websocket protocol. + +-spec is_upgrade_request(cowboy_req:req()) -> boolean(). +is_upgrade_request(#{version := 'HTTP/2', method := <<"CONNECT">>, protocol := Protocol}) -> + <<"websocket">> =:= cowboy_bstr:to_lower(Protocol); +is_upgrade_request(Req=#{version := 'HTTP/1.1', method := <<"GET">>}) -> + ConnTokens = cowboy_req:parse_header(<<"connection">>, Req, []), + case lists:member(<<"upgrade">>, ConnTokens) of + false -> + false; + true -> + UpgradeTokens = cowboy_req:parse_header(<<"upgrade">>, Req), + lists:member(<<"websocket">>, UpgradeTokens) + end; +is_upgrade_request(_) -> + false. + +%% Stream process. + +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req, Env, Handler, HandlerState) -> + upgrade(Req, Env, Handler, HandlerState, #{}). + +-spec upgrade(Req, Env, module(), any(), opts()) + -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +%% @todo Immediately crash if a response has already been sent. +upgrade(Req0=#{version := Version}, Env, Handler, HandlerState, Opts) -> + FilteredReq = case maps:get(req_filter, Opts, undefined) of + undefined -> maps:with([method, version, scheme, host, port, path, qs, peer], Req0); + FilterFun -> FilterFun(Req0) + end, + Utf8State = case maps:get(validate_utf8, Opts, true) of + true -> 0; + false -> undefined + end, + State0 = #state{opts=Opts, handler=Handler, utf8_state=Utf8State, req=FilteredReq}, + try websocket_upgrade(State0, Req0) of + {ok, State, Req} -> + websocket_handshake(State, Req, HandlerState, Env); + %% The status code 426 is specific to HTTP/1.1 connections. + {error, upgrade_required} when Version =:= 'HTTP/1.1' -> + {ok, cowboy_req:reply(426, #{ + <<"connection">> => <<"upgrade">>, + <<"upgrade">> => <<"websocket">> + }, Req0), Env}; + %% Use a generic 400 error for HTTP/2. 
+ {error, upgrade_required} -> + {ok, cowboy_req:reply(400, Req0), Env} + catch _:_ -> + %% @todo Probably log something here? + %% @todo Test that we can have 2 /ws 400 status code in a row on the same connection. + %% @todo Does this even work? + {ok, cowboy_req:reply(400, Req0), Env} + end. + +websocket_upgrade(State, Req=#{version := Version}) -> + case is_upgrade_request(Req) of + false -> + {error, upgrade_required}; + true when Version =:= 'HTTP/1.1' -> + Key = cowboy_req:header(<<"sec-websocket-key">>, Req), + false = Key =:= undefined, + websocket_version(State#state{key=Key}, Req); + true -> + websocket_version(State, Req) + end. + +websocket_version(State, Req) -> + WsVersion = cowboy_req:parse_header(<<"sec-websocket-version">>, Req), + case WsVersion of + 7 -> ok; + 8 -> ok; + 13 -> ok + end, + websocket_extensions(State, Req#{websocket_version => WsVersion}). + +websocket_extensions(State=#state{opts=Opts}, Req) -> + %% @todo We want different options for this. For example + %% * compress everything auto + %% * compress only text auto + %% * compress only binary auto + %% * compress nothing auto (but still enabled it) + %% * disable compression + Compress = maps:get(compress, Opts, false), + case {Compress, cowboy_req:parse_header(<<"sec-websocket-extensions">>, Req)} of + {true, Extensions} when Extensions =/= undefined -> + websocket_extensions(State, Req, Extensions, []); + _ -> + {ok, State, Req} + end. + +websocket_extensions(State, Req, [], []) -> + {ok, State, Req}; +websocket_extensions(State, Req, [], [<<", ">>|RespHeader]) -> + {ok, State, cowboy_req:set_resp_header(<<"sec-websocket-extensions">>, lists:reverse(RespHeader), Req)}; +%% For HTTP/2 we ARE on the controlling process and do NOT want to update the owner. +websocket_extensions(State=#state{opts=Opts, extensions=Extensions}, + Req=#{pid := Pid, version := Version}, + [{<<"permessage-deflate">>, Params}|Tail], RespHeader) -> + DeflateOpts0 = maps:get(deflate_opts, Opts, #{}), + DeflateOpts = case Version of + 'HTTP/1.1' -> DeflateOpts0#{owner => Pid}; + _ -> DeflateOpts0 + end, + try cow_ws:negotiate_permessage_deflate(Params, Extensions, DeflateOpts) of + {ok, RespExt, Extensions2} -> + websocket_extensions(State#state{extensions=Extensions2}, + Req, Tail, [<<", ">>, RespExt|RespHeader]); + ignore -> + websocket_extensions(State, Req, Tail, RespHeader) + catch exit:{error, incompatible_zlib_version, _} -> + websocket_extensions(State, Req, Tail, RespHeader) + end; +websocket_extensions(State=#state{opts=Opts, extensions=Extensions}, + Req=#{pid := Pid, version := Version}, + [{<<"x-webkit-deflate-frame">>, Params}|Tail], RespHeader) -> + DeflateOpts0 = maps:get(deflate_opts, Opts, #{}), + DeflateOpts = case Version of + 'HTTP/1.1' -> DeflateOpts0#{owner => Pid}; + _ -> DeflateOpts0 + end, + try cow_ws:negotiate_x_webkit_deflate_frame(Params, Extensions, DeflateOpts) of + {ok, RespExt, Extensions2} -> + websocket_extensions(State#state{extensions=Extensions2}, + Req, Tail, [<<", ">>, RespExt|RespHeader]); + ignore -> + websocket_extensions(State, Req, Tail, RespHeader) + catch exit:{error, incompatible_zlib_version, _} -> + websocket_extensions(State, Req, Tail, RespHeader) + end; +websocket_extensions(State, Req, [_|Tail], RespHeader) -> + websocket_extensions(State, Req, Tail, RespHeader). + +-spec websocket_handshake(#state{}, Req, any(), Env) + -> {ok, Req, Env} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). 
+websocket_handshake(State=#state{key=Key}, + Req=#{version := 'HTTP/1.1', pid := Pid, streamid := StreamID}, + HandlerState, Env) -> + Challenge = base64:encode(crypto:hash(sha, + << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>)), + %% @todo We don't want date and server headers. + Headers = cowboy_req:response_headers(#{ + <<"connection">> => <<"Upgrade">>, + <<"upgrade">> => <<"websocket">>, + <<"sec-websocket-accept">> => Challenge + }, Req), + Pid ! {{Pid, StreamID}, {switch_protocol, Headers, ?MODULE, {State, HandlerState}}}, + {ok, Req, Env}; +%% For HTTP/2 we do not let the process die, we instead keep it +%% for the Websocket stream. This is because in HTTP/2 we only +%% have a stream, it doesn't take over the whole connection. +websocket_handshake(State, Req=#{ref := Ref, pid := Pid, streamid := StreamID}, + HandlerState, _Env) -> + %% @todo We don't want date and server headers. + Headers = cowboy_req:response_headers(#{}, Req), + Pid ! {{Pid, StreamID}, {switch_protocol, Headers, ?MODULE, {State, HandlerState}}}, + takeover(Pid, Ref, {Pid, StreamID}, undefined, undefined, <<>>, + {State, HandlerState}). + +%% Connection process. + +-record(ps_header, { + buffer = <<>> :: binary() +}). + +-record(ps_payload, { + type :: cow_ws:frame_type(), + len :: non_neg_integer(), + mask_key :: cow_ws:mask_key(), + rsv :: cow_ws:rsv(), + close_code = undefined :: undefined | cow_ws:close_code(), + unmasked = <<>> :: binary(), + unmasked_len = 0 :: non_neg_integer(), + buffer = <<>> :: binary() +}). + +-type parse_state() :: #ps_header{} | #ps_payload{}. + +-spec takeover(pid(), ranch:ref(), inet:socket() | {pid(), cowboy_stream:streamid()}, + module() | undefined, any(), binary(), + {#state{}, any()}) -> no_return(). +takeover(Parent, Ref, Socket, Transport, _Opts, Buffer, + {State0=#state{handler=Handler}, HandlerState}) -> + %% @todo We should have an option to disable this behavior. + ranch:remove_connection(Ref), + Messages = case Transport of + undefined -> undefined; + _ -> Transport:messages() + end, + State = loop_timeout(State0#state{parent=Parent, + ref=Ref, socket=Socket, transport=Transport, + key=undefined, messages=Messages}), + %% We call parse_header/3 immediately because there might be + %% some data in the buffer that was sent along with the handshake. + %% While it is not allowed by the protocol to send frames immediately, + %% we still want to process that data if any. + case erlang:function_exported(Handler, websocket_init, 1) of + true -> handler_call(State, HandlerState, #ps_header{buffer=Buffer}, + websocket_init, undefined, fun after_init/3); + false -> after_init(State, HandlerState, #ps_header{buffer=Buffer}) + end. + +after_init(State=#state{active=true}, HandlerState, ParseState) -> + %% Enable active,N for HTTP/1.1, and auto read_body for HTTP/2. + %% We must do this only after calling websocket_init/1 (if any) + %% to give the handler a chance to disable active mode immediately. + setopts_active(State), + maybe_read_body(State), + parse_header(State, HandlerState, ParseState); +after_init(State, HandlerState, ParseState) -> + parse_header(State, HandlerState, ParseState). + +%% We have two ways of reading the body for Websocket. For HTTP/1.1 +%% we have full control of the socket and can therefore use active,N. +%% For HTTP/2 we are just a stream, and are instead using read_body +%% (automatic mode). 
Technically HTTP/2 will only go passive after +%% receiving the next data message, while HTTP/1.1 goes passive +%% immediately but there might still be data to be processed in +%% the message queue. + +setopts_active(#state{transport=undefined}) -> + ok; +setopts_active(#state{socket=Socket, transport=Transport, opts=Opts}) -> + N = maps:get(active_n, Opts, 100), + Transport:setopts(Socket, [{active, N}]). + +maybe_read_body(#state{socket=Stream={Pid, _}, transport=undefined, active=true}) -> + %% @todo Keep Ref around. + ReadBodyRef = make_ref(), + Pid ! {Stream, {read_body, self(), ReadBodyRef, auto, infinity}}, + ok; +maybe_read_body(_) -> + ok. + +active(State) -> + setopts_active(State), + maybe_read_body(State), + State#state{active=true}. + +passive(State=#state{transport=undefined}) -> + %% Unfortunately we cannot currently cancel read_body. + %% But that's OK, we will just stop reading the body + %% after the next message. + State#state{active=false}; +passive(State=#state{socket=Socket, transport=Transport, messages=Messages}) -> + Transport:setopts(Socket, [{active, false}]), + flush_passive(Socket, Messages), + State#state{active=false}. + +flush_passive(Socket, Messages) -> + receive + {Passive, Socket} when Passive =:= element(4, Messages); + %% Hardcoded for compatibility with Ranch 1.x. + Passive =:= tcp_passive; Passive =:= ssl_passive -> + flush_passive(Socket, Messages) + after 0 -> + ok + end. + +before_loop(State=#state{hibernate=true}, HandlerState, ParseState) -> + proc_lib:hibernate(?MODULE, loop, + [State#state{hibernate=false}, HandlerState, ParseState]); +before_loop(State, HandlerState, ParseState) -> + loop(State, HandlerState, ParseState). + +-spec loop_timeout(#state{}) -> #state{}. +loop_timeout(State=#state{opts=Opts, timeout_ref=PrevRef}) -> + _ = case PrevRef of + undefined -> ignore; + PrevRef -> erlang:cancel_timer(PrevRef) + end, + case maps:get(idle_timeout, Opts, 60000) of + infinity -> + State#state{timeout_ref=undefined}; + Timeout -> + TRef = erlang:start_timer(Timeout, self(), ?MODULE), + State#state{timeout_ref=TRef} + end. + +-spec loop(#state{}, any(), parse_state()) -> no_return(). +loop(State=#state{parent=Parent, socket=Socket, messages=Messages, + timeout_ref=TRef}, HandlerState, ParseState) -> + receive + %% Socket messages. (HTTP/1.1) + {OK, Socket, Data} when OK =:= element(1, Messages) -> + State2 = loop_timeout(State), + parse(State2, HandlerState, ParseState, Data); + {Closed, Socket} when Closed =:= element(2, Messages) -> + terminate(State, HandlerState, {error, closed}); + {Error, Socket, Reason} when Error =:= element(3, Messages) -> + terminate(State, HandlerState, {error, Reason}); + {Passive, Socket} when Passive =:= element(4, Messages); + %% Hardcoded for compatibility with Ranch 1.x. + Passive =:= tcp_passive; Passive =:= ssl_passive -> + setopts_active(State), + loop(State, HandlerState, ParseState); + %% Body reading messages. (HTTP/2) + {request_body, _Ref, nofin, Data} -> + maybe_read_body(State), + State2 = loop_timeout(State), + parse(State2, HandlerState, ParseState, Data); + %% @todo We need to handle this case as if it was an {error, closed} + %% but not before we finish processing frames. We probably should have + %% a check in before_loop to let us stop looping if a flag is set. + {request_body, _Ref, fin, _, Data} -> + maybe_read_body(State), + State2 = loop_timeout(State), + parse(State2, HandlerState, ParseState, Data); + %% Timeouts. 
+        {timeout, TRef, ?MODULE} ->
+            websocket_close(State, HandlerState, timeout);
+        {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) ->
+            before_loop(State, HandlerState, ParseState);
+        %% System messages.
+        {'EXIT', Parent, Reason} ->
+            %% @todo We should exit gracefully.
+            exit(Reason);
+        {system, From, Request} ->
+            sys:handle_system_msg(Request, From, Parent, ?MODULE, [],
+                {State, HandlerState, ParseState});
+        %% Calls from supervisor module.
+        {'$gen_call', From, Call} ->
+            cowboy_children:handle_supervisor_call(Call, From, [], ?MODULE),
+            before_loop(State, HandlerState, ParseState);
+        Message ->
+            handler_call(State, HandlerState, ParseState,
+                websocket_info, Message, fun before_loop/3)
+    end.
+
+parse(State, HandlerState, PS=#ps_header{buffer=Buffer}, Data) ->
+    parse_header(State, HandlerState, PS#ps_header{
+        buffer= <<Buffer/binary, Data/binary>>});
+parse(State, HandlerState, PS=#ps_payload{buffer=Buffer}, Data) ->
+    parse_payload(State, HandlerState, PS#ps_payload{buffer= <<>>},
+        <<Buffer/binary, Data/binary>>).
+
+parse_header(State=#state{opts=Opts, frag_state=FragState, extensions=Extensions},
+        HandlerState, ParseState=#ps_header{buffer=Data}) ->
+    MaxFrameSize = maps:get(max_frame_size, Opts, infinity),
+    case cow_ws:parse_header(Data, Extensions, FragState) of
+        %% All frames sent from the client to the server are masked.
+        {_, _, _, _, undefined, _} ->
+            websocket_close(State, HandlerState, {error, badframe});
+        {_, _, _, Len, _, _} when Len > MaxFrameSize ->
+            websocket_close(State, HandlerState, {error, badsize});
+        {Type, FragState2, Rsv, Len, MaskKey, Rest} ->
+            parse_payload(State#state{frag_state=FragState2}, HandlerState,
+                #ps_payload{type=Type, len=Len, mask_key=MaskKey, rsv=Rsv}, Rest);
+        more ->
+            before_loop(State, HandlerState, ParseState);
+        error ->
+            websocket_close(State, HandlerState, {error, badframe})
+    end.
+
+parse_payload(State=#state{frag_state=FragState, utf8_state=Incomplete, extensions=Extensions},
+        HandlerState, ParseState=#ps_payload{
+            type=Type, len=Len, mask_key=MaskKey, rsv=Rsv,
+            unmasked=Unmasked, unmasked_len=UnmaskedLen}, Data) ->
+    case cow_ws:parse_payload(Data, MaskKey, Incomplete, UnmaskedLen,
+            Type, Len, FragState, Extensions, Rsv) of
+        {ok, CloseCode, Payload, Utf8State, Rest} ->
+            dispatch_frame(State#state{utf8_state=Utf8State}, HandlerState,
+                ParseState#ps_payload{unmasked= <<Unmasked/binary, Payload/binary>>,
+                    close_code=CloseCode}, Rest);
+        {ok, Payload, Utf8State, Rest} ->
+            dispatch_frame(State#state{utf8_state=Utf8State}, HandlerState,
+                ParseState#ps_payload{unmasked= <<Unmasked/binary, Payload/binary>>},
+                Rest);
+        {more, CloseCode, Payload, Utf8State} ->
+            before_loop(State#state{utf8_state=Utf8State}, HandlerState,
+                ParseState#ps_payload{len=Len - byte_size(Data), close_code=CloseCode,
+                    unmasked= <<Unmasked/binary, Payload/binary>>,
+                    unmasked_len=UnmaskedLen + byte_size(Data)});
+        {more, Payload, Utf8State} ->
+            before_loop(State#state{utf8_state=Utf8State}, HandlerState,
+                ParseState#ps_payload{len=Len - byte_size(Data),
+                    unmasked= <<Unmasked/binary, Payload/binary>>,
+                    unmasked_len=UnmaskedLen + byte_size(Data)});
+        Error = {error, _Reason} ->
+            websocket_close(State, HandlerState, Error)
+    end.
+
+dispatch_frame(State=#state{opts=Opts, frag_state=FragState, frag_buffer=SoFar}, HandlerState,
+        #ps_payload{type=Type0, unmasked=Payload0, close_code=CloseCode0}, RemainingData) ->
+    MaxFrameSize = maps:get(max_frame_size, Opts, infinity),
+    case cow_ws:make_frame(Type0, Payload0, CloseCode0, FragState) of
+        %% @todo Allow receiving fragments.
+ {fragment, _, _, Payload} when byte_size(Payload) + byte_size(SoFar) > MaxFrameSize -> + websocket_close(State, HandlerState, {error, badsize}); + {fragment, nofin, _, Payload} -> + parse_header(State#state{frag_buffer= << SoFar/binary, Payload/binary >>}, + HandlerState, #ps_header{buffer=RemainingData}); + {fragment, fin, Type, Payload} -> + handler_call(State#state{frag_state=undefined, frag_buffer= <<>>}, HandlerState, + #ps_header{buffer=RemainingData}, + websocket_handle, {Type, << SoFar/binary, Payload/binary >>}, + fun parse_header/3); + close -> + websocket_close(State, HandlerState, remote); + {close, CloseCode, Payload} -> + websocket_close(State, HandlerState, {remote, CloseCode, Payload}); + Frame = ping -> + transport_send(State, nofin, frame(pong, State)), + handler_call(State, HandlerState, + #ps_header{buffer=RemainingData}, + websocket_handle, Frame, fun parse_header/3); + Frame = {ping, Payload} -> + transport_send(State, nofin, frame({pong, Payload}, State)), + handler_call(State, HandlerState, + #ps_header{buffer=RemainingData}, + websocket_handle, Frame, fun parse_header/3); + Frame -> + handler_call(State, HandlerState, + #ps_header{buffer=RemainingData}, + websocket_handle, Frame, fun parse_header/3) + end. + +handler_call(State=#state{handler=Handler}, HandlerState, + ParseState, Callback, Message, NextState) -> + try case Callback of + websocket_init -> Handler:websocket_init(HandlerState); + _ -> Handler:Callback(Message, HandlerState) + end of + {Commands, HandlerState2} when is_list(Commands) -> + handler_call_result(State, + HandlerState2, ParseState, NextState, Commands); + {Commands, HandlerState2, hibernate} when is_list(Commands) -> + handler_call_result(State#state{hibernate=true}, + HandlerState2, ParseState, NextState, Commands); + %% The following call results are deprecated. + {ok, HandlerState2} -> + NextState(State, HandlerState2, ParseState); + {ok, HandlerState2, hibernate} -> + NextState(State#state{hibernate=true}, HandlerState2, ParseState); + {reply, Payload, HandlerState2} -> + case websocket_send(Payload, State) of + ok -> + NextState(State, HandlerState2, ParseState); + stop -> + terminate(State, HandlerState2, stop); + Error = {error, _} -> + terminate(State, HandlerState2, Error) + end; + {reply, Payload, HandlerState2, hibernate} -> + case websocket_send(Payload, State) of + ok -> + NextState(State#state{hibernate=true}, + HandlerState2, ParseState); + stop -> + terminate(State, HandlerState2, stop); + Error = {error, _} -> + terminate(State, HandlerState2, Error) + end; + {stop, HandlerState2} -> + websocket_close(State, HandlerState2, stop) + catch Class:Reason:Stacktrace -> + websocket_send_close(State, {crash, Class, Reason}), + handler_terminate(State, HandlerState, {crash, Class, Reason}), + erlang:raise(Class, Reason, Stacktrace) + end. + +-spec handler_call_result(#state{}, any(), parse_state(), fun(), commands()) -> no_return(). +handler_call_result(State0, HandlerState, ParseState, NextState, Commands) -> + case commands(Commands, State0, []) of + {ok, State} -> + NextState(State, HandlerState, ParseState); + {stop, State} -> + terminate(State, HandlerState, stop); + {Error = {error, _}, State} -> + terminate(State, HandlerState, Error) + end. 
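handler_call_result/5 above hands whatever list the handler returned to commands/3 below. Extending the hypothetical my_ws_handler sketch from earlier (these clauses would replace its websocket_info/2), the non-frame commands can be used for flow control; start_slow_job and {job_done, Result} are made-up message names.

%% Hypothetical clauses: pause reading while a slow job runs, then resume.
websocket_info(start_slow_job, State) ->
    {[
        {active, false},                          %% stop delivering incoming frames for now
        {set_options, #{idle_timeout => 120000}}, %% picked up by commands/3 below
        {text, <<"busy">>}
    ], State};
websocket_info({job_done, Result}, State) ->
    {[{active, true}, {text, Result}], State};
websocket_info(_Info, State) ->
    {[], State}.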
+ +commands([], State, []) -> + {ok, State}; +commands([], State, Data) -> + Result = transport_send(State, nofin, lists:reverse(Data)), + {Result, State}; +commands([{active, Active}|Tail], State0=#state{active=Active0}, Data) when is_boolean(Active) -> + State = if + Active, not Active0 -> + active(State0); + Active0, not Active -> + passive(State0); + true -> + State0 + end, + commands(Tail, State#state{active=Active}, Data); +commands([{deflate, Deflate}|Tail], State, Data) when is_boolean(Deflate) -> + commands(Tail, State#state{deflate=Deflate}, Data); +commands([{set_options, SetOpts}|Tail], State0=#state{opts=Opts}, Data) -> + State = case SetOpts of + #{idle_timeout := IdleTimeout} -> + loop_timeout(State0#state{opts=Opts#{idle_timeout => IdleTimeout}}); + _ -> + State0 + end, + commands(Tail, State, Data); +commands([{shutdown_reason, ShutdownReason}|Tail], State, Data) -> + commands(Tail, State#state{shutdown_reason=ShutdownReason}, Data); +commands([Frame|Tail], State, Data0) -> + Data = [frame(Frame, State)|Data0], + case is_close_frame(Frame) of + true -> + _ = transport_send(State, fin, lists:reverse(Data)), + {stop, State}; + false -> + commands(Tail, State, Data) + end. + +transport_send(#state{socket=Stream={Pid, _}, transport=undefined}, IsFin, Data) -> + Pid ! {Stream, {data, IsFin, Data}}, + ok; +transport_send(#state{socket=Socket, transport=Transport}, _, Data) -> + Transport:send(Socket, Data). + +-spec websocket_send(cow_ws:frame(), #state{}) -> ok | stop | {error, atom()}. +websocket_send(Frames, State) when is_list(Frames) -> + websocket_send_many(Frames, State, []); +websocket_send(Frame, State) -> + Data = frame(Frame, State), + case is_close_frame(Frame) of + true -> + _ = transport_send(State, fin, Data), + stop; + false -> + transport_send(State, nofin, Data) + end. + +websocket_send_many([], State, Acc) -> + transport_send(State, nofin, lists:reverse(Acc)); +websocket_send_many([Frame|Tail], State, Acc0) -> + Acc = [frame(Frame, State)|Acc0], + case is_close_frame(Frame) of + true -> + _ = transport_send(State, fin, lists:reverse(Acc)), + stop; + false -> + websocket_send_many(Tail, State, Acc) + end. + +is_close_frame(close) -> true; +is_close_frame({close, _}) -> true; +is_close_frame({close, _, _}) -> true; +is_close_frame(_) -> false. + +-spec websocket_close(#state{}, any(), terminate_reason()) -> no_return(). +websocket_close(State, HandlerState, Reason) -> + websocket_send_close(State, Reason), + terminate(State, HandlerState, Reason). + +websocket_send_close(State, Reason) -> + _ = case Reason of + Normal when Normal =:= stop; Normal =:= timeout -> + transport_send(State, fin, frame({close, 1000, <<>>}, State)); + {error, badframe} -> + transport_send(State, fin, frame({close, 1002, <<>>}, State)); + {error, badencoding} -> + transport_send(State, fin, frame({close, 1007, <<>>}, State)); + {error, badsize} -> + transport_send(State, fin, frame({close, 1009, <<>>}, State)); + {crash, _, _} -> + transport_send(State, fin, frame({close, 1011, <<>>}, State)); + remote -> + transport_send(State, fin, frame(close, State)); + {remote, Code, _} -> + transport_send(State, fin, frame({close, Code, <<>>}, State)) + end, + ok. + +%% Don't compress frames while deflate is disabled. +frame(Frame, #state{deflate=false, extensions=Extensions}) -> + cow_ws:frame(Frame, Extensions#{deflate => false}); +frame(Frame, #state{extensions=Extensions}) -> + cow_ws:frame(Frame, Extensions). + +-spec terminate(#state{}, any(), terminate_reason()) -> no_return(). 
+terminate(State=#state{shutdown_reason=Shutdown}, HandlerState, Reason) -> + handler_terminate(State, HandlerState, Reason), + case Shutdown of + normal -> exit(normal); + _ -> exit({shutdown, Shutdown}) + end. + +handler_terminate(#state{handler=Handler, req=Req}, HandlerState, Reason) -> + cowboy_handler:terminate(Reason, Req, HandlerState, Handler). + +%% System callbacks. + +-spec system_continue(_, _, {#state{}, any(), parse_state()}) -> no_return(). +system_continue(_, _, {State, HandlerState, ParseState}) -> + loop(State, HandlerState, ParseState). + +-spec system_terminate(any(), _, _, {#state{}, any(), parse_state()}) -> no_return(). +system_terminate(Reason, _, _, {State, HandlerState, _}) -> + %% @todo We should exit gracefully, if possible. + terminate(State, HandlerState, Reason). + +-spec system_code_change(Misc, _, _, _) + -> {ok, Misc} when Misc::{#state{}, any(), parse_state()}. +system_code_change(Misc, _, _, _) -> + {ok, Misc}.
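For completeness, a sketch of a TLS listener that would exercise cowboy_tls above: ALPN offers h2 first so that ssl:negotiated_protocol/1 can select cowboy_http2, otherwise cowboy_http is used. The listener name, certificate paths and route are assumptions, not part of this patch.

%% Hypothetical TLS listener (names, paths and port are assumptions).
start_tls_listener() ->
    Dispatch = cowboy_router:compile([{'_', [{"/ws", my_ws_handler, []}]}]),
    cowboy:start_tls(secure_listener,
        [
            {port, 8443},
            {certfile, "priv/ssl/cert.pem"},
            {keyfile, "priv/ssl/key.pem"},
            %% Offer HTTP/2 first, fall back to HTTP/1.1.
            {alpn_preferred_protocols, [<<"h2">>, <<"http/1.1">>]}
        ],
        #{
            env => #{dispatch => Dispatch},
            %% Set to true only behind a proxy that sends the PROXY protocol header.
            proxy_header => false
        }).

Setting proxy_header => true would make connection_process/4 in cowboy_tls call ranch:recv_proxy_header/2 before completing the handshake.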