author     Robert Newson <rnewson@apache.org>    2010-07-26 17:21:30 +0000
committer  Robert Newson <rnewson@apache.org>    2010-07-26 17:21:30 +0000
commit     4b0948ddb3a428f8a5330e05745b2fbd4ccf9375 (patch)
tree       5ab1dde286028653d5569ceae6dfc883fa365b7a /src/mochiweb
parent     cd214b23e8129868d4a7020ddafd55a16e496652 (diff)
Add SSL support to CouchDB.
To enable SSL you need to do three things:

  1) Enable the httpsd daemon in local.ini (you can just uncomment the line).
  2) Supply your PEM-encoded cert and key files in the [ssl] section.
  3) Start CouchDB.

CouchDB will now, in addition to handling HTTP on port 5984, accept SSL connections on port 6984.

The patch itself adds SSL support by updating the local version of MochiWeb to the latest. The upstream release includes our local tweaks to support large numbers and to handle Accept-Encoding headers. Our local MochiWeb fork had changed the default idle timeout from 10 seconds to 5 minutes, and it was agreed on IRC to revert this change. The only tweaks to MochiWeb were in mochiweb.app.src (to record the git commit I built from) and the removal of Makefile (replaced by Makefile.am).

Futon received many tweaks, as we had 'http://' hardcoded all over; all such instances now use window.location.protocol + '//'. CouchDB received a tweak to use the right scheme in couch_httpd:absolute_uri (it now gets it from the MochiReq rather than from mochiweb_socket_server).

git-svn-id: https://svn.apache.org/repos/asf/couchdb/trunk@979368 13f79535-47bb-0310-9956-ffa450edef68
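For reference, the listener options the updated MochiWeb accepts follow the shape used by the with_server/3 and ssl_cert_opts/0 test helpers added to mochiweb.erl later in this diff. A minimal sketch, assuming the ssl application is available; the port, loop fun, and certificate paths are placeholders:

    %% Illustrative only: mirrors the {ssl, true} / {ssl_opts, ...} options
    %% exercised by the new test code in mochiweb.erl; paths are placeholders.
    Options = [{ip, "127.0.0.1"},
               {port, 6984},
               {loop, fun (Req) -> Req:ok({"text/plain", "hello over TLS"}) end},
               {ssl, true},
               {ssl_opts, [{certfile, "/path/to/cert.pem"},
                           {keyfile, "/path/to/key.pem"}]}],
    {ok, Server} = mochiweb_http:start(Options).

CouchDB itself drives this through the httpsd daemon and the [ssl] section of local.ini, as described in the commit message above.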
Diffstat (limited to 'src/mochiweb')
-rw-r--r--   src/mochiweb/Makefile.am                  115
-rw-r--r--   src/mochiweb/internal.hrl                   3
-rw-r--r--   src/mochiweb/mochifmt.erl                  27
-rw-r--r--   src/mochiweb/mochifmt_records.erl           8
-rw-r--r--   src/mochiweb/mochifmt_std.erl               7
-rw-r--r--   src/mochiweb/mochiglobal.erl              107
-rw-r--r--   src/mochiweb/mochihex.erl                  38
-rw-r--r--   src/mochiweb/mochijson.erl                 27
-rw-r--r--   src/mochiweb/mochijson2.erl               198
-rw-r--r--   src/mochiweb/mochilists.erl               104
-rw-r--r--   src/mochiweb/mochilogfile2.erl            140
-rw-r--r--   src/mochiweb/mochinum.erl                  82
-rw-r--r--   src/mochiweb/mochitemp.erl                310
-rw-r--r--   src/mochiweb/mochiutf8.erl                316
-rw-r--r--   src/mochiweb/mochiweb.app.in                2
-rw-r--r--   src/mochiweb/mochiweb.app.src               9
-rw-r--r--   src/mochiweb/mochiweb.erl                 221
-rw-r--r--   src/mochiweb/mochiweb_acceptor.erl         48
-rw-r--r--   src/mochiweb/mochiweb_app.erl               7
-rw-r--r--   src/mochiweb/mochiweb_charref.erl          35
-rw-r--r--   src/mochiweb/mochiweb_cookies.erl         108
-rw-r--r--   src/mochiweb/mochiweb_cover.erl            75
-rw-r--r--   src/mochiweb/mochiweb_echo.erl             11
-rw-r--r--   src/mochiweb/mochiweb_headers.erl         180
-rw-r--r--   src/mochiweb/mochiweb_html.erl            668
-rw-r--r--   src/mochiweb/mochiweb_http.erl            181
-rw-r--r--   src/mochiweb/mochiweb_io.erl               46
-rw-r--r--   src/mochiweb/mochiweb_mime.erl             94
-rw-r--r--   src/mochiweb/mochiweb_multipart.erl       392
-rw-r--r--   src/mochiweb/mochiweb_request.erl         326
-rw-r--r--   src/mochiweb/mochiweb_response.erl          8
-rw-r--r--   src/mochiweb/mochiweb_skel.erl             41
-rw-r--r--   src/mochiweb/mochiweb_socket.erl           84
-rw-r--r--   src/mochiweb/mochiweb_socket_server.erl   190
-rw-r--r--   src/mochiweb/mochiweb_sup.erl               7
-rw-r--r--   src/mochiweb/mochiweb_util.erl            361
-rw-r--r--   src/mochiweb/reloader.erl                  40
37 files changed, 3542 insertions, 1074 deletions
diff --git a/src/mochiweb/Makefile.am b/src/mochiweb/Makefile.am
index c191abfa..27efebda 100644
--- a/src/mochiweb/Makefile.am
+++ b/src/mochiweb/Makefile.am
@@ -10,59 +10,79 @@
## License for the specific language governing permissions and limitations under
## the License.
-mochiwebebindir = $(localerlanglibdir)/mochiweb-r113/ebin
+mochiwebebindir = $(localerlanglibdir)/mochiweb-7c2bc2/ebin
mochiweb_file_collection = \
- mochifmt.erl \
- mochifmt_records.erl \
- mochifmt_std.erl \
- mochihex.erl \
- mochijson.erl \
- mochijson2.erl \
- mochinum.erl \
+ mochifmt.erl \
+ mochifmt_records.erl \
+ mochifmt_std.erl \
+ mochiglobal.erl \
+ mochihex.erl \
+ mochijson.erl \
+ mochijson2.erl \
+ mochilists.erl \
+ mochilogfile2.erl \
+ mochinum.erl \
+ mochitemp.erl \
+ mochiutf8.erl \
mochiweb.app.in \
- mochiweb.erl \
- mochiweb_app.erl \
- mochiweb_charref.erl \
- mochiweb_cookies.erl \
- mochiweb_echo.erl \
- mochiweb_headers.erl \
- mochiweb_html.erl \
- mochiweb_http.erl \
- mochiweb_multipart.erl \
- mochiweb_request.erl \
- mochiweb_response.erl \
- mochiweb_skel.erl \
- mochiweb_socket_server.erl \
- mochiweb_sup.erl \
- mochiweb_util.erl \
- reloader.erl
+ mochiweb.erl \
+ mochiweb_acceptor.erl \
+ mochiweb_app.erl \
+ mochiweb_charref.erl \
+ mochiweb_cookies.erl \
+ mochiweb_cover.erl \
+ mochiweb_echo.erl \
+ mochiweb_headers.erl \
+ mochiweb_html.erl \
+ mochiweb_http.erl \
+ mochiweb_io.erl \
+ mochiweb_mime.erl \
+ mochiweb_multipart.erl \
+ mochiweb_request.erl \
+ mochiweb_response.erl \
+ mochiweb_skel.erl \
+ mochiweb_socket.erl \
+ mochiweb_socket_server.erl \
+ mochiweb_sup.erl \
+ mochiweb_util.erl \
+ reloader.erl
mochiwebebin_make_generated_file_list = \
- mochifmt.beam \
- mochifmt_records.beam \
- mochifmt_std.beam \
- mochihex.beam \
- mochijson.beam \
- mochijson2.beam \
- mochinum.beam \
+ mochifmt.beam \
+ mochifmt_records.beam \
+ mochifmt_std.beam \
+ mochiglobal.beam \
+ mochihex.beam \
+ mochijson.beam \
+ mochijson2.beam \
+ mochilists.beam \
+ mochilogfile2.beam \
+ mochinum.beam \
+ mochitemp.beam \
+ mochiutf8.beam \
mochiweb.app \
- mochiweb.beam \
- mochiweb_app.beam \
- mochiweb_charref.beam \
- mochiweb_cookies.beam \
- mochiweb_echo.beam \
- mochiweb_headers.beam \
- mochiweb_html.beam \
- mochiweb_http.beam \
- mochiweb_multipart.beam \
- mochiweb_request.beam \
- mochiweb_response.beam \
- mochiweb_skel.beam \
- mochiweb_socket_server.beam \
- mochiweb_sup.beam \
- mochiweb_util.beam \
- reloader.beam
+ mochiweb.beam \
+ mochiweb_acceptor.beam \
+ mochiweb_app.beam \
+ mochiweb_charref.beam \
+ mochiweb_cookies.beam \
+ mochiweb_cover.beam \
+ mochiweb_echo.beam \
+ mochiweb_headers.beam \
+ mochiweb_html.beam \
+ mochiweb_http.beam \
+ mochiweb_io.beam \
+ mochiweb_mime.beam \
+ mochiweb_multipart.beam \
+ mochiweb_request.beam \
+ mochiweb_response.beam \
+ mochiweb_skel.beam \
+ mochiweb_socket.beam \
+ mochiweb_socket_server.beam \
+ mochiweb_sup.beam \
+ mochiweb_util.beam \
+ reloader.beam
mochiwebebin_DATA = \
$(mochiwebebin_make_generated_file_list)
@@ -77,4 +97,5 @@ CLEANFILES = \
cp $< $@
%.beam: %.erl
+
$(ERLC) $(ERLC_FLAGS) $<
diff --git a/src/mochiweb/internal.hrl b/src/mochiweb/internal.hrl
new file mode 100644
index 00000000..6db899a0
--- /dev/null
+++ b/src/mochiweb/internal.hrl
@@ -0,0 +1,3 @@
+
+-define(RECBUF_SIZE, 8192).
+
diff --git a/src/mochiweb/mochifmt.erl b/src/mochiweb/mochifmt.erl
index da0a133a..5bc6b9c4 100644
--- a/src/mochiweb/mochifmt.erl
+++ b/src/mochiweb/mochifmt.erl
@@ -10,7 +10,6 @@
-export([tokenize/1, format/3, get_field/3, format_field/3]).
-export([bformat/2, bformat/3]).
-export([f/2, f/3]).
--export([test/0]).
-record(conversion, {length, precision, ctype, align, fill_char, sign}).
@@ -113,15 +112,6 @@ bformat(Format, Args) ->
bformat(Format, Args, Module) ->
iolist_to_binary(format(Format, Args, Module)).
-%% @spec test() -> ok
-%% @doc Run tests.
-test() ->
- ok = test_tokenize(),
- ok = test_format(),
- ok = test_std(),
- ok = test_records(),
- ok.
-
%% Internal API
add_raw("", Acc) ->
@@ -375,14 +365,21 @@ parse_std_conversion([$. | Spec], Acc) ->
parse_std_conversion([Type], Acc) ->
parse_std_conversion("", Acc#conversion{ctype=ctype(Type)}).
-test_tokenize() ->
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+tokenize_test() ->
{?MODULE, [{raw, "ABC"}]} = tokenize("ABC"),
{?MODULE, [{format, {"0", "", ""}}]} = tokenize("{0}"),
{?MODULE, [{raw, "ABC"}, {format, {"1", "", ""}}, {raw, "DEF"}]} =
tokenize("ABC{1}DEF"),
ok.
-test_format() ->
+format_test() ->
<<" -4">> = bformat("{0:4}", [-4]),
<<" 4">> = bformat("{0:4}", [4]),
<<" 4">> = bformat("{0:{0}}", [4]),
@@ -410,12 +407,12 @@ test_format() ->
{{2008,5,4}, {4, 2, 2}}),
ok.
-test_std() ->
+std_test() ->
M = mochifmt_std:new(),
<<"01">> = bformat("{0}{1}", [0, 1], M),
ok.
-test_records() ->
+records_test() ->
M = mochifmt_records:new([{conversion, record_info(fields, conversion)}]),
R = #conversion{length=long, precision=hard, sign=peace},
long = M:get_value("length", R),
@@ -424,3 +421,5 @@ test_records() ->
<<"long hard">> = bformat("{length} {precision}", R, M),
<<"long hard">> = bformat("{0.length} {0.precision}", [R], M),
ok.
+
+-endif.
diff --git a/src/mochiweb/mochifmt_records.erl b/src/mochiweb/mochifmt_records.erl
index 94c77978..2326d1dd 100644
--- a/src/mochiweb/mochifmt_records.erl
+++ b/src/mochiweb/mochifmt_records.erl
@@ -28,3 +28,11 @@ get_rec_index(Atom, [Atom | _], Index) ->
Index;
get_rec_index(Atom, [_ | Rest], Index) ->
get_rec_index(Atom, Rest, 1 + Index).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochifmt_std.erl b/src/mochiweb/mochifmt_std.erl
index 9442016a..d4d74f6f 100644
--- a/src/mochiweb/mochifmt_std.erl
+++ b/src/mochiweb/mochifmt_std.erl
@@ -21,3 +21,10 @@ get_value(Key, Args) ->
format_field(Arg, Format) ->
mochifmt:format_field(Arg, Format, THIS).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiglobal.erl b/src/mochiweb/mochiglobal.erl
new file mode 100644
index 00000000..c740b878
--- /dev/null
+++ b/src/mochiweb/mochiglobal.erl
@@ -0,0 +1,107 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
+%% <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
+-module(mochiglobal).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([get/1, get/2, put/2, delete/1]).
+
+-spec get(atom()) -> any() | undefined.
+%% @equiv get(K, undefined)
+get(K) ->
+ get(K, undefined).
+
+-spec get(atom(), T) -> any() | T.
+%% @doc Get the term for K or return Default.
+get(K, Default) ->
+ get(K, Default, key_to_module(K)).
+
+get(_K, Default, Mod) ->
+ try Mod:term()
+ catch error:undef ->
+ Default
+ end.
+
+-spec put(atom(), any()) -> ok.
+%% @doc Store term V at K, replaces an existing term if present.
+put(K, V) ->
+ put(K, V, key_to_module(K)).
+
+put(_K, V, Mod) ->
+ Bin = compile(Mod, V),
+ code:purge(Mod),
+ code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
+ ok.
+
+-spec delete(atom()) -> boolean().
+%% @doc Delete term stored at K, no-op if non-existent.
+delete(K) ->
+ delete(K, key_to_module(K)).
+
+delete(_K, Mod) ->
+ code:purge(Mod),
+ code:delete(Mod).
+
+-spec key_to_module(atom()) -> atom().
+key_to_module(K) ->
+ list_to_atom("mochiglobal:" ++ atom_to_list(K)).
+
+-spec compile(atom(), any()) -> binary().
+compile(Module, T) ->
+ {ok, Module, Bin} = compile:forms(forms(Module, T),
+ [verbose, report_errors]),
+ Bin.
+
+-spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
+forms(Module, T) ->
+ [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
+
+-spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
+term_to_abstract(Module, Getter, T) ->
+ [%% -module(Module).
+ erl_syntax:attribute(
+ erl_syntax:atom(module),
+ [erl_syntax:atom(Module)]),
+ %% -export([Getter/0]).
+ erl_syntax:attribute(
+ erl_syntax:atom(export),
+ [erl_syntax:list(
+ [erl_syntax:arity_qualifier(
+ erl_syntax:atom(Getter),
+ erl_syntax:integer(0))])]),
+ %% Getter() -> T.
+ erl_syntax:function(
+ erl_syntax:atom(Getter),
+ [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+get_put_delete_test() ->
+ K = '$$test$$mochiglobal',
+ delete(K),
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ try
+ ?MODULE:put(K, baz),
+ ?assertEqual(
+ baz,
+ get(K, bar)),
+ ?MODULE:put(K, wibble),
+ ?assertEqual(
+ wibble,
+ ?MODULE:get(K))
+ after
+ delete(K)
+ end,
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ ?assertEqual(
+ undefined,
+ ?MODULE:get(K)),
+ ok.
+-endif.
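
The new mochiglobal module above exports get/1, get/2, put/2 and delete/1. A minimal usage sketch based on those exports; the key and stored term are arbitrary:

    %% Sketch only: store a term in a generated module's constant pool,
    %% read it back, and fall back to a default for a missing key.
    ok = mochiglobal:put(example_key, {listen_port, 6984}),
    {listen_port, 6984} = mochiglobal:get(example_key),
    default = mochiglobal:get(missing_key, default),
    true = mochiglobal:delete(example_key).

Reads are plain function calls into constant-pool data, while each put/2 recompiles and reloads a module, so this suits values that are read often and written rarely.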
diff --git a/src/mochiweb/mochihex.erl b/src/mochiweb/mochihex.erl
index 7fe6899e..44a2aa7f 100644
--- a/src/mochiweb/mochihex.erl
+++ b/src/mochiweb/mochihex.erl
@@ -6,7 +6,7 @@
-module(mochihex).
-author('bob@mochimedia.com').
--export([test/0, to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
+-export([to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
%% @type iolist() = [char() | binary() | iolist()]
%% @type iodata() = iolist() | binary()
@@ -46,16 +46,6 @@ hexdigit(C) when C >= 0, C =< 9 ->
hexdigit(C) when C =< 15 ->
C + $a - 10.
-%% @spec test() -> ok
-%% @doc Test this module.
-test() ->
- "ff000ff1" = to_hex([255, 0, 15, 241]),
- <<255, 0, 15, 241>> = to_bin("ff000ff1"),
- 16#ff000ff1 = to_int("ff000ff1"),
- "ff000ff1" = to_hex(16#ff000ff1),
- ok.
-
-
%% Internal API
to_hex(<<>>, Acc) ->
@@ -73,3 +63,29 @@ to_bin([], Acc) ->
to_bin([C1, C2 | Rest], Acc) ->
to_bin(Rest, [(dehex(C1) bsl 4) bor dehex(C2) | Acc]).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+to_hex_test() ->
+ "ff000ff1" = to_hex([255, 0, 15, 241]),
+ "ff000ff1" = to_hex(16#ff000ff1),
+ "0" = to_hex(16#0),
+ ok.
+
+to_bin_test() ->
+ <<255, 0, 15, 241>> = to_bin("ff000ff1"),
+ <<255, 0, 10, 161>> = to_bin("Ff000aA1"),
+ ok.
+
+to_int_test() ->
+ 16#ff000ff1 = to_int("ff000ff1"),
+ 16#ff000aa1 = to_int("FF000Aa1"),
+ 16#0 = to_int("0"),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochijson.erl b/src/mochiweb/mochijson.erl
index 74695a75..2e3d1452 100644
--- a/src/mochiweb/mochijson.erl
+++ b/src/mochiweb/mochijson.erl
@@ -8,7 +8,6 @@
-export([decoder/1, decode/1]).
-export([binary_encoder/1, binary_encode/1]).
-export([binary_decoder/1, binary_decode/1]).
--export([test/0]).
% This is a macro to placate syntax highlighters..
-define(Q, $\").
@@ -91,10 +90,6 @@ binary_encode(Any) ->
binary_decode(S) ->
mochijson2:decode(S).
-test() ->
- test_all(),
- mochijson2:test().
-
%% Internal API
parse_encoder_options([], State) ->
@@ -145,7 +140,7 @@ json_encode_proplist([], _State) ->
"{}";
json_encode_proplist(Props, State) ->
F = fun ({K, V}, Acc) ->
- KS = case K of
+ KS = case K of
K when is_atom(K) ->
json_encode_string_utf8(atom_to_list(K));
K when is_integer(K) ->
@@ -321,12 +316,12 @@ tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
% coalesce UTF-16 surrogate pair?
C = dehex(C0) bor
(dehex(C1) bsl 4) bor
- (dehex(C2) bsl 8) bor
+ (dehex(C2) bsl 8) bor
(dehex(C3) bsl 12),
tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
tokenize_string([C | Rest], S, Acc) when C >= $\s; C < 16#10FFFF ->
tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
-
+
tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
when is_list(C); is_binary(C); C >= 16#7f ->
List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
@@ -407,6 +402,13 @@ tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
{{const, list_to_float(Float)}, Rest, S1}
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
%% testing constructs borrowed from the Yaws JSON implementation.
%% Create an object from a list of Key/Value pairs.
@@ -419,7 +421,7 @@ is_obj({struct, Props}) ->
true;
(_) ->
false
- end,
+ end,
lists:all(F, Props).
obj_from_list(Props) ->
@@ -462,11 +464,10 @@ equiv_list([], []) ->
equiv_list([V1 | L1], [V2 | L2]) ->
equiv(V1, V2) andalso equiv_list(L1, L2).
-test_all() ->
- test_issue33(),
+e2j_vec_test() ->
test_one(e2j_test_vec(utf8), 1).
-test_issue33() ->
+issue33_test() ->
%% http://code.google.com/p/mochiweb/issues/detail?id=33
Js = {struct, [{"key", [194, 163]}]},
Encoder = encoder([{input_encoding, utf8}]),
@@ -526,3 +527,5 @@ e2j_test_vec(utf8) ->
{{array, [-123, "foo", obj_from_list([{"bar", {array, []}}]), null]},
"[-123,\"foo\",{\"bar\":[]},null]"}
].
+
+-endif.
diff --git a/src/mochiweb/mochijson2.erl b/src/mochiweb/mochijson2.erl
index 111c37bd..eeb25b5b 100644
--- a/src/mochiweb/mochijson2.erl
+++ b/src/mochiweb/mochijson2.erl
@@ -9,7 +9,6 @@
-author('bob@mochimedia.com').
-export([encoder/1, encode/1]).
-export([decoder/1, decode/1]).
--export([test/0]).
% This is a macro to placate syntax highlighters..
-define(Q, $\").
@@ -39,8 +38,9 @@
%% @type json_number() = integer() | float()
%% @type json_array() = [json_term()]
%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_iolist() = {json, iolist()}
%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object()
+%% json_object() | json_iolist()
-record(encoder, {handler=null,
utf8=false}).
@@ -75,9 +75,6 @@ decoder(Options) ->
decode(S) ->
json_decode(S, #decoder{}).
-test() ->
- test_all().
-
%% Internal API
parse_encoder_options([], State) ->
@@ -108,6 +105,8 @@ json_encode(Array, State) when is_list(Array) ->
json_encode_array(Array, State);
json_encode({struct, Props}, State) when is_list(Props) ->
json_encode_proplist(Props, State);
+json_encode({json, IoList}, _State) ->
+ IoList;
json_encode(Bad, #encoder{handler=null}) ->
exit({json_encode, {bad_term, Bad}});
json_encode(Bad, State=#encoder{handler=Handler}) ->
@@ -202,12 +201,10 @@ json_bin_is_safe(<<C, Rest/binary>>) ->
false;
$\t ->
false;
- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ C when C >= 0, C < $\s; C >= 16#7f ->
false;
C when C < 16#7f ->
- json_bin_is_safe(Rest);
- _ ->
- false
+ json_bin_is_safe(Rest)
end.
json_encode_string_unicode([], _State, Acc) ->
@@ -507,6 +504,12 @@ tokenize(B, S=#decoder{offset=O}) ->
trim = S#decoder.state,
{eof, S}
end.
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
%% testing constructs borrowed from the Yaws JSON implementation.
@@ -516,19 +519,13 @@ obj_new() ->
{struct, []}.
is_obj({struct, Props}) ->
- F = fun ({K, _}) when is_binary(K) ->
- true;
- (_) ->
- false
- end,
+ F = fun ({K, _}) when is_binary(K) -> true end,
lists:all(F, Props).
obj_from_list(Props) ->
Obj = {struct, Props},
- case is_obj(Obj) of
- true -> Obj;
- false -> exit({json_bad_object, Obj})
- end.
+ ?assert(is_obj(Obj)),
+ Obj.
%% Test for equivalence of Erlang terms.
%% Due to arbitrary order of construction, equivalent objects might
@@ -541,9 +538,7 @@ equiv(L1, L2) when is_list(L1), is_list(L2) ->
equiv_list(L1, L2);
equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
-equiv(true, true) -> true;
-equiv(false, false) -> true;
-equiv(null, null) -> true.
+equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
%% Object representation and traversal order is unknown.
%% Use the sledgehammer and sort property lists.
@@ -563,11 +558,11 @@ equiv_list([], []) ->
equiv_list([V1 | L1], [V2 | L2]) ->
equiv(V1, V2) andalso equiv_list(L1, L2).
-test_all() ->
+decode_test() ->
[1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
- <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]),
- test_encoder_utf8(),
- test_input_validation(),
+ <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
+
+e2j_vec_test() ->
test_one(e2j_test_vec(utf8), 1).
test_one([], _N) ->
@@ -624,7 +619,7 @@ e2j_test_vec(utf8) ->
].
%% test utf8 encoding
-test_encoder_utf8() ->
+encoder_utf8_test() ->
%% safe conversion case (default)
[34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] =
encode(<<1,"\321\202\320\265\321\201\321\202">>),
@@ -634,11 +629,11 @@ test_encoder_utf8() ->
[34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] =
Enc(<<1,"\321\202\320\265\321\201\321\202">>).
-test_input_validation() ->
+input_validation_test() ->
Good = [
- {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, % pound
- {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, % euro
- {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} % denarius
+ {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, %% pound
+ {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, %% euro
+ {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} %% denarius
],
lists:foreach(fun({CodePoint, UTF8}) ->
Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)),
@@ -646,15 +641,146 @@ test_input_validation() ->
end, Good),
Bad = [
- % 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
+ %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
<<?Q, 16#80, ?Q>>,
- % missing continuations, last byte in each should be 80-BF
+ %% missing continuations, last byte in each should be 80-BF
<<?Q, 16#C2, 16#7F, ?Q>>,
<<?Q, 16#E0, 16#80,16#7F, ?Q>>,
<<?Q, 16#F0, 16#80, 16#80, 16#7F, ?Q>>,
- % we don't support code points > 10FFFF per RFC 3629
+ %% we don't support code points > 10FFFF per RFC 3629
<<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>
],
- lists:foreach(fun(X) ->
- ok = try decode(X) catch invalid_utf8 -> ok end
- end, Bad).
+ lists:foreach(
+ fun(X) ->
+ ok = try decode(X) catch invalid_utf8 -> ok end,
+ %% could be {ucs,{bad_utf8_character_code}} or
+ %% {json_encode,{bad_char,_}}
+ {'EXIT', _} = (catch encode(X))
+ end, Bad).
+
+inline_json_test() ->
+ ?assertEqual(<<"\"iodata iodata\"">>,
+ iolist_to_binary(
+ encode({json, [<<"\"iodata">>, " iodata\""]}))),
+ ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
+ decode(
+ encode({struct,
+ [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
+ ok.
+
+big_unicode_test() ->
+ UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)),
+ ?assertEqual(
+ <<"\"\\ud834\\udd20\"">>,
+ iolist_to_binary(encode(UTF8Seq))),
+ ?assertEqual(
+ UTF8Seq,
+ decode(iolist_to_binary(encode(UTF8Seq)))),
+ ok.
+
+custom_decoder_test() ->
+ ?assertEqual(
+ {struct, [{<<"key">>, <<"value">>}]},
+ (decoder([]))("{\"key\": \"value\"}")),
+ F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
+ ?assertEqual(
+ win,
+ (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
+ ok.
+
+atom_test() ->
+ %% JSON native atoms
+ [begin
+ ?assertEqual(A, decode(atom_to_list(A))),
+ ?assertEqual(iolist_to_binary(atom_to_list(A)),
+ iolist_to_binary(encode(A)))
+ end || A <- [true, false, null]],
+ %% Atom to string
+ ?assertEqual(
+ <<"\"foo\"">>,
+ iolist_to_binary(encode(foo))),
+ ?assertEqual(
+ <<"\"\\ud834\\udd20\"">>,
+ iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))),
+ ok.
+
+key_encode_test() ->
+ %% Some forms are accepted as keys that would not be strings in other
+ %% cases
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{foo, 1}]}))),
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{"foo", 1}]}))),
+ ?assertEqual(
+ <<"{\"\\ud834\\udd20\":1}">>,
+ iolist_to_binary(
+ encode({struct, [{[16#0001d120], 1}]}))),
+ ?assertEqual(
+ <<"{\"1\":1}">>,
+ iolist_to_binary(encode({struct, [{1, 1}]}))),
+ ok.
+
+unsafe_chars_test() ->
+ Chars = "\"\\\b\f\n\r\t",
+ [begin
+ ?assertEqual(false, json_string_is_safe([C])),
+ ?assertEqual(false, json_bin_is_safe(<<C>>)),
+ ?assertEqual(<<C>>, decode(encode(<<C>>)))
+ end || C <- Chars],
+ ?assertEqual(
+ false,
+ json_string_is_safe([16#0001d120])),
+ ?assertEqual(
+ false,
+ json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))),
+ ?assertEqual(
+ [16#0001d120],
+ xmerl_ucs:from_utf8(
+ binary_to_list(
+ decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))),
+ ?assertEqual(
+ false,
+ json_string_is_safe([16#110000])),
+ ?assertEqual(
+ false,
+ json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))),
+ %% solidus can be escaped but isn't unsafe by default
+ ?assertEqual(
+ <<"/">>,
+ decode(<<"\"\\/\"">>)),
+ ok.
+
+int_test() ->
+ ?assertEqual(0, decode("0")),
+ ?assertEqual(1, decode("1")),
+ ?assertEqual(11, decode("11")),
+ ok.
+
+large_int_test() ->
+ ?assertEqual(<<"-2147483649214748364921474836492147483649">>,
+ iolist_to_binary(encode(-2147483649214748364921474836492147483649))),
+ ?assertEqual(<<"2147483649214748364921474836492147483649">>,
+ iolist_to_binary(encode(2147483649214748364921474836492147483649))),
+ ok.
+
+float_test() ->
+ ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))),
+ ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))),
+ ok.
+
+handler_test() ->
+ ?assertEqual(
+ {'EXIT',{json_encode,{bad_term,{}}}},
+ catch encode({})),
+ F = fun ({}) -> [] end,
+ ?assertEqual(
+ <<"[]">>,
+ iolist_to_binary((encoder([{handler, F}]))({}))),
+ ok.
+
+-endif.
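
The {json, iolist()} clause added to mochijson2 above lets pre-encoded JSON pass through the encoder verbatim; a sketch in the spirit of inline_json_test above:

    %% Sketch only: the {json, IoList} term is emitted as-is by the encoder.
    Pre = {json, <<"[1,2,3]">>},
    <<"{\"nums\":[1,2,3]}">> =
        iolist_to_binary(mochijson2:encode({struct, [{nums, Pre}]})).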
diff --git a/src/mochiweb/mochilists.erl b/src/mochiweb/mochilists.erl
new file mode 100644
index 00000000..8981e7b6
--- /dev/null
+++ b/src/mochiweb/mochilists.erl
@@ -0,0 +1,104 @@
+%% @copyright Copyright (c) 2010 Mochi Media, Inc.
+%% @author David Reid <dreid@mochimedia.com>
+
+%% @doc Utility functions for dealing with proplists.
+
+-module(mochilists).
+-author("David Reid <dreid@mochimedia.com>").
+-export([get_value/2, get_value/3, is_defined/2, set_default/2, set_defaults/2]).
+
+%% @spec set_default({Key::term(), Value::term()}, Proplist::list()) -> list()
+%%
+%% @doc Return new Proplist with {Key, Value} set if not is_defined(Key, Proplist).
+set_default({Key, Value}, Proplist) ->
+ case is_defined(Key, Proplist) of
+ true ->
+ Proplist;
+ false ->
+ [{Key, Value} | Proplist]
+ end.
+
+%% @spec set_defaults([{Key::term(), Value::term()}], Proplist::list()) -> list()
+%%
+%% @doc Return new Proplist with {Key, Value} set if not is_defined(Key, Proplist).
+set_defaults(DefaultProps, Proplist) ->
+ lists:foldl(fun set_default/2, Proplist, DefaultProps).
+
+
+%% @spec is_defined(Key::term(), Proplist::list()) -> bool()
+%%
+%% @doc Returns true if Propist contains at least one entry associated
+%% with Key, otherwise false is returned.
+is_defined(Key, Proplist) ->
+ lists:keyfind(Key, 1, Proplist) =/= false.
+
+
+%% @spec get_value(Key::term(), Proplist::list()) -> term() | undefined
+%%
+%% @doc Return the value of <code>Key</code> or undefined
+get_value(Key, Proplist) ->
+ get_value(Key, Proplist, undefined).
+
+%% @spec get_value(Key::term(), Proplist::list(), Default::term()) -> term()
+%%
+%% @doc Return the value of <code>Key</code> or <code>Default</code>
+get_value(_Key, [], Default) ->
+ Default;
+get_value(Key, Proplist, Default) ->
+ case lists:keyfind(Key, 1, Proplist) of
+ false ->
+ Default;
+ {Key, Value} ->
+ Value
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+set_defaults_test() ->
+ ?assertEqual(
+ [{k, v}],
+ set_defaults([{k, v}], [])),
+ ?assertEqual(
+ [{k, v}],
+ set_defaults([{k, vee}], [{k, v}])),
+ ?assertEqual(
+ lists:sort([{kay, vee}, {k, v}]),
+ lists:sort(set_defaults([{k, vee}, {kay, vee}], [{k, v}]))),
+ ok.
+
+set_default_test() ->
+ ?assertEqual(
+ [{k, v}],
+ set_default({k, v}, [])),
+ ?assertEqual(
+ [{k, v}],
+ set_default({k, vee}, [{k, v}])),
+ ok.
+
+get_value_test() ->
+ ?assertEqual(
+ undefined,
+ get_value(foo, [])),
+ ?assertEqual(
+ undefined,
+ get_value(foo, [{bar, baz}])),
+ ?assertEqual(
+ bar,
+ get_value(foo, [{foo, bar}])),
+ ?assertEqual(
+ default,
+ get_value(foo, [], default)),
+ ?assertEqual(
+ default,
+ get_value(foo, [{bar, baz}], default)),
+ ?assertEqual(
+ bar,
+ get_value(foo, [{foo, bar}], default)),
+ ok.
+
+-endif.
+
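A short sketch of the proplist helpers introduced above; keys and values are arbitrary:

    %% Sketch only: defaults never override existing entries.
    Props = mochilists:set_defaults([{port, 5984}, {ssl, false}], [{port, 6984}]),
    6984  = mochilists:get_value(port, Props),
    false = mochilists:get_value(ssl, Props, true),
    true  = mochilists:is_defined(port, Props).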
diff --git a/src/mochiweb/mochilogfile2.erl b/src/mochiweb/mochilogfile2.erl
new file mode 100644
index 00000000..c34ee73a
--- /dev/null
+++ b/src/mochiweb/mochilogfile2.erl
@@ -0,0 +1,140 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Write newline delimited log files, ensuring that if a truncated
+%% entry is found on log open then it is fixed before writing. Uses
+%% delayed writes and raw files for performance.
+-module(mochilogfile2).
+-author('bob@mochimedia.com').
+
+-export([open/1, write/2, close/1, name/1]).
+
+%% @spec open(Name) -> Handle
+%% @doc Open the log file Name, creating or appending as necessary. All data
+%% at the end of the file will be truncated until a newline is found, to
+%% ensure that all records are complete.
+open(Name) ->
+ {ok, FD} = file:open(Name, [raw, read, write, delayed_write, binary]),
+ fix_log(FD),
+ {?MODULE, Name, FD}.
+
+%% @spec name(Handle) -> string()
+%% @doc Return the path of the log file.
+name({?MODULE, Name, _FD}) ->
+ Name.
+
+%% @spec write(Handle, IoData) -> ok
+%% @doc Write IoData to the log file referenced by Handle.
+write({?MODULE, _Name, FD}, IoData) ->
+ ok = file:write(FD, [IoData, $\n]),
+ ok.
+
+%% @spec close(Handle) -> ok
+%% @doc Close the log file referenced by Handle.
+close({?MODULE, _Name, FD}) ->
+ ok = file:sync(FD),
+ ok = file:close(FD),
+ ok.
+
+fix_log(FD) ->
+ {ok, Location} = file:position(FD, eof),
+ Seek = find_last_newline(FD, Location),
+ {ok, Seek} = file:position(FD, Seek),
+ ok = file:truncate(FD),
+ ok.
+
+%% Seek backwards to the last valid log entry
+find_last_newline(_FD, N) when N =< 1 ->
+ 0;
+find_last_newline(FD, Location) ->
+ case file:pread(FD, Location - 1, 1) of
+ {ok, <<$\n>>} ->
+ Location;
+ {ok, _} ->
+ find_last_newline(FD, Location - 1)
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+name_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "open_close_test.log"),
+ H = open(FileName),
+ ?assertEqual(
+ FileName,
+ name(H)),
+ close(H),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+open_close_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "open_close_test.log"),
+ OpenClose = fun () ->
+ H = open(FileName),
+ ?assertEqual(
+ true,
+ filelib:is_file(FileName)),
+ ok = close(H),
+ ?assertEqual(
+ {ok, <<>>},
+ file:read_file(FileName)),
+ ok
+ end,
+ OpenClose(),
+ OpenClose(),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+write_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "write_test.log"),
+ F = fun () ->
+ H = open(FileName),
+ write(H, "test line"),
+ close(H),
+ ok
+ end,
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\n">>},
+ file:read_file(FileName)),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\ntest line\n">>},
+ file:read_file(FileName)),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+fix_log_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "write_test.log"),
+ file:write_file(FileName, <<"first line good\nsecond line bad">>),
+ F = fun () ->
+ H = open(FileName),
+ write(H, "test line"),
+ close(H),
+ ok
+ end,
+ F(),
+ ?assertEqual(
+ {ok, <<"first line good\ntest line\n">>},
+ file:read_file(FileName)),
+ file:write_file(FileName, <<"first line bad">>),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\n">>},
+ file:read_file(FileName)),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\ntest line\n">>},
+ file:read_file(FileName)),
+ ok.
+
+-endif.
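
A minimal sketch of the mochilogfile2 API above; the log path is a placeholder:

    %% Sketch only: open (repairing any truncated trailing entry), append, close.
    H = mochilogfile2:open("/tmp/example.log"),
    "/tmp/example.log" = mochilogfile2:name(H),
    ok = mochilogfile2:write(H, "one line per call"),
    ok = mochilogfile2:close(H).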
diff --git a/src/mochiweb/mochinum.erl b/src/mochiweb/mochinum.erl
index 6a866042..a7e2bfbc 100644
--- a/src/mochiweb/mochinum.erl
+++ b/src/mochiweb/mochinum.erl
@@ -11,7 +11,7 @@
-module(mochinum).
-author("Bob Ippolito <bob@mochimedia.com>").
--export([digits/1, frexp/1, int_pow/2, int_ceil/1, test/0]).
+-export([digits/1, frexp/1, int_pow/2, int_ceil/1]).
%% IEEE 754 Float exponent bias
-define(FLOAT_BIAS, 1022).
@@ -120,7 +120,7 @@ digits1(Float, Exp, Frac) ->
case Exp >= 0 of
true ->
BExp = 1 bsl Exp,
- case (Frac /= ?BIG_POW) of
+ case (Frac =/= ?BIG_POW) of
true ->
scale((Frac * BExp * 2), 2, BExp, BExp,
Round, Round, Float);
@@ -129,7 +129,7 @@ digits1(Float, Exp, Frac) ->
Round, Round, Float)
end;
false ->
- case (Exp == ?MIN_EXP) orelse (Frac /= ?BIG_POW) of
+ case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of
true ->
scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
Round, Round, Float);
@@ -228,14 +228,13 @@ log2floor(Int, N) ->
log2floor(Int bsr 1, 1 + N).
-test() ->
- ok = test_frexp(),
- ok = test_int_ceil(),
- ok = test_int_pow(),
- ok = test_digits(),
- ok.
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
-test_int_ceil() ->
+int_ceil_test() ->
1 = int_ceil(0.0001),
0 = int_ceil(0.0),
1 = int_ceil(0.99),
@@ -244,7 +243,7 @@ test_int_ceil() ->
-2 = int_ceil(-2.0),
ok.
-test_int_pow() ->
+int_pow_test() ->
1 = int_pow(1, 1),
1 = int_pow(1, 0),
1 = int_pow(10, 0),
@@ -253,17 +252,58 @@ test_int_pow() ->
1000 = int_pow(10, 3),
ok.
-test_digits() ->
- "0" = digits(0),
- "0.0" = digits(0.0),
- "1.0" = digits(1.0),
- "-1.0" = digits(-1.0),
- "0.1" = digits(0.1),
- "0.01" = digits(0.01),
- "0.001" = digits(0.001),
+digits_test() ->
+ ?assertEqual("0",
+ digits(0)),
+ ?assertEqual("0.0",
+ digits(0.0)),
+ ?assertEqual("1.0",
+ digits(1.0)),
+ ?assertEqual("-1.0",
+ digits(-1.0)),
+ ?assertEqual("0.1",
+ digits(0.1)),
+ ?assertEqual("0.01",
+ digits(0.01)),
+ ?assertEqual("0.001",
+ digits(0.001)),
+ ?assertEqual("1.0e+6",
+ digits(1000000.0)),
+ ?assertEqual("0.5",
+ digits(0.5)),
+ ?assertEqual("4503599627370496.0",
+ digits(4503599627370496.0)),
+ %% small denormalized number
+ %% 4.94065645841246544177e-324
+ <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
+ ?assertEqual("4.9406564584124654e-324",
+ digits(SmallDenorm)),
+ ?assertEqual(SmallDenorm,
+ list_to_float(digits(SmallDenorm))),
+ %% large denormalized number
+ %% 2.22507385850720088902e-308
+ <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
+ ?assertEqual("2.225073858507201e-308",
+ digits(BigDenorm)),
+ ?assertEqual(BigDenorm,
+ list_to_float(digits(BigDenorm))),
+ %% small normalized number
+ %% 2.22507385850720138309e-308
+ <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
+ ?assertEqual("2.2250738585072014e-308",
+ digits(SmallNorm)),
+ ?assertEqual(SmallNorm,
+ list_to_float(digits(SmallNorm))),
+ %% large normalized number
+ %% 1.79769313486231570815e+308
+ <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
+ ?assertEqual("1.7976931348623157e+308",
+ digits(LargeNorm)),
+ ?assertEqual(LargeNorm,
+ list_to_float(digits(LargeNorm))),
ok.
-test_frexp() ->
+frexp_test() ->
%% zero
{0.0, 0} = frexp(0.0),
%% one
@@ -287,3 +327,5 @@ test_frexp() ->
<<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
{0.99999999999999989, 1024} = frexp(LargeNorm),
ok.
+
+-endif.
diff --git a/src/mochiweb/mochitemp.erl b/src/mochiweb/mochitemp.erl
new file mode 100644
index 00000000..bb23d2a6
--- /dev/null
+++ b/src/mochiweb/mochitemp.erl
@@ -0,0 +1,310 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Create temporary files and directories. Requires crypto to be started.
+
+-module(mochitemp).
+-export([gettempdir/0]).
+-export([mkdtemp/0, mkdtemp/3]).
+-export([rmtempdir/1]).
+%% -export([mkstemp/4]).
+-define(SAFE_CHARS, {$a, $b, $c, $d, $e, $f, $g, $h, $i, $j, $k, $l, $m,
+ $n, $o, $p, $q, $r, $s, $t, $u, $v, $w, $x, $y, $z,
+ $A, $B, $C, $D, $E, $F, $G, $H, $I, $J, $K, $L, $M,
+ $N, $O, $P, $Q, $R, $S, $T, $U, $V, $W, $X, $Y, $Z,
+ $0, $1, $2, $3, $4, $5, $6, $7, $8, $9, $_}).
+-define(TMP_MAX, 10000).
+
+-include_lib("kernel/include/file.hrl").
+
+%% TODO: An ugly wrapper over the mktemp tool with open_port and sadness?
+%% We can't implement this race-free in Erlang without the ability
+%% to issue O_CREAT|O_EXCL. I suppose we could hack something with
+%% mkdtemp, del_dir, open.
+%% mkstemp(Suffix, Prefix, Dir, Options) ->
+%% ok.
+
+rmtempdir(Dir) ->
+ case file:del_dir(Dir) of
+ {error, eexist} ->
+ ok = rmtempdirfiles(Dir),
+ ok = file:del_dir(Dir);
+ ok ->
+ ok
+ end.
+
+rmtempdirfiles(Dir) ->
+ {ok, Files} = file:list_dir(Dir),
+ ok = rmtempdirfiles(Dir, Files).
+
+rmtempdirfiles(_Dir, []) ->
+ ok;
+rmtempdirfiles(Dir, [Basename | Rest]) ->
+ Path = filename:join([Dir, Basename]),
+ case filelib:is_dir(Path) of
+ true ->
+ ok = rmtempdir(Path);
+ false ->
+ ok = file:delete(Path)
+ end,
+ rmtempdirfiles(Dir, Rest).
+
+mkdtemp() ->
+ mkdtemp("", "tmp", gettempdir()).
+
+mkdtemp(Suffix, Prefix, Dir) ->
+ mkdtemp_n(rngpath_fun(Suffix, Prefix, Dir), ?TMP_MAX).
+
+
+
+mkdtemp_n(RngPath, 1) ->
+ make_dir(RngPath());
+mkdtemp_n(RngPath, N) ->
+ try make_dir(RngPath())
+ catch throw:{error, eexist} ->
+ mkdtemp_n(RngPath, N - 1)
+ end.
+
+make_dir(Path) ->
+ case file:make_dir(Path) of
+ ok ->
+ ok;
+ E={error, eexist} ->
+ throw(E)
+ end,
+ %% Small window for a race condition here because dir is created 777
+ ok = file:write_file_info(Path, #file_info{mode=8#0700}),
+ Path.
+
+rngpath_fun(Prefix, Suffix, Dir) ->
+ fun () ->
+ filename:join([Dir, Prefix ++ rngchars(6) ++ Suffix])
+ end.
+
+rngchars(0) ->
+ "";
+rngchars(N) ->
+ [rngchar() | rngchars(N - 1)].
+
+rngchar() ->
+ rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))).
+
+rngchar(C) ->
+ element(1 + C, ?SAFE_CHARS).
+
+%% @spec gettempdir() -> string()
+%% @doc Get a usable temporary directory using the first of these that is a directory:
+%% $TMPDIR, $TMP, $TEMP, "/tmp", "/var/tmp", "/usr/tmp", ".".
+gettempdir() ->
+ gettempdir(gettempdir_checks(), fun normalize_dir/1).
+
+gettempdir_checks() ->
+ [{fun os:getenv/1, ["TMPDIR", "TMP", "TEMP"]},
+ {fun gettempdir_identity/1, ["/tmp", "/var/tmp", "/usr/tmp"]},
+ {fun gettempdir_cwd/1, [cwd]}].
+
+gettempdir_identity(L) ->
+ L.
+
+gettempdir_cwd(cwd) ->
+ {ok, L} = file:get_cwd(),
+ L.
+
+gettempdir([{_F, []} | RestF], Normalize) ->
+ gettempdir(RestF, Normalize);
+gettempdir([{F, [L | RestL]} | RestF], Normalize) ->
+ case Normalize(F(L)) of
+ false ->
+ gettempdir([{F, RestL} | RestF], Normalize);
+ Dir ->
+ Dir
+ end.
+
+normalize_dir(False) when False =:= false orelse False =:= "" ->
+ %% Erlang doesn't have an unsetenv, wtf.
+ false;
+normalize_dir(L) ->
+ Dir = filename:absname(L),
+ case filelib:is_dir(Dir) of
+ false ->
+ false;
+ true ->
+ Dir
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+pushenv(L) ->
+ [{K, os:getenv(K)} || K <- L].
+popenv(L) ->
+ F = fun ({K, false}) ->
+ %% Erlang doesn't have an unsetenv, wtf.
+ os:putenv(K, "");
+ ({K, V}) ->
+ os:putenv(K, V)
+ end,
+ lists:foreach(F, L).
+
+gettempdir_fallback_test() ->
+ ?assertEqual(
+ "/",
+ gettempdir([{fun gettempdir_identity/1, ["/--not-here--/"]},
+ {fun gettempdir_identity/1, ["/"]}],
+ fun normalize_dir/1)),
+ ?assertEqual(
+ "/",
+ %% simulate a true os:getenv unset env
+ gettempdir([{fun gettempdir_identity/1, [false]},
+ {fun gettempdir_identity/1, ["/"]}],
+ fun normalize_dir/1)),
+ ok.
+
+gettempdir_identity_test() ->
+ ?assertEqual(
+ "/",
+ gettempdir([{fun gettempdir_identity/1, ["/"]}], fun normalize_dir/1)),
+ ok.
+
+gettempdir_cwd_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertEqual(
+ normalize_dir(Cwd),
+ gettempdir([{fun gettempdir_cwd/1, [cwd]}], fun normalize_dir/1)),
+ ok.
+
+rngchars_test() ->
+ crypto:start(),
+ ?assertEqual(
+ "",
+ rngchars(0)),
+ ?assertEqual(
+ 10,
+ length(rngchars(10))),
+ ok.
+
+rngchar_test() ->
+ ?assertEqual(
+ $a,
+ rngchar(0)),
+ ?assertEqual(
+ $A,
+ rngchar(26)),
+ ?assertEqual(
+ $_,
+ rngchar(62)),
+ ok.
+
+mkdtemp_n_failonce_test() ->
+ crypto:start(),
+ D = mkdtemp(),
+ Path = filename:join([D, "testdir"]),
+ %% Toggle the existence of a dir so that it fails
+ %% the first time and succeeds the second.
+ F = fun () ->
+ case filelib:is_dir(Path) of
+ true ->
+ file:del_dir(Path);
+ false ->
+ file:make_dir(Path)
+ end,
+ Path
+ end,
+ try
+ %% Fails the first time
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(F, 1)),
+ %% Reset state
+ file:del_dir(Path),
+ %% Succeeds the second time
+ ?assertEqual(
+ Path,
+ mkdtemp_n(F, 2))
+ after rmtempdir(D)
+ end,
+ ok.
+
+mkdtemp_n_fail_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(fun () -> Cwd end, 1)),
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(fun () -> Cwd end, 2)),
+ ok.
+
+make_dir_fail_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertThrow(
+ {error, eexist},
+ make_dir(Cwd)),
+ ok.
+
+mkdtemp_test() ->
+ crypto:start(),
+ D = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D)),
+ ?assertEqual(
+ ok,
+ file:del_dir(D)),
+ ok.
+
+rmtempdir_test() ->
+ crypto:start(),
+ D1 = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D1)),
+ ?assertEqual(
+ ok,
+ rmtempdir(D1)),
+ D2 = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D2)),
+ ok = file:write_file(filename:join([D2, "foo"]), <<"bytes">>),
+ D3 = mkdtemp("suffix", "prefix", D2),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D3)),
+ ok = file:write_file(filename:join([D3, "foo"]), <<"bytes">>),
+ ?assertEqual(
+ ok,
+ rmtempdir(D2)),
+ ?assertEqual(
+ {error, enoent},
+ file:consult(D3)),
+ ?assertEqual(
+ {error, enoent},
+ file:consult(D2)),
+ ok.
+
+gettempdir_env_test() ->
+ Env = pushenv(["TMPDIR", "TEMP", "TMP"]),
+ FalseEnv = [{"TMPDIR", false}, {"TEMP", false}, {"TMP", false}],
+ try
+ popenv(FalseEnv),
+ popenv([{"TMPDIR", "/"}]),
+ ?assertEqual(
+ "/",
+ os:getenv("TMPDIR")),
+ ?assertEqual(
+ "/",
+ gettempdir()),
+ {ok, Cwd} = file:get_cwd(),
+ popenv(FalseEnv),
+ popenv([{"TMP", Cwd}]),
+ ?assertEqual(
+ normalize_dir(Cwd),
+ gettempdir())
+ after popenv(Env)
+ end,
+ ok.
+
+-endif.
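
A usage sketch for mochitemp above; as its @doc notes, crypto must be started first:

    %% Sketch only: create a private temporary directory and remove it when done.
    crypto:start(),
    Dir = mochitemp:mkdtemp(),
    true = filelib:is_dir(Dir),
    ok = mochitemp:rmtempdir(Dir).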
diff --git a/src/mochiweb/mochiutf8.erl b/src/mochiweb/mochiutf8.erl
new file mode 100644
index 00000000..206e1186
--- /dev/null
+++ b/src/mochiweb/mochiutf8.erl
@@ -0,0 +1,316 @@
+%% @copyright 2010 Mochi Media, Inc.
+%% @author Bob Ippolito <bob@mochimedia.com>
+
+%% @doc Algorithm to convert any binary to a valid UTF-8 sequence by ignoring
+%% invalid bytes.
+
+-module(mochiutf8).
+-export([valid_utf8_bytes/1, codepoint_to_bytes/1, bytes_to_codepoints/1]).
+-export([bytes_foldl/3, codepoint_foldl/3, read_codepoint/1, len/1]).
+
+%% External API
+
+-type unichar_low() :: 0..16#d7ff.
+-type unichar_high() :: 16#e000..16#10ffff.
+-type unichar() :: unichar_low() | unichar_high().
+
+-spec codepoint_to_bytes(unichar()) -> binary().
+%% @doc Convert a unicode codepoint to UTF-8 bytes.
+codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) ->
+ %% U+0000 - U+007F - 7 bits
+ <<C>>;
+codepoint_to_bytes(C) when (C >= 16#080 andalso C =< 16#07FF) ->
+ %% U+0080 - U+07FF - 11 bits
+ <<0:5, B1:5, B0:6>> = <<C:16>>,
+ <<2#110:3, B1:5,
+ 2#10:2, B0:6>>;
+codepoint_to_bytes(C) when (C >= 16#0800 andalso C =< 16#FFFF) andalso
+ (C < 16#D800 orelse C > 16#DFFF) ->
+ %% U+0800 - U+FFFF - 16 bits (excluding UTC-16 surrogate code points)
+ <<B2:4, B1:6, B0:6>> = <<C:16>>,
+ <<2#1110:4, B2:4,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6>>;
+codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) ->
+ %% U+10000 - U+10FFFF - 21 bits
+ <<0:3, B3:3, B2:6, B1:6, B0:6>> = <<C:24>>,
+ <<2#11110:5, B3:3,
+ 2#10:2, B2:6,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6>>.
+
+-spec codepoints_to_bytes([unichar()]) -> binary().
+%% @doc Convert a list of codepoints to a UTF-8 binary.
+codepoints_to_bytes(L) ->
+ <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>.
+
+-spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
+read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) ->
+ %% U+0000 - U+007F - 7 bits
+ <<B:1/binary, _/binary>> = Bin,
+ {C, B, Rest};
+read_codepoint(Bin = <<2#110:3, B1:5,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+0080 - U+07FF - 11 bits
+ case <<B1:5, B0:6>> of
+ <<C:11>> when C >= 16#80 ->
+ <<B:2/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end;
+read_codepoint(Bin = <<2#1110:4, B2:4,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+0800 - U+FFFF - 16 bits (excluding UTC-16 surrogate code points)
+ case <<B2:4, B1:6, B0:6>> of
+ <<C:16>> when (C >= 16#0800 andalso C =< 16#FFFF) andalso
+ (C < 16#D800 orelse C > 16#DFFF) ->
+ <<B:3/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end;
+read_codepoint(Bin = <<2#11110:5, B3:3,
+ 2#10:2, B2:6,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+10000 - U+10FFFF - 21 bits
+ case <<B3:3, B2:6, B1:6, B0:6>> of
+ <<C:21>> when (C >= 16#010000 andalso C =< 16#10FFFF) ->
+ <<B:4/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end.
+
+-spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
+codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) ->
+ Acc;
+codepoint_foldl(F, Acc, Bin) ->
+ {C, _, Rest} = read_codepoint(Bin),
+ codepoint_foldl(F, F(C, Acc), Rest).
+
+-spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
+bytes_foldl(F, Acc, <<>>) when is_function(F, 2) ->
+ Acc;
+bytes_foldl(F, Acc, Bin) ->
+ {_, B, Rest} = read_codepoint(Bin),
+ bytes_foldl(F, F(B, Acc), Rest).
+
+-spec bytes_to_codepoints(binary()) -> [unichar()].
+bytes_to_codepoints(B) ->
+ lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)).
+
+-spec len(binary()) -> non_neg_integer().
+len(<<>>) ->
+ 0;
+len(B) ->
+ {_, _, Rest} = read_codepoint(B),
+ 1 + len(Rest).
+
+-spec valid_utf8_bytes(B::binary()) -> binary().
+%% @doc Return only the bytes in B that represent valid UTF-8. Uses
+%% the following recursive algorithm: skip one byte if B does not
+%% follow UTF-8 syntax (a 1-4 byte encoding of some number),
+%% skip sequence of 2-4 bytes if it represents an overlong encoding
+%% or bad code point (surrogate U+D800 - U+DFFF or > U+10FFFF).
+valid_utf8_bytes(B) when is_binary(B) ->
+ binary_skip_bytes(B, invalid_utf8_indexes(B)).
+
+%% Internal API
+
+-spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
+%% @doc Return B, but skipping the 0-based indexes in L.
+binary_skip_bytes(B, []) ->
+ B;
+binary_skip_bytes(B, L) ->
+ binary_skip_bytes(B, L, 0, []).
+
+%% @private
+-spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
+binary_skip_bytes(B, [], _N, Acc) ->
+ iolist_to_binary(lists:reverse([B | Acc]));
+binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
+ binary_skip_bytes(RestB, RestL, 1 + N, Acc);
+binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) ->
+ binary_skip_bytes(RestB, L, 1 + N, [C | Acc]).
+
+-spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
+%% @doc Return the 0-based indexes in B that are not valid UTF-8.
+invalid_utf8_indexes(B) ->
+ invalid_utf8_indexes(B, 0, []).
+
+%% @private.
+-spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
+invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 ->
+ %% U+0000 - U+007F - 7 bits
+ invalid_utf8_indexes(Rest, 1 + N, Acc);
+invalid_utf8_indexes(<<C1, C2, Rest/binary>>, N, Acc)
+ when C1 band 16#E0 =:= 16#C0,
+ C2 band 16#C0 =:= 16#80 ->
+ %% U+0080 - U+07FF - 11 bits
+ case ((C1 band 16#1F) bsl 6) bor (C2 band 16#3F) of
+ C when C < 16#80 ->
+ %% Overlong encoding.
+ invalid_utf8_indexes(Rest, 2 + N, [1 + N, N | Acc]);
+ _ ->
+ %% Upper bound U+07FF does not need to be checked
+ invalid_utf8_indexes(Rest, 2 + N, Acc)
+ end;
+invalid_utf8_indexes(<<C1, C2, C3, Rest/binary>>, N, Acc)
+ when C1 band 16#F0 =:= 16#E0,
+ C2 band 16#C0 =:= 16#80,
+ C3 band 16#C0 =:= 16#80 ->
+ %% U+0800 - U+FFFF - 16 bits
+ case ((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
+ (C3 band 16#3F) of
+ C when (C < 16#800) orelse (C >= 16#D800 andalso C =< 16#DFFF) ->
+ %% Overlong encoding or surrogate.
+ invalid_utf8_indexes(Rest, 3 + N, [2 + N, 1 + N, N | Acc]);
+ _ ->
+ %% Upper bound U+FFFF does not need to be checked
+ invalid_utf8_indexes(Rest, 3 + N, Acc)
+ end;
+invalid_utf8_indexes(<<C1, C2, C3, C4, Rest/binary>>, N, Acc)
+ when C1 band 16#F8 =:= 16#F0,
+ C2 band 16#C0 =:= 16#80,
+ C3 band 16#C0 =:= 16#80,
+ C4 band 16#C0 =:= 16#80 ->
+ %% U+10000 - U+10FFFF - 21 bits
+ case ((((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
+ (C3 band 16#3F)) bsl 6) bor (C4 band 16#3F) of
+ C when (C < 16#10000) orelse (C > 16#10FFFF) ->
+ %% Overlong encoding or invalid code point.
+ invalid_utf8_indexes(Rest, 4 + N, [3 + N, 2 + N, 1 + N, N | Acc]);
+ _ ->
+ invalid_utf8_indexes(Rest, 4 + N, Acc)
+ end;
+invalid_utf8_indexes(<<_, Rest/binary>>, N, Acc) ->
+ %% Invalid char
+ invalid_utf8_indexes(Rest, 1 + N, [N | Acc]);
+invalid_utf8_indexes(<<>>, _N, Acc) ->
+ lists:reverse(Acc).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+binary_skip_bytes_test() ->
+ ?assertEqual(<<"foo">>,
+ binary_skip_bytes(<<"foo">>, [])),
+ ?assertEqual(<<"foobar">>,
+ binary_skip_bytes(<<"foo bar">>, [3])),
+ ?assertEqual(<<"foo">>,
+ binary_skip_bytes(<<"foo bar">>, [3, 4, 5, 6])),
+ ?assertEqual(<<"oo bar">>,
+ binary_skip_bytes(<<"foo bar">>, [0])),
+ ok.
+
+invalid_utf8_indexes_test() ->
+ ?assertEqual(
+ [],
+ invalid_utf8_indexes(<<"unicode snowman for you: ", 226, 152, 131>>)),
+ ?assertEqual(
+ [0],
+ invalid_utf8_indexes(<<128>>)),
+ ?assertEqual(
+ [57,59,60,64,66,67],
+ invalid_utf8_indexes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
+ 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
+ ok.
+
+codepoint_to_bytes_test() ->
+ %% U+0000 - U+007F - 7 bits
+ %% U+0080 - U+07FF - 11 bits
+ %% U+0800 - U+FFFF - 16 bits (excluding UTC-16 surrogate code points)
+ %% U+10000 - U+10FFFF - 21 bits
+ ?assertEqual(
+ <<"a">>,
+ codepoint_to_bytes($a)),
+ ?assertEqual(
+ <<16#c2, 16#80>>,
+ codepoint_to_bytes(16#80)),
+ ?assertEqual(
+ <<16#df, 16#bf>>,
+ codepoint_to_bytes(16#07ff)),
+ ?assertEqual(
+ <<16#ef, 16#bf, 16#bf>>,
+ codepoint_to_bytes(16#ffff)),
+ ?assertEqual(
+ <<16#f4, 16#8f, 16#bf, 16#bf>>,
+ codepoint_to_bytes(16#10ffff)),
+ ok.
+
+bytes_foldl_test() ->
+ ?assertEqual(
+ <<"abc">>,
+ bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>, <<"abc">>)),
+ ?assertEqual(
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>,
+ bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>,
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+bytes_to_codepoints_test() ->
+ ?assertEqual(
+ "abc" ++ [16#2603, 16#4e2d, 16#85, 16#10ffff],
+ bytes_to_codepoints(<<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+codepoint_foldl_test() ->
+ ?assertEqual(
+ "cba",
+ codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], <<"abc">>)),
+ ?assertEqual(
+ [16#10ffff, 16#85, 16#4e2d, 16#2603 | "cba"],
+ codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [],
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+len_test() ->
+ ?assertEqual(
+ 29,
+ len(<<"unicode snowman for you: ", 226, 152, 131, 228, 184, 173, 194, 133, 244, 143, 191, 191>>)),
+ ok.
+
+codepoints_to_bytes_test() ->
+ ?assertEqual(
+ iolist_to_binary(lists:map(fun codepoint_to_bytes/1, lists:seq(1, 1000))),
+ codepoints_to_bytes(lists:seq(1, 1000))),
+ ok.
+
+valid_utf8_bytes_test() ->
+ ?assertEqual(
+ <<"invalid U+11ffff: ">>,
+ valid_utf8_bytes(<<"invalid U+11ffff: ", 244, 159, 191, 191>>)),
+ ?assertEqual(
+ <<"U+10ffff: ", 244, 143, 191, 191>>,
+ valid_utf8_bytes(<<"U+10ffff: ", 244, 143, 191, 191>>)),
+ ?assertEqual(
+ <<"overlong 2-byte encoding (a): ">>,
+ valid_utf8_bytes(<<"overlong 2-byte encoding (a): ", 2#11000001, 2#10100001>>)),
+ ?assertEqual(
+ <<"overlong 2-byte encoding (!): ">>,
+ valid_utf8_bytes(<<"overlong 2-byte encoding (!): ", 2#11000000, 2#10100001>>)),
+ ?assertEqual(
+ <<"mu: ", 194, 181>>,
+ valid_utf8_bytes(<<"mu: ", 194, 181>>)),
+ ?assertEqual(
+ <<"bad coding bytes: ">>,
+ valid_utf8_bytes(<<"bad coding bytes: ", 2#10011111, 2#10111111, 2#11111111>>)),
+ ?assertEqual(
+ <<"low surrogate (unpaired): ">>,
+ valid_utf8_bytes(<<"low surrogate (unpaired): ", 237, 176, 128>>)),
+ ?assertEqual(
+ <<"high surrogate (unpaired): ">>,
+ valid_utf8_bytes(<<"high surrogate (unpaired): ", 237, 191, 191>>)),
+ ?assertEqual(
+ <<"unicode snowman for you: ", 226, 152, 131>>,
+ valid_utf8_bytes(<<"unicode snowman for you: ", 226, 152, 131>>)),
+ ?assertEqual(
+ <<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (AISPW))">>,
+ valid_utf8_bytes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
+ 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
+ ok.
+
+-endif.
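
A sketch of the mochiutf8 API above, reusing the snowman (U+2603) byte sequence from its own tests:

    %% Sketch only: drop invalid bytes, then decode what remains.
    Snowman = <<226, 152, 131>>,
    Snowman = mochiutf8:valid_utf8_bytes(<<226, 152, 131, 16#ff>>),
    [16#2603] = mochiutf8:bytes_to_codepoints(Snowman),
    1 = mochiutf8:len(Snowman).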
diff --git a/src/mochiweb/mochiweb.app.in b/src/mochiweb/mochiweb.app.in
index b0f90144..c6a2630b 100644
--- a/src/mochiweb/mochiweb.app.in
+++ b/src/mochiweb/mochiweb.app.in
@@ -1,6 +1,6 @@
{application, mochiweb,
[{description, "MochiMedia Web Server"},
- {vsn, "113"},
+ {vsn, "7c2bc2"},
{modules, [
mochihex,
mochijson,
diff --git a/src/mochiweb/mochiweb.app.src b/src/mochiweb/mochiweb.app.src
new file mode 100644
index 00000000..a1c95aae
--- /dev/null
+++ b/src/mochiweb/mochiweb.app.src
@@ -0,0 +1,9 @@
+%% This is generated from src/mochiweb.app.src
+{application, mochiweb,
+ [{description, "MochiMedia Web Server"},
+ {vsn, "7c2bc2"},
+ {modules, []},
+ {registered, []},
+ {mod, {mochiweb_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib, crypto, inets]}]}.
diff --git a/src/mochiweb/mochiweb.erl b/src/mochiweb/mochiweb.erl
index 0f4d52a6..3118028b 100644
--- a/src/mochiweb/mochiweb.erl
+++ b/src/mochiweb/mochiweb.erl
@@ -9,7 +9,6 @@
-export([start/0, stop/0]).
-export([new_request/1, new_response/1]).
-export([all_loaded/0, all_loaded/1, reload/0]).
--export([test/0]).
%% @spec start() -> ok
%% @doc Start the MochiWeb server.
@@ -24,21 +23,6 @@ stop() ->
application:stop(crypto),
Res.
-%% @spec test() -> ok
-%% @doc Run all of the tests for MochiWeb.
-test() ->
- mochiweb_util:test(),
- mochiweb_headers:test(),
- mochiweb_cookies:test(),
- mochihex:test(),
- mochinum:test(),
- mochijson:test(),
- mochiweb_charref:test(),
- mochiweb_html:test(),
- mochifmt:test(),
- test_request(),
- ok.
-
reload() ->
[c:l(Module) || Module <- all_loaded()].
@@ -96,11 +80,6 @@ new_response({Request, Code, Headers}) ->
%% Internal API
-test_request() ->
- R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
- "/foo/bar/baz wibble quux" = R:get(path),
- ok.
-
ensure_started(App) ->
case application:start(App) of
ok ->
@@ -108,3 +87,203 @@ ensure_started(App) ->
{error, {already_started, App}} ->
ok
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+-record(treq, {path, body= <<>>, xreply= <<>>}).
+
+ssl_cert_opts() ->
+ EbinDir = filename:dirname(code:which(?MODULE)),
+ CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
+ CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
+ KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
+ [{certfile, CertFile}, {keyfile, KeyFile}].
+
+with_server(Transport, ServerFun, ClientFun) ->
+ ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
+ ServerOpts = case Transport of
+ plain ->
+ ServerOpts0;
+ ssl ->
+ ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
+ end,
+ {ok, Server} = mochiweb_http:start(ServerOpts),
+ Port = mochiweb_socket_server:get(Server, port),
+ Res = (catch ClientFun(Transport, Port)),
+ mochiweb_http:stop(Server),
+ Res.
+
+request_test() ->
+ R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
+ "/foo/bar/baz wibble quux" = R:get(path),
+ ok.
+
+single_http_GET_test() ->
+ do_GET(plain, 1).
+
+single_https_GET_test() ->
+ do_GET(ssl, 1).
+
+multiple_http_GET_test() ->
+ do_GET(plain, 3).
+
+multiple_https_GET_test() ->
+ do_GET(ssl, 3).
+
+hundred_http_GET_test() ->
+ do_GET(plain, 100).
+
+hundred_https_GET_test() ->
+ do_GET(ssl, 100).
+
+single_128_http_POST_test() ->
+ do_POST(plain, 128, 1).
+
+single_128_https_POST_test() ->
+ do_POST(ssl, 128, 1).
+
+single_2k_http_POST_test() ->
+ do_POST(plain, 2048, 1).
+
+single_2k_https_POST_test() ->
+ do_POST(ssl, 2048, 1).
+
+single_100k_http_POST_test() ->
+ do_POST(plain, 102400, 1).
+
+single_100k_https_POST_test() ->
+ do_POST(ssl, 102400, 1).
+
+multiple_100k_http_POST_test() ->
+ do_POST(plain, 102400, 3).
+
+multiple_100K_https_POST_test() ->
+ do_POST(ssl, 102400, 3).
+
+hundred_128_http_POST_test() ->
+ do_POST(plain, 128, 100).
+
+hundred_128_https_POST_test() ->
+ do_POST(ssl, 128, 100).
+
+do_GET(Transport, Times) ->
+ PathPrefix = "/whatever/",
+ ReplyPrefix = "You requested: ",
+ ServerFun = fun (Req) ->
+ Reply = ReplyPrefix ++ Req:get(path),
+ Req:ok({"text/plain", Reply})
+ end,
+ TestReqs = [begin
+ Path = PathPrefix ++ integer_to_list(N),
+ ExpectedReply = list_to_binary(ReplyPrefix ++ Path),
+ #treq{path=Path, xreply=ExpectedReply}
+ end || N <- lists:seq(1, Times)],
+ ClientFun = new_client_fun('GET', TestReqs),
+ ok = with_server(Transport, ServerFun, ClientFun),
+ ok.
+
+do_POST(Transport, Size, Times) ->
+ ServerFun = fun (Req) ->
+ Body = Req:recv_body(),
+ Headers = [{"Content-Type", "application/octet-stream"}],
+ Req:respond({201, Headers, Body})
+ end,
+ TestReqs = [begin
+ Path = "/stuff/" ++ integer_to_list(N),
+ Body = crypto:rand_bytes(Size),
+ #treq{path=Path, body=Body, xreply=Body}
+ end || N <- lists:seq(1, Times)],
+ ClientFun = new_client_fun('POST', TestReqs),
+ ok = with_server(Transport, ServerFun, ClientFun),
+ ok.
+
+new_client_fun(Method, TestReqs) ->
+ fun (Transport, Port) ->
+ client_request(Transport, Port, Method, TestReqs)
+ end.
+
+client_request(Transport, Port, Method, TestReqs) ->
+ Opts = [binary, {active, false}, {packet, http}],
+ SockFun = case Transport of
+ plain ->
+ {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts),
+ fun (recv) ->
+ gen_tcp:recv(Socket, 0);
+ ({recv, Length}) ->
+ gen_tcp:recv(Socket, Length);
+ ({send, Data}) ->
+ gen_tcp:send(Socket, Data);
+ ({setopts, L}) ->
+ inet:setopts(Socket, L)
+ end;
+ ssl ->
+ {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]),
+ fun (recv) ->
+ ssl:recv(Socket, 0);
+ ({recv, Length}) ->
+ ssl:recv(Socket, Length);
+ ({send, Data}) ->
+ ssl:send(Socket, Data);
+ ({setopts, L}) ->
+ ssl:setopts(Socket, L)
+ end
+ end,
+ client_request(SockFun, Method, TestReqs).
+
+client_request(SockFun, _Method, []) ->
+ {the_end, {error, closed}} = {the_end, SockFun(recv)},
+ ok;
+client_request(SockFun, Method,
+ [#treq{path=Path, body=Body, xreply=ExReply} | Rest]) ->
+ Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n",
+ client_headers(Body, Rest =:= []),
+ "\r\n",
+ Body],
+ ok = SockFun({send, Request}),
+ case Method of
+ 'GET' ->
+ {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv);
+ 'POST' ->
+ {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv)
+ end,
+ ok = SockFun({setopts, [{packet, httph}]}),
+ {ok, {http_header, _, 'Server', _, "MochiWeb" ++ _}} = SockFun(recv),
+ {ok, {http_header, _, 'Date', _, _}} = SockFun(recv),
+ {ok, {http_header, _, 'Content-Type', _, _}} = SockFun(recv),
+ {ok, {http_header, _, 'Content-Length', _, ConLenStr}} = SockFun(recv),
+ ContentLength = list_to_integer(ConLenStr),
+ {ok, http_eoh} = SockFun(recv),
+ ok = SockFun({setopts, [{packet, raw}]}),
+ {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)},
+ ok = SockFun({setopts, [{packet, http}]}),
+ client_request(SockFun, Method, Rest).
+
+client_headers(Body, IsLastRequest) ->
+ ["Host: localhost\r\n",
+ case Body of
+ <<>> ->
+ "";
+ _ ->
+ ["Content-Type: application/octet-stream\r\n",
+ "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"]
+ end,
+ case IsLastRequest of
+ true ->
+ "Connection: close\r\n";
+ false ->
+ ""
+ end].
+
+drain_reply(_SockFun, 0, Acc) ->
+ Acc;
+drain_reply(SockFun, Length, Acc) ->
+ Sz = erlang:min(Length, 1024),
+ {ok, B} = SockFun({recv, Sz}),
+ drain_reply(SockFun, Length - Sz, <<Acc/bytes, B/bytes>>).
+
+-endif.
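
The {ssl, true} / {ssl_opts, [...]} pair that with_server/3 builds above is the same option set an application hands to mochiweb_http:start/1 to get an HTTPS listener. A minimal sketch, with a hypothetical handler and certificate paths (not part of this patch):

    start_https_example() ->
        Loop = fun (Req) -> Req:ok({"text/plain", "hello over TLS"}) end,
        mochiweb_http:start([{name, example_https},
                             {ip, "127.0.0.1"},
                             {port, 8443},
                             {loop, Loop},
                             {ssl, true},
                             {ssl_opts, [{certfile, "/path/to/cert.pem"},
                                         {keyfile,  "/path/to/key.pem"}]}]).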
diff --git a/src/mochiweb/mochiweb_acceptor.erl b/src/mochiweb/mochiweb_acceptor.erl
new file mode 100644
index 00000000..79d172c3
--- /dev/null
+++ b/src/mochiweb/mochiweb_acceptor.erl
@@ -0,0 +1,48 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc MochiWeb acceptor.
+
+-module(mochiweb_acceptor).
+-author('bob@mochimedia.com').
+
+-include("internal.hrl").
+
+-export([start_link/3, init/3]).
+
+start_link(Server, Listen, Loop) ->
+ proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop]).
+
+init(Server, Listen, Loop) ->
+ T1 = now(),
+ case catch mochiweb_socket:accept(Listen) of
+ {ok, Socket} ->
+ gen_server:cast(Server, {accepted, self(), timer:now_diff(now(), T1)}),
+ call_loop(Loop, Socket);
+ {error, closed} ->
+ exit(normal);
+ {error, timeout} ->
+ exit(normal);
+ {error, esslaccept} ->
+ exit(normal);
+ Other ->
+ error_logger:error_report(
+ [{application, mochiweb},
+ "Accept failed error",
+ lists:flatten(io_lib:format("~p", [Other]))]),
+ exit({error, accept_failed})
+ end.
+
+call_loop({M, F}, Socket) ->
+ M:F(Socket);
+call_loop({M, F, A}, Socket) ->
+ erlang:apply(M, F, [Socket | A]);
+call_loop(Loop, Socket) ->
+ Loop(Socket).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
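
call_loop/2 accepts the loop callback in three shapes: {Module, Function}, {Module, Function, ExtraArgs}, or a fun of arity 1; in each case the accepted socket is passed as the first argument. A short sketch with placeholder names (my_app:handle_socket is an assumption, not a real module):

    %% Three equivalent ways to specify the acceptor loop; the acceptor calls
    %% my_app:handle_socket(Socket[, extra]) for each accepted connection.
    loop_spec_examples() ->
        [{my_app, handle_socket},                           %% M:F(Socket)
         {my_app, handle_socket, [extra]},                  %% M:F(Socket, extra)
         fun (Socket) -> my_app:handle_socket(Socket) end]. %% Fun(Socket)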
diff --git a/src/mochiweb/mochiweb_app.erl b/src/mochiweb/mochiweb_app.erl
index 2b437f6c..5d67787b 100644
--- a/src/mochiweb/mochiweb_app.erl
+++ b/src/mochiweb/mochiweb_app.erl
@@ -18,3 +18,10 @@ start(_Type, _StartArgs) ->
%% @doc application stop callback for mochiweb.
stop(_State) ->
ok.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_charref.erl b/src/mochiweb/mochiweb_charref.erl
index d037d2f8..99cd5502 100644
--- a/src/mochiweb/mochiweb_charref.erl
+++ b/src/mochiweb/mochiweb_charref.erl
@@ -3,7 +3,7 @@
%% @doc Converts HTML 4 charrefs and entities to codepoints.
-module(mochiweb_charref).
--export([charref/1, test/0]).
+-export([charref/1]).
%% External API.
@@ -27,16 +27,6 @@ charref([$# | L]) ->
charref(L) ->
entity(L).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_charref.
-test() ->
- 1234 = charref("#1234"),
- 255 = charref("#xfF"),
- 255 = charref("#XFf"),
- 38 = charref("amp"),
- undefined = charref("not_an_entity"),
- ok.
-
%% Internal API.
entity("nbsp") -> 160;
@@ -293,3 +283,26 @@ entity("rsaquo") -> 8250;
entity("euro") -> 8364;
entity(_) -> undefined.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+exhaustive_entity_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, entity),
+ [?assertEqual(V, entity(K)) || {K, V} <- T].
+
+charref_test() ->
+ 1234 = charref("#1234"),
+ 255 = charref("#xfF"),
+ 255 = charref(<<"#XFf">>),
+ 38 = charref("amp"),
+ 38 = charref(<<"amp">>),
+ undefined = charref("not_an_entity"),
+ undefined = charref("#not_an_entity"),
+ undefined = charref("#xnot_an_entity"),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_cookies.erl b/src/mochiweb/mochiweb_cookies.erl
index 61711ff0..c090b714 100644
--- a/src/mochiweb/mochiweb_cookies.erl
+++ b/src/mochiweb/mochiweb_cookies.erl
@@ -4,7 +4,7 @@
%% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965).
-module(mochiweb_cookies).
--export([parse_cookie/1, cookie/3, cookie/2, test/0]).
+-export([parse_cookie/1, cookie/3, cookie/2]).
-define(QUOTE, $\").
@@ -130,13 +130,6 @@ parse_cookie("") ->
parse_cookie(Cookie) ->
parse_cookie(Cookie, []).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_cookies.
-test() ->
- parse_cookie_test(),
- cookie_test(),
- ok.
-
%% Internal API
parse_cookie([], Acc) ->
@@ -198,24 +191,6 @@ skip_past_separator([$, | Rest]) ->
skip_past_separator([_ | Rest]) ->
skip_past_separator(Rest).
-parse_cookie_test() ->
- %% RFC example
- C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
- Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
- Shipping=\"FedEx\"; $Path=\"/acme\"",
- [
- {"Customer","WILE_E_COYOTE"},
- {"Part_Number","Rocket_Launcher_0001"},
- {"Shipping","FedEx"}
- ] = parse_cookie(C1),
- %% Potential edge cases
- [{"foo", "x"}] = parse_cookie("foo=\"\\x\""),
- [] = parse_cookie("="),
- [{"foo", ""}, {"bar", ""}] = parse_cookie(" foo ; bar "),
- [{"foo", ""}, {"bar", ""}] = parse_cookie("foo=;bar="),
- [{"foo", "\";"}, {"bar", ""}] = parse_cookie("foo = \"\\\";\";bar "),
- [{"foo", "\";bar"}] = parse_cookie("foo=\"\\\";bar").
-
any_to_list(V) when is_list(V) ->
V;
any_to_list(V) when is_atom(V) ->
@@ -225,6 +200,81 @@ any_to_list(V) when is_binary(V) ->
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+quote_test() ->
+ %% ?assertError eunit macro is not compatible with coverage module
+ try quote(":wq")
+ catch error:{cookie_quoting_required, ":wq"} -> ok
+ end,
+ ?assertEqual(
+ "foo",
+ quote(foo)),
+ ok.
+
+parse_cookie_test() ->
+ %% RFC example
+ C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
+ Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
+ Shipping=\"FedEx\"; $Path=\"/acme\"",
+ ?assertEqual(
+ [{"Customer","WILE_E_COYOTE"},
+ {"Part_Number","Rocket_Launcher_0001"},
+ {"Shipping","FedEx"}],
+ parse_cookie(C1)),
+ %% Potential edge cases
+ ?assertEqual(
+ [{"foo", "x"}],
+ parse_cookie("foo=\"\\x\"")),
+ ?assertEqual(
+ [],
+ parse_cookie("=")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}],
+ parse_cookie(" foo ; bar ")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}],
+ parse_cookie("foo=;bar=")),
+ ?assertEqual(
+ [{"foo", "\";"}, {"bar", ""}],
+ parse_cookie("foo = \"\\\";\";bar ")),
+ ?assertEqual(
+ [{"foo", "\";bar"}],
+ parse_cookie("foo=\"\\\";bar")),
+ ?assertEqual(
+ [],
+ parse_cookie([])),
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble"}],
+ parse_cookie("foo=bar , baz=wibble ")),
+ ok.
+
+domain_test() ->
+ ?assertEqual(
+ {"Set-Cookie",
+ "Customer=WILE_E_COYOTE; "
+ "Version=1; "
+ "Domain=acme.com; "
+ "HttpOnly"},
+ cookie("Customer", "WILE_E_COYOTE",
+ [{http_only, true}, {domain, "acme.com"}])),
+ ok.
+
+local_time_test() ->
+ {"Set-Cookie", S} = cookie("Customer", "WILE_E_COYOTE",
+ [{max_age, 111}, {secure, true}]),
+ ?assertMatch(
+ ["Customer=WILE_E_COYOTE",
+ " Version=1",
+ " Expires=" ++ _,
+ " Max-Age=111",
+ " Secure"],
+ string:tokens(S, ";")),
+ ok.
cookie_test() ->
C1 = {"Set-Cookie",
@@ -238,8 +288,8 @@ cookie_test() ->
C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]),
{"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey", []),
-
- LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
+ {"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey"),
+ LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
C2 = {"Set-Cookie",
"Customer=WILE_E_COYOTE; "
"Version=1; "
@@ -255,3 +305,5 @@ cookie_test() ->
C3 = cookie("Customer", "WILE_E_COYOTE",
[{max_age, 86417}, {local_time, LocalTime}]),
ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_cover.erl b/src/mochiweb/mochiweb_cover.erl
new file mode 100644
index 00000000..6a14ef51
--- /dev/null
+++ b/src/mochiweb/mochiweb_cover.erl
@@ -0,0 +1,75 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Workarounds for various cover deficiencies.
+-module(mochiweb_cover).
+-export([get_beam/1, get_abstract_code/1,
+ get_clauses/2, clause_lookup_table/1]).
+-export([clause_lookup_table/2]).
+
+%% Internal
+
+get_beam(Module) ->
+ {Module, Beam, _Path} = code:get_object_code(Module),
+ Beam.
+
+get_abstract_code(Beam) ->
+ {ok, {_Module,
+ [{abstract_code,
+ {raw_abstract_v1, L}}]}} = beam_lib:chunks(Beam, [abstract_code]),
+ L.
+
+get_clauses(Function, Code) ->
+ [L] = [Clauses || {function, _, FName, _, Clauses}
+ <- Code, FName =:= Function],
+ L.
+
+clause_lookup_table(Module, Function) ->
+ clause_lookup_table(
+ get_clauses(Function,
+ get_abstract_code(get_beam(Module)))).
+
+clause_lookup_table(Clauses) ->
+ lists:foldr(fun clause_fold/2, [], Clauses).
+
+clause_fold({clause, _,
+ [InTerm],
+ _Guards=[],
+ [OutTerm]},
+ Acc) ->
+ try [{erl_parse:normalise(InTerm), erl_parse:normalise(OutTerm)} | Acc]
+ catch error:_ -> Acc
+ end;
+clause_fold(_, Acc) ->
+ Acc.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+foo_table(a) -> b;
+foo_table("a") -> <<"b">>;
+foo_table(123) -> {4, 3, 2};
+foo_table([list]) -> [];
+foo_table([list1, list2]) -> [list1, list2, list3];
+foo_table(ignored) -> some, code, ignored;
+foo_table(Var) -> Var.
+
+foo_table_test() ->
+ T = clause_lookup_table(?MODULE, foo_table),
+ [?assertEqual(V, foo_table(K)) || {K, V} <- T].
+
+clause_lookup_table_test() ->
+ ?assertEqual(b, foo_table(a)),
+ ?assertEqual(ignored, foo_table(ignored)),
+ ?assertEqual('Var', foo_table('Var')),
+ ?assertEqual(
+ [{a, b},
+ {"a", <<"b">>},
+ {123, {4, 3, 2}},
+ {[list], []},
+ {[list1, list2], [list1, list2, list3]}],
+ clause_lookup_table(?MODULE, foo_table)).
+
+-endif.
diff --git a/src/mochiweb/mochiweb_echo.erl b/src/mochiweb/mochiweb_echo.erl
index f32d6803..6f7872b9 100644
--- a/src/mochiweb/mochiweb_echo.erl
+++ b/src/mochiweb/mochiweb_echo.erl
@@ -18,9 +18,9 @@ start() ->
{loop, {?MODULE, loop}}]).
loop(Socket) ->
- case gen_tcp:recv(Socket, 0, 30000) of
+ case mochiweb_socket:recv(Socket, 0, 30000) of
{ok, Data} ->
- case gen_tcp:send(Socket, Data) of
+ case mochiweb_socket:send(Socket, Data) of
ok ->
loop(Socket);
_ ->
@@ -29,3 +29,10 @@ loop(Socket) ->
_Other ->
exit(normal)
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_headers.erl b/src/mochiweb/mochiweb_headers.erl
index d90fd679..4fce9838 100644
--- a/src/mochiweb/mochiweb_headers.erl
+++ b/src/mochiweb/mochiweb_headers.erl
@@ -10,66 +10,11 @@
-export([default/3, enter_from_list/2, default_from_list/2]).
-export([to_list/1, make/1]).
-export([from_binary/1]).
--export([test/0]).
%% @type headers().
%% @type key() = atom() | binary() | string().
%% @type value() = atom() | binary() | string() | integer().
-%% @spec test() -> ok
-%% @doc Run tests for this module.
-test() ->
- H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
- [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
- H1 = ?MODULE:insert(taco, grande, H),
- [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
- H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
- [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
- H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
- [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
- "foo, bar" = ?MODULE:get_value("set-cookie", H3),
- {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
- undefined = ?MODULE:get_value("shibby", H3),
- none = ?MODULE:lookup("shibby", H3),
- H4 = ?MODULE:insert("content-type",
- "application/x-www-form-urlencoded; charset=utf8",
- H3),
- "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
- "content-type", H4),
- H4 = ?MODULE:delete_any("nonexistent-header", H4),
- H3 = ?MODULE:delete_any("content-type", H4),
- HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
- H_HB = ?MODULE:from_binary(HB),
- H_HB = ?MODULE:from_binary(binary_to_list(HB)),
- "47" = ?MODULE:get_value("Content-Length", H_HB),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HB),
- L_H_HB = ?MODULE:to_list(H_HB),
- 2 = length(L_H_HB),
- true = lists:member({'Content-Length', "47"}, L_H_HB),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
- HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
- HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
- HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
- H_HL = ?MODULE:from_binary(HL),
- H_HL = ?MODULE:from_binary(HL2),
- H_HL = ?MODULE:from_binary(HL3),
- "47" = ?MODULE:get_value("Content-Length", H_HL),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HL),
- L_H_HL = ?MODULE:to_list(H_HL),
- 2 = length(L_H_HL),
- true = lists:member({'Content-Length', "47"}, L_H_HL),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary("")),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
- ok.
-
%% @spec empty() -> headers()
%% @doc Create an empty headers structure.
empty() ->
@@ -83,35 +28,34 @@ make(L) when is_list(L) ->
make(T) when is_tuple(T) ->
T.
-%% @spec from_binary(RawHttpHeader()) -> headers()
-%% @type RawHttpHeader() -> string() | binary() | [ string() | binary() ]
-%%
+%% @spec from_binary(iolist()) -> headers()
%% @doc Transforms a raw HTTP header into a mochiweb headers structure.
%%
%% The given raw HTTP header can be one of the following:
%%
-%% 1) A string or a binary representing a full HTTP header ending with
+%% 1) A string or a binary representing a full HTTP header ending with
%% double CRLF.
%% Examples:
+%% ```
%% "Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n"
-%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>
+%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>'''
%%
-%% 2) A list of binaries or strings where each element represents a raw
+%% 2) A list of binaries or strings where each element represents a raw
%% HTTP header line ending with a single CRLF.
%% Examples:
-%% [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ]
-%% [ "Content-Length: 47\r\n", "Content-Type: text/plain\r\n" ]
-%% [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ]
+%% ```
+%% [<<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">>]
+%% ["Content-Length: 47\r\n", "Content-Type: text/plain\r\n"]
+%% ["Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">>]'''
%%
from_binary(RawHttpHeader) when is_binary(RawHttpHeader) ->
from_binary(RawHttpHeader, []);
-
from_binary(RawHttpHeaderList) ->
from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])).
from_binary(RawHttpHeader, Acc) ->
case erlang:decode_packet(httph, RawHttpHeader, []) of
- { ok, {http_header, _, H, _, V}, Rest } ->
+ {ok, {http_header, _, H, _, V}, Rest} ->
from_binary(Rest, [{H, V} | Acc]);
_ ->
make(Acc)
@@ -248,4 +192,108 @@ any_to_list(V) when is_binary(V) ->
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
+%%
+%% Tests.
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+make_test() ->
+ Identity = make([{hdr, foo}]),
+ ?assertEqual(
+ Identity,
+ make(Identity)).
+
+enter_from_list_test() ->
+ H = make([{hdr, foo}]),
+ ?assertEqual(
+ [{baz, "wibble"}, {hdr, "foo"}],
+ to_list(enter_from_list([{baz, wibble}], H))),
+ ?assertEqual(
+ [{hdr, "bar"}],
+ to_list(enter_from_list([{hdr, bar}], H))),
+ ok.
+
+default_from_list_test() ->
+ H = make([{hdr, foo}]),
+ ?assertEqual(
+ [{baz, "wibble"}, {hdr, "foo"}],
+ to_list(default_from_list([{baz, wibble}], H))),
+ ?assertEqual(
+ [{hdr, "foo"}],
+ to_list(default_from_list([{hdr, bar}], H))),
+ ok.
+
+get_primary_value_test() ->
+ H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]),
+ ?assertEqual(
+ "foo",
+ get_primary_value(hdr, H)),
+ ?assertEqual(
+ undefined,
+ get_primary_value(bar, H)),
+ ?assertEqual(
+ "wibble",
+ get_primary_value(<<"baz">>, H)),
+ ok.
+
+set_cookie_test() ->
+ H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]),
+ ?assertEqual(
+ [{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}],
+ to_list(H)),
+ ok.
+
+headers_test() ->
+ H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
+ [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
+ H1 = ?MODULE:insert(taco, grande, H),
+ [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
+ H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
+ [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
+ H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
+ [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
+ "foo, bar" = ?MODULE:get_value("set-cookie", H3),
+ {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
+ undefined = ?MODULE:get_value("shibby", H3),
+ none = ?MODULE:lookup("shibby", H3),
+ H4 = ?MODULE:insert("content-type",
+ "application/x-www-form-urlencoded; charset=utf8",
+ H3),
+ "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
+ "content-type", H4),
+ H4 = ?MODULE:delete_any("nonexistent-header", H4),
+ H3 = ?MODULE:delete_any("content-type", H4),
+ HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
+ H_HB = ?MODULE:from_binary(HB),
+ H_HB = ?MODULE:from_binary(binary_to_list(HB)),
+ "47" = ?MODULE:get_value("Content-Length", H_HB),
+ "text/plain" = ?MODULE:get_value("Content-Type", H_HB),
+ L_H_HB = ?MODULE:to_list(H_HB),
+ 2 = length(L_H_HB),
+ true = lists:member({'Content-Length', "47"}, L_H_HB),
+ true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
+ HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
+ HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
+ HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
+ H_HL = ?MODULE:from_binary(HL),
+ H_HL = ?MODULE:from_binary(HL2),
+ H_HL = ?MODULE:from_binary(HL3),
+ "47" = ?MODULE:get_value("Content-Length", H_HL),
+ "text/plain" = ?MODULE:get_value("Content-Type", H_HL),
+ L_H_HL = ?MODULE:to_list(H_HL),
+ 2 = length(L_H_HL),
+ true = lists:member({'Content-Length', "47"}, L_H_HL),
+ true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary("")),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
+ ok.
+-endif.
diff --git a/src/mochiweb/mochiweb_html.erl b/src/mochiweb/mochiweb_html.erl
index 77100d50..a15c359c 100644
--- a/src/mochiweb/mochiweb_html.erl
+++ b/src/mochiweb/mochiweb_html.erl
@@ -4,9 +4,9 @@
%% @doc Loosely tokenizes and generates parse trees for HTML 4.
-module(mochiweb_html).
-export([tokens/1, parse/1, parse_tokens/1, to_tokens/1, escape/1,
- escape_attr/1, to_html/1, test/0]).
+ escape_attr/1, to_html/1]).
-% This is a macro to placate syntax highlighters..
+%% This is a macro to placate syntax highlighters.
-define(QUOTE, $\").
-define(SQUOTE, $\').
-define(ADV_COL(S, N),
@@ -35,6 +35,8 @@
-define(IS_LITERAL_SAFE(C),
((C >= $A andalso C =< $Z) orelse (C >= $a andalso C =< $z)
orelse (C >= $0 andalso C =< $9))).
+-define(PROBABLE_CLOSE(C),
+ (C =:= $> orelse ?IS_WHITESPACE(C))).
-record(decoder, {line=1,
column=1,
@@ -89,6 +91,7 @@ to_tokens(T={doctype, _}) ->
to_tokens(T={comment, _}) ->
[T];
to_tokens({Tag0, Acc}) ->
+ %% This is only allowed in sub-tags: {p, [{"class", "foo"}]}
to_tokens({Tag0, [], Acc});
to_tokens({Tag0, Attrs, Acc}) ->
Tag = to_tag(Tag0),
@@ -124,40 +127,6 @@ escape_attr(I) when is_integer(I) ->
escape_attr(F) when is_float(F) ->
escape_attr(mochinum:digits(F), []).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_html.
-test() ->
- test_destack(),
- test_tokens(),
- test_tokens2(),
- test_parse(),
- test_parse2(),
- test_parse_tokens(),
- test_escape(),
- test_escape_attr(),
- test_to_html(),
- ok.
-
-
-%% Internal API
-
-test_to_html() ->
- Expect = <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div><!-- comment! --></body></html>">>,
- Expect = iolist_to_binary(
- to_html({html, [],
- [{<<"head">>, [],
- [{title, <<"hey!">>}]},
- {body, [],
- [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
- {'div', <<"sucka">>},
- {comment, <<" comment! ">>}]}]})),
- Expect1 = <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
- Expect1 = iolist_to_binary(
- to_html({doctype,
- [<<"html">>, <<"PUBLIC">>,
- <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
- <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]})),
- ok.
to_html([], Acc) ->
lists:reverse(Acc);
to_html([{'=', Content} | Rest], Acc) ->
@@ -205,16 +174,6 @@ attrs_to_html([{K, V} | Rest], Acc) ->
[[<<" ">>, escape(K), <<"=\"">>,
escape_attr(V), <<"\"">>] | Acc]).
-test_escape() ->
- <<"&amp;quot;\"word &lt;&lt;up!&amp;quot;">> =
- escape(<<"&quot;\"word <<up!&quot;">>),
- ok.
-
-test_escape_attr() ->
- <<"&amp;quot;&quot;word &lt;&lt;up!&amp;quot;">> =
- escape_attr(<<"&quot;\"word <<up!&quot;">>),
- ok.
-
escape([], Acc) ->
list_to_binary(lists:reverse(Acc));
escape("<" ++ Rest, Acc) ->
@@ -257,6 +216,9 @@ to_tokens([{Tag0, [T0={'=', _C0} | R1]} | Rest], Acc) ->
to_tokens([{Tag0, [T0={comment, _C0} | R1]} | Rest], Acc) ->
%% Allow {comment, iolist()}
to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
+to_tokens([{Tag0, [T0={pi, _S0, _A0} | R1]} | Rest], Acc) ->
+ %% Allow {pi, binary(), list()}
+ to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
to_tokens([{Tag0, [{T0, A0=[{_, _} | _]} | R1]} | Rest], Acc) ->
%% Allow {p, [{"class", "foo"}]}
to_tokens([{Tag0, [{T0, A0, []} | R1]} | Rest], Acc);
@@ -290,39 +252,6 @@ to_tokens([{Tag0, [B | R1]} | Rest], Acc) when is_binary(B) ->
Tag = to_tag(Tag0),
to_tokens([{Tag, R1} | Rest], [{data, B, false} | Acc]).
-test_tokens() ->
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}] =
- tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>),
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}] =
- tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>),
- [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}] =
- tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"textarea">>, [], false},
- {data, <<"<html></body>">>, false},
- {end_tag, <<"textarea">>}] =
- tokens(<<"<textarea><html></body></textarea>">>),
- ok.
-
tokens(B, S=#decoder{offset=O}, Acc) ->
case B of
<<_:O/binary>> ->
@@ -374,7 +303,8 @@ tokenize(B, S=#decoder{offset=O}) ->
{{end_tag, Tag}, S2};
<<_:O/binary, "<", C, _/binary>> when ?IS_WHITESPACE(C) ->
%% This isn't really strict HTML
- tokenize_data(B, ?INC_COL(S));
+ {{data, Data, _Whitespace}, S1} = tokenize_data(B, ?INC_COL(S)),
+ {{data, <<$<, Data/binary>>, false}, S1};
<<_:O/binary, "<", _/binary>> ->
{Tag, S1} = tokenize_literal(B, ?INC_COL(S)),
{Attrs, S2} = tokenize_attributes(B, S1),
@@ -385,149 +315,6 @@ tokenize(B, S=#decoder{offset=O}) ->
tokenize_data(B, S)
end.
-test_parse() ->
- D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
-<html>
- <head>
- <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
- <title>Foo</title>
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
- <!--[if lt IE 7]>
- <style type=\"text/css\">
- .no_ie { display: none; }
- </style>
- <![endif]-->
- <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- </head>
- <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
-</html>">>,
- Expect = {<<"html">>, [],
- [{<<"head">>, [],
- [{<<"meta">>,
- [{<<"http-equiv">>,<<"Content-Type">>},
- {<<"content">>,<<"text/html; charset=UTF-8">>}],
- []},
- {<<"title">>,[],[<<"Foo">>]},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/foo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {comment,<<"[if lt IE 7]>\n <style type=\"text/css\">\n .no_ie { display: none; }\n </style>\n <![endif]">>},
- {<<"link">>,
- [{<<"rel">>,<<"icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"shortcut icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []}]},
- {<<"body">>,
- [{<<"id">>,<<"home">>},
- {<<"class">>,<<"tundra">>}],
- [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
- Expect = parse(D0),
- ok.
-
-test_tokens2() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
- Expect = [{start_tag,<<"channel">>,[],false},
- {start_tag,<<"title">>,[],false},
- {data,<<"from __future__ import *">>,false},
- {end_tag,<<"title">>},
- {start_tag,<<"link">>,[],true},
- {data,<<"http://bob.pythonmac.org">>,false},
- {end_tag,<<"link">>},
- {start_tag,<<"description">>,[],false},
- {data,<<"Bob's Rants">>,false},
- {end_tag,<<"description">>},
- {end_tag,<<"channel">>}],
- Expect = tokens(D0),
- ok.
-
-test_parse2() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
- Expect = {<<"channel">>,[],
- [{<<"title">>,[],[<<"from __future__ import *">>]},
- {<<"link">>,[],[
- <<"http://bob.pythonmac.org">>,
- {<<"br">>,[],[]},
- <<"foo">>]},
- {<<"description">>,[],[<<"Bob's Rants">>]}]},
- Expect = parse(D0),
- ok.
-
-test_parse_tokens() ->
- D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
- {data,<<"\n">>,true},
- {start_tag,<<"html">>,[],false}],
- {<<"html">>, [], []} = parse_tokens(D0),
- D1 = D0 ++ [{end_tag, <<"html">>}],
- {<<"html">>, [], []} = parse_tokens(D1),
- D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
- {<<"html">>, [], [{<<"body">>, [], []}]} = parse_tokens(D2),
- D3 = D0 ++ [{start_tag, <<"head">>, [], false},
- {end_tag, <<"head">>},
- {start_tag, <<"body">>, [], false}],
- {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]} = parse_tokens(D3),
- D4 = D3 ++ [{data,<<"\n">>,true},
- {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
- {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
- {end_tag,<<"a">>},
- {end_tag,<<"div">>},
- {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
- {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
- {end_tag,<<"div">>},
- {end_tag,<<"div">>}],
- {<<"html">>, [],
- [{<<"head">>, [], []},
- {<<"body">>, [],
- [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
- {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
- ]}]} = parse_tokens(D4),
- D5 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"boo">>,false},
- {data,<<"hoo">>,false},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- {<<"html">>, [], [<<"\nboohoo\n">>]} = parse_tokens(D5),
- D6 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- {<<"html">>, [], []} = parse_tokens(D6),
- D7 = [{start_tag,<<"html">>,[],false},
- {start_tag,<<"ul">>,[],false},
- {start_tag,<<"li">>,[],false},
- {data,<<"word">>,false},
- {start_tag,<<"li">>,[],false},
- {data,<<"up">>,false},
- {end_tag,<<"li">>},
- {start_tag,<<"li">>,[],false},
- {data,<<"fdsa">>,false},
- {start_tag,<<"br">>,[],true},
- {data,<<"asdf">>,false},
- {end_tag,<<"ul">>},
- {end_tag,<<"html">>}],
- {<<"html">>, [],
- [{<<"ul">>, [],
- [{<<"li">>, [], [<<"word">>]},
- {<<"li">>, [], [<<"up">>]},
- {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]} = parse_tokens(D7),
- ok.
-
tree_data([{data, Data, Whitespace} | Rest], AllWhitespace, Acc) ->
tree_data(Rest, (Whitespace andalso AllWhitespace), [Data | Acc]);
tree_data(Rest, AllWhitespace, Acc) ->
@@ -556,7 +343,9 @@ tree(L=[{data, _Data, _Whitespace} | _], S) ->
tree(Rest, S);
{Data, false, Rest} ->
tree(Rest, append_stack_child(Data, S))
- end.
+ end;
+tree([{doctype, _} | Rest], Stack) ->
+ tree(Rest, Stack).
norm({Tag, Attrs}) ->
{norm(Tag), [{norm(K), iolist_to_binary(V)} || {K, V} <- Attrs], []};
@@ -565,21 +354,6 @@ norm(Tag) when is_binary(Tag) ->
norm(Tag) ->
list_to_binary(string:to_lower(Tag)).
-test_destack() ->
- {<<"a">>, [], []} =
- destack([{<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], []}]} =
- destack([{<<"b">>, [], []}, {<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]} =
- destack([{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}] =
- destack(<<"b">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}] =
- destack(<<"c">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}]),
- ok.
-
stack(T1={TN, _, _}, Stack=[{TN, _, _} | _Rest])
when TN =:= <<"li">> orelse TN =:= <<"option">> ->
[T1 | destack(TN, Stack)];
@@ -719,9 +493,10 @@ find_qgt(Bin, S=#decoder{offset=O}) ->
case Bin of
<<_:O/binary, "?>", _/binary>> ->
?ADV_COL(S, 2);
- <<_:O/binary, C, _/binary>> ->
- find_qgt(Bin, ?INC_CHAR(S, C));
- _ ->
+ %% tokenize_attributes takes care of this state:
+ %% <<_:O/binary, C, _/binary>> ->
+ %% find_qgt(Bin, ?INC_CHAR(S, C));
+ <<_:O/binary>> ->
S
end.
@@ -766,7 +541,7 @@ tokenize_charref(Bin, S=#decoder{offset=O}, Start) ->
<<_:Start1/binary, R:Len1/binary, _/binary>> = Bin,
R;
Unichar ->
- list_to_binary(xmerl_ucs:to_utf8(Unichar))
+ mochiutf8:codepoint_to_bytes(Unichar)
end,
{{data, Data, false}, ?INC_COL(S)};
_ ->
@@ -791,11 +566,10 @@ tokenize_doctype(Bin, S=#decoder{offset=O}, Acc) ->
tokenize_word_or_literal(Bin, S=#decoder{offset=O}) ->
case Bin of
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- {error, {whitespace, [C], S}};
<<_:O/binary, C, _/binary>> when C =:= ?QUOTE orelse C =:= ?SQUOTE ->
tokenize_word(Bin, ?INC_COL(S), C);
- _ ->
+ <<_:O/binary, C, _/binary>> when not ?IS_WHITESPACE(C) ->
+ %% Sanity check for whitespace
tokenize_literal(Bin, S, [])
end.
@@ -852,13 +626,14 @@ tokenize_script(Bin, S=#decoder{offset=O}) ->
tokenize_script(Bin, S=#decoder{offset=O}, Start) ->
case Bin of
%% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, SS, CC, RR, II, PP, TT, _/binary>>
+ <<_:O/binary, $<, $/, SS, CC, RR, II, PP, TT, ZZ, _/binary>>
when (SS =:= $s orelse SS =:= $S) andalso
(CC =:= $c orelse CC =:= $C) andalso
(RR =:= $r orelse RR =:= $R) andalso
(II =:= $i orelse II =:= $I) andalso
(PP =:= $p orelse PP =:= $P) andalso
- (TT=:= $t orelse TT =:= $T) ->
+ (TT=:= $t orelse TT =:= $T) andalso
+ ?PROBABLE_CLOSE(ZZ) ->
Len = O - Start,
<<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
{{data, Raw, false}, S};
@@ -874,7 +649,7 @@ tokenize_textarea(Bin, S=#decoder{offset=O}) ->
tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
case Bin of
%% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, TT, EE, XX, TT2, AA, RR, EE2, AA2, _/binary>>
+ <<_:O/binary, $<, $/, TT, EE, XX, TT2, AA, RR, EE2, AA2, ZZ, _/binary>>
when (TT =:= $t orelse TT =:= $T) andalso
(EE =:= $e orelse EE =:= $E) andalso
(XX =:= $x orelse XX =:= $X) andalso
@@ -882,7 +657,8 @@ tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
(AA =:= $a orelse AA =:= $A) andalso
(RR =:= $r orelse RR =:= $R) andalso
(EE2 =:= $e orelse EE2 =:= $E) andalso
- (AA2 =:= $a orelse AA2 =:= $A) ->
+ (AA2 =:= $a orelse AA2 =:= $A) andalso
+ ?PROBABLE_CLOSE(ZZ) ->
Len = O - Start,
<<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
{{data, Raw, false}, S};
@@ -891,3 +667,395 @@ tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
<<_:Start/binary, Raw/binary>> ->
{{data, Raw, false}, S}
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+to_html_test() ->
+ ?assertEqual(
+ <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div>RAW!<!-- comment! --></body></html>">>,
+ iolist_to_binary(
+ to_html({html, [],
+ [{<<"head">>, [],
+ [{title, <<"hey!">>}]},
+ {body, [],
+ [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
+ {'div', <<"sucka">>},
+ {'=', <<"RAW!">>},
+ {comment, <<" comment! ">>}]}]}))),
+ ?assertEqual(
+ <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
+ iolist_to_binary(
+ to_html({doctype,
+ [<<"html">>, <<"PUBLIC">>,
+ <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
+ <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]}))),
+ ?assertEqual(
+ <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>,
+ iolist_to_binary(
+ to_html({<<"html">>,[],
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]}))),
+ ok.
+
+escape_test() ->
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape(<<"&quot;\"word ><<up!&quot;">>)),
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape("&quot;\"word ><<up!&quot;")),
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape('&quot;\"word ><<up!&quot;')),
+ ok.
+
+escape_attr_test() ->
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr(<<"&quot;\"word ><<up!&quot;">>)),
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr("&quot;\"word ><<up!&quot;")),
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr('&quot;\"word ><<up!&quot;')),
+ ?assertEqual(
+ <<"12345">>,
+ escape_attr(12345)),
+ ?assertEqual(
+ <<"1.5">>,
+ escape_attr(1.5)),
+ ok.
+
+tokens_test() ->
+ ?assertEqual(
+ [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"alice">>, <<"bob">>}], true}],
+ tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>)),
+ ?assertEqual(
+ [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"alice">>, <<"bob">>}], true}],
+ tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>)),
+ ?assertEqual(
+ [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}],
+ tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"textarea">>, [], false},
+ {data, <<"<html></body>">>, false},
+ {end_tag, <<"textarea">>}],
+ tokens(<<"<textarea><html></body></textarea>">>)),
+ ?assertEqual(
+ [{start_tag, <<"textarea">>, [], false},
+ {data, <<"<html></body></textareaz>">>, false}],
+ tokens(<<"<textarea ><html></body></textareaz>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office \n?>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office">>)),
+ ?assertEqual(
+ [{data, <<"<">>, false}],
+ tokens(<<"&lt;">>)),
+ ?assertEqual(
+ [{data, <<"not html ">>, false},
+ {data, <<"< at all">>, false}],
+ tokens(<<"not html < at all">>)),
+ ok.
+
+parse_test() ->
+ D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
+<html>
+ <head>
+ <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
+ <title>Foo</title>
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
+ <!--[if lt IE 7]>
+ <style type=\"text/css\">
+ .no_ie { display: none; }
+ </style>
+ <![endif]-->
+ <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
+ <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
+ </head>
+ <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
+</html>">>,
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"head">>, [],
+ [{<<"meta">>,
+ [{<<"http-equiv">>,<<"Content-Type">>},
+ {<<"content">>,<<"text/html; charset=UTF-8">>}],
+ []},
+ {<<"title">>,[],[<<"Foo">>]},
+ {<<"link">>,
+ [{<<"rel">>,<<"stylesheet">>},
+ {<<"type">>,<<"text/css">>},
+ {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
+ {<<"media">>,<<"screen">>}],
+ []},
+ {<<"link">>,
+ [{<<"rel">>,<<"stylesheet">>},
+ {<<"type">>,<<"text/css">>},
+ {<<"href">>,<<"/static/foo.css">>},
+ {<<"media">>,<<"screen">>}],
+ []},
+ {comment,<<"[if lt IE 7]>\n <style type=\"text/css\">\n .no_ie { display: none; }\n </style>\n <![endif]">>},
+ {<<"link">>,
+ [{<<"rel">>,<<"icon">>},
+ {<<"href">>,<<"/static/images/favicon.ico">>},
+ {<<"type">>,<<"image/x-icon">>}],
+ []},
+ {<<"link">>,
+ [{<<"rel">>,<<"shortcut icon">>},
+ {<<"href">>,<<"/static/images/favicon.ico">>},
+ {<<"type">>,<<"image/x-icon">>}],
+ []}]},
+ {<<"body">>,
+ [{<<"id">>,<<"home">>},
+ {<<"class">>,<<"tundra">>}],
+ [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
+ parse(D0)),
+ ?assertEqual(
+ {<<"html">>,[],
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]},
+ parse(
+ <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>)),
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"dd">>, [], [<<"foo">>]},
+ {<<"dt">>, [], [<<"bar">>]}]},
+ parse(<<"<html><dd>foo<dt>bar</html>">>)),
+ %% Singleton sadness
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"link">>, [], []},
+ <<"foo">>,
+ {<<"br">>, [], []},
+ <<"bar">>]},
+ parse(<<"<html><link>foo<br>bar</html>">>)),
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"link">>, [], [<<"foo">>,
+ {<<"br">>, [], []},
+ <<"bar">>]}]},
+ parse(<<"<html><link>foo<br>bar</link></html>">>)),
+ ok.
+
+exhaustive_is_singleton_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, is_singleton),
+ [?assertEqual(V, is_singleton(K)) || {K, V} <- T].
+
+tokenize_attributes_test() ->
+ ?assertEqual(
+ {<<"foo">>,
+ [{<<"bar">>, <<"b\"az">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"taco", 16#c2, 16#a9>>, <<"bell">>},
+ {<<"quux">>, <<"quux">>}],
+ []},
+ parse(<<"<foo bar=\"b&quot;az\" wibble taco&copy;=bell quux">>)),
+ ok.
+
+tokens2_test() ->
+ D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
+ ?assertEqual(
+ [{start_tag,<<"channel">>,[],false},
+ {start_tag,<<"title">>,[],false},
+ {data,<<"from __future__ import *">>,false},
+ {end_tag,<<"title">>},
+ {start_tag,<<"link">>,[],true},
+ {data,<<"http://bob.pythonmac.org">>,false},
+ {end_tag,<<"link">>},
+ {start_tag,<<"description">>,[],false},
+ {data,<<"Bob's Rants">>,false},
+ {end_tag,<<"description">>},
+ {end_tag,<<"channel">>}],
+ tokens(D0)),
+ ok.
+
+to_tokens_test() ->
+ ?assertEqual(
+ [{start_tag, <<"p">>, [{class, 1}], false},
+ {end_tag, <<"p">>}],
+ to_tokens({p, [{class, 1}], []})),
+ ?assertEqual(
+ [{start_tag, <<"p">>, [], false},
+ {end_tag, <<"p">>}],
+ to_tokens({p})),
+ ?assertEqual(
+ [{'=', <<"data">>}],
+ to_tokens({'=', <<"data">>})),
+ ?assertEqual(
+ [{comment, <<"comment">>}],
+ to_tokens({comment, <<"comment">>})),
+ %% This is only allowed in sub-tags:
+ %% {p, [{"class", "foo"}]} as {p, [{"class", "foo"}], []}
+ %% On the outside it's always treated as follows:
+ %% {p, [], [{"class", "foo"}]} as {p, [], [{"class", "foo"}]}
+ ?assertEqual(
+ [{start_tag, <<"html">>, [], false},
+ {start_tag, <<"p">>, [{class, 1}], false},
+ {end_tag, <<"p">>},
+ {end_tag, <<"html">>}],
+ to_tokens({html, [{p, [{class, 1}]}]})),
+ ok.
+
+parse2_test() ->
+ D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
+ ?assertEqual(
+ {<<"channel">>,[],
+ [{<<"title">>,[],[<<"from __future__ import *">>]},
+ {<<"link">>,[],[
+ <<"http://bob.pythonmac.org">>,
+ {<<"br">>,[],[]},
+ <<"foo">>]},
+ {<<"description">>,[],[<<"Bob's Rants">>]}]},
+ parse(D0)),
+ ok.
+
+parse_tokens_test() ->
+ D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
+ {data,<<"\n">>,true},
+ {start_tag,<<"html">>,[],false}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D0)),
+ D1 = D0 ++ [{end_tag, <<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D1)),
+ D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
+ ?assertEqual(
+ {<<"html">>, [], [{<<"body">>, [], []}]},
+ parse_tokens(D2)),
+ D3 = D0 ++ [{start_tag, <<"head">>, [], false},
+ {end_tag, <<"head">>},
+ {start_tag, <<"body">>, [], false}],
+ ?assertEqual(
+ {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]},
+ parse_tokens(D3)),
+ D4 = D3 ++ [{data,<<"\n">>,true},
+ {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
+ {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
+ {end_tag,<<"a">>},
+ {end_tag,<<"div">>},
+ {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
+ {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
+ {end_tag,<<"div">>},
+ {end_tag,<<"div">>}],
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"head">>, [], []},
+ {<<"body">>, [],
+ [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
+ {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
+ ]}]},
+ parse_tokens(D4)),
+ D5 = [{start_tag,<<"html">>,[],false},
+ {data,<<"\n">>,true},
+ {data,<<"boo">>,false},
+ {data,<<"hoo">>,false},
+ {data,<<"\n">>,true},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], [<<"\nboohoo\n">>]},
+ parse_tokens(D5)),
+ D6 = [{start_tag,<<"html">>,[],false},
+ {data,<<"\n">>,true},
+ {data,<<"\n">>,true},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D6)),
+ D7 = [{start_tag,<<"html">>,[],false},
+ {start_tag,<<"ul">>,[],false},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"word">>,false},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"up">>,false},
+ {end_tag,<<"li">>},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"fdsa">>,false},
+ {start_tag,<<"br">>,[],true},
+ {data,<<"asdf">>,false},
+ {end_tag,<<"ul">>},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"ul">>, [],
+ [{<<"li">>, [], [<<"word">>]},
+ {<<"li">>, [], [<<"up">>]},
+ {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]},
+ parse_tokens(D7)),
+ ok.
+
+destack_test() ->
+ {<<"a">>, [], []} =
+ destack([{<<"a">>, [], []}]),
+ {<<"a">>, [], [{<<"b">>, [], []}]} =
+ destack([{<<"b">>, [], []}, {<<"a">>, [], []}]),
+ {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]} =
+ destack([{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
+ [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}] =
+ destack(<<"b">>,
+ [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
+ [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}] =
+ destack(<<"c">>,
+ [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}]),
+ ok.
+
+doctype_test() ->
+ ?assertEqual(
+ {<<"html">>,[],[{<<"head">>,[],[]}]},
+ mochiweb_html:parse("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
+ "<html><head></head></body></html>")),
+ %% http://code.google.com/p/mochiweb/issues/detail?id=52
+ ?assertEqual(
+ {<<"html">>,[],[{<<"head">>,[],[]}]},
+ mochiweb_html:parse("<html>"
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
+ "<head></head></body></html>")),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_http.erl b/src/mochiweb/mochiweb_http.erl
index f1821f40..24140994 100644
--- a/src/mochiweb/mochiweb_http.erl
+++ b/src/mochiweb/mochiweb_http.erl
@@ -8,31 +8,22 @@
-export([start/0, start/1, stop/0, stop/1]).
-export([loop/2, default_body/1]).
-export([after_response/2, reentry/1]).
+-export([parse_range_request/1, range_skip_length/2]).
--define(IDLE_TIMEOUT, 30000).
+-define(REQUEST_RECV_TIMEOUT, 300000). % timeout waiting for request line
+-define(HEADERS_RECV_TIMEOUT, 30000). % timeout waiting for headers
-define(MAX_HEADERS, 1000).
-define(DEFAULTS, [{name, ?MODULE},
{port, 8888}]).
-set_default({Prop, Value}, PropList) ->
- case proplists:is_defined(Prop, PropList) of
- true ->
- PropList;
- false ->
- [{Prop, Value} | PropList]
- end.
-
-set_defaults(Defaults, PropList) ->
- lists:foldl(fun set_default/2, PropList, Defaults).
-
parse_options(Options) ->
{loop, HttpLoop} = proplists:lookup(loop, Options),
Loop = fun (S) ->
?MODULE:loop(S, HttpLoop)
end,
Options1 = [{loop, Loop} | proplists:delete(loop, Options)],
- set_defaults(?DEFAULTS, Options1).
+ mochilists:set_defaults(?DEFAULTS, Options1).
stop() ->
mochiweb_socket_server:stop(?MODULE).
@@ -95,20 +86,26 @@ default_body(Req) ->
default_body(Req, Req:get(method), Req:get(path)).
loop(Socket, Body) ->
- inet:setopts(Socket, [{packet, http}]),
+ mochiweb_socket:setopts(Socket, [{packet, http}]),
request(Socket, Body).
request(Socket, Body) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?REQUEST_RECV_TIMEOUT) of
{ok, {http_request, Method, Path, Version}} ->
+ mochiweb_socket:setopts(Socket, [{packet, httph}]),
headers(Socket, {Method, Path, Version}, [], Body, 0);
{error, {http_error, "\r\n"}} ->
request(Socket, Body);
{error, {http_error, "\n"}} ->
request(Socket, Body);
+ {error, closed} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
+ {error, timeout} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
_Other ->
- gen_tcp:close(Socket),
- exit(normal)
+ handle_invalid_request(Socket)
end.
reentry(Body) ->
@@ -118,35 +115,161 @@ reentry(Body) ->
headers(Socket, Request, Headers, _Body, ?MAX_HEADERS) ->
%% Too many headers sent, bad request.
- inet:setopts(Socket, [{packet, raw}]),
- Req = mochiweb:new_request({Socket, Request,
- lists:reverse(Headers)}),
- Req:respond({400, [], []}),
- gen_tcp:close(Socket),
- exit(normal);
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ handle_invalid_request(Socket, Request, Headers);
headers(Socket, Request, Headers, Body, HeaderCount) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?HEADERS_RECV_TIMEOUT) of
{ok, http_eoh} ->
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
Req = mochiweb:new_request({Socket, Request,
lists:reverse(Headers)}),
- Body(Req),
+ call_body(Body, Req),
?MODULE:after_response(Body, Req);
{ok, {http_header, _, Name, _, Value}} ->
headers(Socket, Request, [{Name, Value} | Headers], Body,
1 + HeaderCount);
+ {error, closed} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
_Other ->
- gen_tcp:close(Socket),
- exit(normal)
+ handle_invalid_request(Socket, Request, Headers)
end.
+call_body({M, F}, Req) ->
+ M:F(Req);
+call_body(Body, Req) ->
+ Body(Req).
+
+handle_invalid_request(Socket) ->
+ handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []).
+
+handle_invalid_request(Socket, Request, RevHeaders) ->
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ Req = mochiweb:new_request({Socket, Request,
+ lists:reverse(RevHeaders)}),
+ Req:respond({400, [], []}),
+ mochiweb_socket:close(Socket),
+ exit(normal).
+
after_response(Body, Req) ->
Socket = Req:get(socket),
case Req:should_close() of
true ->
- gen_tcp:close(Socket),
+ mochiweb_socket:close(Socket),
exit(normal);
false ->
Req:cleanup(),
?MODULE:loop(Socket, Body)
end.
+
+parse_range_request("bytes=0-") ->
+ undefined;
+parse_range_request(RawRange) when is_list(RawRange) ->
+ try
+ "bytes=" ++ RangeString = RawRange,
+ Ranges = string:tokens(RangeString, ","),
+ lists:map(fun ("-" ++ V) ->
+ {none, list_to_integer(V)};
+ (R) ->
+ case string:tokens(R, "-") of
+ [S1, S2] ->
+ {list_to_integer(S1), list_to_integer(S2)};
+ [S] ->
+ {list_to_integer(S), none}
+ end
+ end,
+ Ranges)
+ catch
+ _:_ ->
+ fail
+ end.
+
+range_skip_length(Spec, Size) ->
+ case Spec of
+ {none, R} when R =< Size, R >= 0 ->
+ {Size - R, R};
+ {none, _OutOfRange} ->
+ {0, Size};
+ {R, none} when R >= 0, R < Size ->
+ {R, Size - R};
+ {_OutOfRange, none} ->
+ invalid_range;
+ {Start, End} when 0 =< Start, Start =< End, End < Size ->
+ {Start, End - Start + 1};
+ {_OutOfRange, _End} ->
+ invalid_range
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+range_test() ->
+ %% valid, single ranges
+ ?assertEqual([{20, 30}], parse_range_request("bytes=20-30")),
+ ?assertEqual([{20, none}], parse_range_request("bytes=20-")),
+ ?assertEqual([{none, 20}], parse_range_request("bytes=-20")),
+
+ %% trivial single range
+ ?assertEqual(undefined, parse_range_request("bytes=0-")),
+
+ %% invalid, single ranges
+ ?assertEqual(fail, parse_range_request("")),
+ ?assertEqual(fail, parse_range_request("garbage")),
+ ?assertEqual(fail, parse_range_request("bytes=-20-30")),
+
+    %% valid, multiple ranges
+ ?assertEqual(
+ [{20, 30}, {50, 100}, {110, 200}],
+ parse_range_request("bytes=20-30,50-100,110-200")),
+ ?assertEqual(
+ [{20, none}, {50, 100}, {none, 200}],
+ parse_range_request("bytes=20-,50-100,-200")),
+
+ %% no ranges
+ ?assertEqual([], parse_range_request("bytes=")),
+ ok.
+
+range_skip_length_test() ->
+ Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
+ BodySize = byte_size(Body), %% 60
+ BodySize = 60,
+
+ %% these values assume BodySize =:= 60
+ ?assertEqual({1,9}, range_skip_length({1,9}, BodySize)), %% 1-9
+ ?assertEqual({10,10}, range_skip_length({10,19}, BodySize)), %% 10-19
+ ?assertEqual({40, 20}, range_skip_length({none, 20}, BodySize)), %% -20
+ ?assertEqual({30, 30}, range_skip_length({30, none}, BodySize)), %% 30-
+
+ %% valid edge cases for range_skip_length
+ ?assertEqual({BodySize, 0}, range_skip_length({none, 0}, BodySize)),
+ ?assertEqual({0, BodySize}, range_skip_length({none, BodySize}, BodySize)),
+ ?assertEqual({0, BodySize}, range_skip_length({0, none}, BodySize)),
+ BodySizeLess1 = BodySize - 1,
+ ?assertEqual({BodySizeLess1, 1},
+ range_skip_length({BodySize - 1, none}, BodySize)),
+
+ %% out of range, return whole thing
+ ?assertEqual({0, BodySize},
+ range_skip_length({none, BodySize + 1}, BodySize)),
+ ?assertEqual({0, BodySize},
+ range_skip_length({none, -1}, BodySize)),
+
+ %% invalid ranges
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, 30}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({0, BodySize + 1}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, BodySize + 1}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({BodySize, 40}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, none}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({BodySize, none}, BodySize)),
+ ok.
+
+-endif.
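
The two new exports are meant to be used together when a handler serves HTTP Range requests: parse_range_request/1 turns the raw header value into a list of range specs (or undefined, fail, or []), and range_skip_length/2 maps one spec plus the body size onto a {Skip, Length} pair. A minimal sketch for an in-memory binary body; Req, the header lookup, and the 206/416 handling are assumptions rather than code from this patch:

    range_reply(Req, Body) when is_binary(Body) ->
        Size = byte_size(Body),
        RawRange = Req:get_header_value("range"),
        Ranges = case is_list(RawRange) of
                     true  -> mochiweb_http:parse_range_request(RawRange);
                     false -> undefined               %% no Range header sent
                 end,
        case Ranges of
            [Spec | _] ->                             %% serve the first range only
                case mochiweb_http:range_skip_length(Spec, Size) of
                    invalid_range ->
                        Req:respond({416, [], []});
                    {Skip, Length} ->
                        <<_:Skip/binary, Part:Length/binary, _/binary>> = Body,
                        Req:respond({206,
                                     [{"Content-Type", "application/octet-stream"}],
                                     Part})
                end;
            _ ->                                      %% undefined, fail or []
                Req:ok({"application/octet-stream", Body})
        end.

A real 206 response would also carry a Content-Range header; it is omitted here for brevity.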
diff --git a/src/mochiweb/mochiweb_io.erl b/src/mochiweb/mochiweb_io.erl
new file mode 100644
index 00000000..6ce57ec8
--- /dev/null
+++ b/src/mochiweb/mochiweb_io.erl
@@ -0,0 +1,46 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Utilities for dealing with IO devices (open files).
+
+-module(mochiweb_io).
+-author('bob@mochimedia.com').
+
+-export([iodevice_stream/3, iodevice_stream/2]).
+-export([iodevice_foldl/4, iodevice_foldl/3]).
+-export([iodevice_size/1]).
+-define(READ_SIZE, 8192).
+
+iodevice_foldl(F, Acc, IoDevice) ->
+ iodevice_foldl(F, Acc, IoDevice, ?READ_SIZE).
+
+iodevice_foldl(F, Acc, IoDevice, BufferSize) ->
+ case file:read(IoDevice, BufferSize) of
+ eof ->
+ Acc;
+ {ok, Data} ->
+ iodevice_foldl(F, F(Data, Acc), IoDevice, BufferSize)
+ end.
+
+iodevice_stream(Callback, IoDevice) ->
+ iodevice_stream(Callback, IoDevice, ?READ_SIZE).
+
+iodevice_stream(Callback, IoDevice, BufferSize) ->
+ F = fun (Data, ok) -> Callback(Data) end,
+ ok = iodevice_foldl(F, ok, IoDevice, BufferSize).
+
+iodevice_size(IoDevice) ->
+ {ok, Size} = file:position(IoDevice, eof),
+ {ok, 0} = file:position(IoDevice, bof),
+ Size.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+
+
+-endif.
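
The fold and stream helpers above consume an already-open IO device in ?READ_SIZE chunks, and iodevice_stream/2 expects the callback to return ok for every chunk. A minimal usage sketch with a placeholder path; the callback could, for instance, hand each chunk to a chunked HTTP response writer:

    stream_file(Path, Callback) ->
        %% raw + binary mode works with the file:read/2 and file:position/2
        %% calls this module makes.
        {ok, IoDevice} = file:open(Path, [read, raw, binary]),
        try
            Size = mochiweb_io:iodevice_size(IoDevice),  %% rewinds to bof
            ok = mochiweb_io:iodevice_stream(Callback, IoDevice),
            {ok, Size}
        after
            file:close(IoDevice)
        end.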
diff --git a/src/mochiweb/mochiweb_mime.erl b/src/mochiweb/mochiweb_mime.erl
new file mode 100644
index 00000000..5344aee7
--- /dev/null
+++ b/src/mochiweb/mochiweb_mime.erl
@@ -0,0 +1,94 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Gives a good MIME type guess based on file extension.
+
+-module(mochiweb_mime).
+-author('bob@mochimedia.com').
+-export([from_extension/1]).
+
+%% @spec from_extension(S::string()) -> string() | undefined
+%% @doc Given a filename extension (e.g. ".html") return a guess for the MIME
+%% type such as "text/html". Will return the atom undefined if no good
+%% guess is available.
+from_extension(".html") ->
+ "text/html";
+from_extension(".xhtml") ->
+ "application/xhtml+xml";
+from_extension(".xml") ->
+ "application/xml";
+from_extension(".css") ->
+ "text/css";
+from_extension(".js") ->
+ "application/x-javascript";
+from_extension(".jpg") ->
+ "image/jpeg";
+from_extension(".gif") ->
+ "image/gif";
+from_extension(".png") ->
+ "image/png";
+from_extension(".swf") ->
+ "application/x-shockwave-flash";
+from_extension(".zip") ->
+ "application/zip";
+from_extension(".bz2") ->
+ "application/x-bzip2";
+from_extension(".gz") ->
+ "application/x-gzip";
+from_extension(".tar") ->
+ "application/x-tar";
+from_extension(".tgz") ->
+ "application/x-gzip";
+from_extension(".txt") ->
+ "text/plain";
+from_extension(".doc") ->
+ "application/msword";
+from_extension(".pdf") ->
+ "application/pdf";
+from_extension(".xls") ->
+ "application/vnd.ms-excel";
+from_extension(".rtf") ->
+ "application/rtf";
+from_extension(".mov") ->
+ "video/quicktime";
+from_extension(".mp3") ->
+ "audio/mpeg";
+from_extension(".z") ->
+ "application/x-compress";
+from_extension(".wav") ->
+ "audio/x-wav";
+from_extension(".ico") ->
+ "image/x-icon";
+from_extension(".bmp") ->
+ "image/bmp";
+from_extension(".m4a") ->
+ "audio/mpeg";
+from_extension(".m3u") ->
+ "audio/x-mpegurl";
+from_extension(".exe") ->
+ "application/octet-stream";
+from_extension(".csv") ->
+ "text/csv";
+from_extension(_) ->
+ undefined.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+exhaustive_from_extension_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, from_extension),
+ [?assertEqual(V, from_extension(K)) || {K, V} <- T].
+
+from_extension_test() ->
+ ?assertEqual("text/html",
+ from_extension(".html")),
+ ?assertEqual(undefined,
+ from_extension("")),
+ ?assertEqual(undefined,
+ from_extension(".wtf")),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_multipart.erl b/src/mochiweb/mochiweb_multipart.erl
index 0368a9a6..3069cf4d 100644
--- a/src/mochiweb/mochiweb_multipart.erl
+++ b/src/mochiweb/mochiweb_multipart.erl
@@ -8,17 +8,73 @@
-export([parse_form/1, parse_form/2]).
-export([parse_multipart_request/2]).
--export([test/0]).
+-export([parts_to_body/3, parts_to_multipart_body/4]).
+-export([default_file_handler/2]).
-define(CHUNKSIZE, 4096).
-record(mp, {state, boundary, length, buffer, callback, req}).
%% TODO: DOCUMENT THIS MODULE.
-
+%% @type key() = atom() | string() | binary().
+%% @type value() = atom() | iolist() | integer().
+%% @type header() = {key(), value()}.
+%% @type bodypart() = {Start::integer(), End::integer(), Body::iolist()}.
+%% @type formfile() = {Name::string(), ContentType::string(), Content::binary()}.
+%% @type request().
+%% @type file_handler() = (Filename::string(), ContentType::string()) -> file_handler_callback().
+%% @type file_handler_callback() = (binary() | eof) -> file_handler_callback() | term().
+
+%% @spec parts_to_body([bodypart()], ContentType::string(),
+%% Size::integer()) -> {[header()], iolist()}
+%% @doc Return {[header()], iolist()} representing the body for the given
+%%      parts, which may be a single part or multipart.
+parts_to_body([{Start, End, Body}], ContentType, Size) ->
+ HeaderList = [{"Content-Type", ContentType},
+ {"Content-Range",
+ ["bytes ",
+ mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
+ "/", mochiweb_util:make_io(Size)]}],
+ {HeaderList, Body};
+parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
+ parts_to_multipart_body(BodyList, ContentType, Size,
+ mochihex:to_hex(crypto:rand_bytes(8))).
+
+%% @spec parts_to_multipart_body([bodypart()], ContentType::string(),
+%% Size::integer(), Boundary::string()) ->
+%% {[header()], iolist()}
+%% @doc Return {[header()], iolist()} representing the body for the given
+%%      parts, always as a multipart response.
+parts_to_multipart_body(BodyList, ContentType, Size, Boundary) ->
+ HeaderList = [{"Content-Type",
+ ["multipart/byteranges; ",
+ "boundary=", Boundary]}],
+ MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
+
+ {HeaderList, MultiPartBody}.
+
+%% @spec multipart_body([bodypart()], ContentType::string(),
+%% Boundary::string(), Size::integer()) -> iolist()
+%% @doc Return the representation of a multipart body for the given [bodypart()].
+multipart_body([], _ContentType, Boundary, _Size) ->
+ ["--", Boundary, "--\r\n"];
+multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
+ ["--", Boundary, "\r\n",
+ "Content-Type: ", ContentType, "\r\n",
+ "Content-Range: ",
+ "bytes ", mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
+ "/", mochiweb_util:make_io(Size), "\r\n\r\n",
+ Body, "\r\n"
+ | multipart_body(BodyList, ContentType, Boundary, Size)].
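For orientation, a deterministic sketch (not part of the patch) of what these helpers produce when a fixed boundary is supplied; the shape matches multipart_body_test further down, and the single-part case in parts_to_body/3 instead emits a plain Content-Range header:

    {Headers, Body} =
        mochiweb_multipart:parts_to_multipart_body(
          [{0, 4, <<"01234">>}, {15, 19, <<"56789">>}],
          "text/plain", 20, "BOUNDARY"),
    %% Headers = [{"Content-Type",
    %%             ["multipart/byteranges; ", "boundary=", "BOUNDARY"]}]
    %% iolist_to_binary(Body) yields two "--BOUNDARY" sections carrying
    %% Content-Range: bytes 0-4/20 and bytes 15-19/20, then "--BOUNDARY--".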
+
+%% @spec parse_form(request()) -> [{string(), string() | formfile()}]
+%% @doc Parse a multipart form from the given request using the in-memory
+%% default_file_handler/2.
parse_form(Req) ->
parse_form(Req, fun default_file_handler/2).
+%% @spec parse_form(request(), F::file_handler()) -> [{string(), string() | term()}]
+%% @doc Parse a multipart form from the given request using the given file_handler().
parse_form(Req, FileHandler) ->
Callback = fun (Next) -> parse_form_outer(Next, FileHandler, []) end,
{_, _, Res} = parse_multipart_request(Req, Callback),
@@ -236,13 +292,38 @@ find_boundary(Prefix, Data) ->
not_found
end.
-with_socket_server(ServerFun, ClientFun) ->
- {ok, Server} = mochiweb_socket_server:start([{ip, "127.0.0.1"},
- {port, 0},
- {loop, ServerFun}]),
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+ssl_cert_opts() ->
+ EbinDir = filename:dirname(code:which(?MODULE)),
+ CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
+ CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
+ KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
+ [{certfile, CertFile}, {keyfile, KeyFile}].
+
+with_socket_server(Transport, ServerFun, ClientFun) ->
+ ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
+ ServerOpts = case Transport of
+ plain ->
+ ServerOpts0;
+ ssl ->
+ ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
+ end,
+ {ok, Server} = mochiweb_socket_server:start(ServerOpts),
Port = mochiweb_socket_server:get(Server, port),
- {ok, Client} = gen_tcp:connect("127.0.0.1", Port,
- [binary, {active, false}]),
+ ClientOpts = [binary, {active, false}],
+ {ok, Client} = case Transport of
+ plain ->
+ gen_tcp:connect("127.0.0.1", Port, ClientOpts);
+ ssl ->
+ ClientOpts1 = [{ssl_imp, new} | ClientOpts],
+ {ok, SslSocket} = ssl:connect("127.0.0.1", Port, ClientOpts1),
+ {ok, {ssl, SslSocket}}
+ end,
Res = (catch ClientFun(Client)),
mochiweb_socket_server:stop(Server),
Res.
@@ -256,19 +337,30 @@ fake_request(Socket, ContentType, Length) ->
[{"content-type", ContentType},
{"content-length", Length}])).
-test_callback(Expect, [Expect | Rest]) ->
+test_callback({body, <<>>}, Rest=[body_end | _]) ->
+ %% When expecting the body_end we might get an empty binary
+ fun (Next) -> test_callback(Next, Rest) end;
+test_callback({body, Got}, [{body, Expect} | Rest]) when Got =/= Expect ->
+ %% Partial response
+ GotSize = size(Got),
+ <<Got:GotSize/binary, Expect1/binary>> = Expect,
+ fun (Next) -> test_callback(Next, [{body, Expect1} | Rest]) end;
+test_callback(Got, [Expect | Rest]) ->
+ ?assertEqual(Got, Expect),
case Rest of
[] ->
ok;
_ ->
fun (Next) -> test_callback(Next, Rest) end
- end;
-test_callback({body, Got}, [{body, Expect} | Rest]) ->
- GotSize = size(Got),
- <<Got:GotSize/binary, Expect1/binary>> = Expect,
- fun (Next) -> test_callback(Next, [{body, Expect1} | Rest]) end.
+ end.
-test_parse3() ->
+parse3_http_test() ->
+ parse3(plain).
+
+parse3_https_test() ->
+ parse3(ssl).
+
+parse3(Transport) ->
ContentType = "multipart/form-data; boundary=---------------------------7386909285754635891697677882",
BinContent = <<"-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"file\"; filename=\"test_file.txt\"\r\nContent-Type: text/plain\r\n\r\nWoo multiline text file\n\nLa la la\r\n-----------------------------7386909285754635891697677882--\r\n">>,
Expect = [{headers,
@@ -285,8 +377,8 @@ test_parse3() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -295,11 +387,16 @@ test_parse3() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
+parse2_http_test() ->
+ parse2(plain).
+
+parse2_https_test() ->
+ parse2(ssl).
-test_parse2() ->
+parse2(Transport) ->
ContentType = "multipart/form-data; boundary=---------------------------6072231407570234361599764024",
BinContent = <<"-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"file\"; filename=\"\"\r\nContent-Type: application/octet-stream\r\n\r\n\r\n-----------------------------6072231407570234361599764024--\r\n">>,
Expect = [{headers,
@@ -316,8 +413,8 @@ test_parse2() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -326,10 +423,16 @@ test_parse2() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_parse_form() ->
+parse_form_http_test() ->
+ do_parse_form(plain).
+
+parse_form_https_test() ->
+ do_parse_form(ssl).
+
+do_parse_form(Transport) ->
ContentType = "multipart/form-data; boundary=AaB03x",
"AaB03x" = get_boundary(ContentType),
Content = mochiweb_util:join(
@@ -347,8 +450,8 @@ test_parse_form() ->
""], "\r\n"),
BinContent = iolist_to_binary(Content),
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -360,10 +463,16 @@ test_parse_form() ->
}] = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_parse() ->
+parse_http_test() ->
+ do_parse(plain).
+
+parse_https_test() ->
+ do_parse(ssl).
+
+do_parse(Transport) ->
ContentType = "multipart/form-data; boundary=AaB03x",
"AaB03x" = get_boundary(ContentType),
Content = mochiweb_util:join(
@@ -394,8 +503,113 @@ test_parse() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
+ end,
+ ClientFun = fun (Socket) ->
+ Req = fake_request(Socket, ContentType,
+ byte_size(BinContent)),
+ Res = parse_multipart_request(Req, TestCallback),
+ {0, <<>>, ok} = Res,
+ ok
+ end,
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_partial_body_boundary_http_test() ->
+ parse_partial_body_boundary(plain).
+
+parse_partial_body_boundary_https_test() ->
+ parse_partial_body_boundary(ssl).
+
+parse_partial_body_boundary(Transport) ->
+ Boundary = string:copies("$", 2048),
+ ContentType = "multipart/form-data; boundary=" ++ Boundary,
+ ?assertEqual(Boundary, get_boundary(ContentType)),
+ Content = mochiweb_util:join(
+ ["--" ++ Boundary,
+ "Content-Disposition: form-data; name=\"submit-name\"",
+ "",
+ "Larry",
+ "--" ++ Boundary,
+ "Content-Disposition: form-data; name=\"files\";"
+ ++ "filename=\"file1.txt\"",
+ "Content-Type: text/plain",
+ "",
+ "... contents of file1.txt ...",
+ "--" ++ Boundary ++ "--",
+ ""], "\r\n"),
+ BinContent = iolist_to_binary(Content),
+ Expect = [{headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "submit-name"}]}}]},
+ {body, <<"Larry">>},
+ body_end,
+ {headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
+ {"content-type", {"text/plain", []}}
+ ]},
+ {body, <<"... contents of file1.txt ...">>},
+ body_end,
+ eof],
+ TestCallback = fun (Next) -> test_callback(Next, Expect) end,
+ ServerFun = fun (Socket) ->
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
+ end,
+ ClientFun = fun (Socket) ->
+ Req = fake_request(Socket, ContentType,
+ byte_size(BinContent)),
+ Res = parse_multipart_request(Req, TestCallback),
+ {0, <<>>, ok} = Res,
+ ok
+ end,
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_large_header_http_test() ->
+ parse_large_header(plain).
+
+parse_large_header_https_test() ->
+ parse_large_header(ssl).
+
+parse_large_header(Transport) ->
+ ContentType = "multipart/form-data; boundary=AaB03x",
+ "AaB03x" = get_boundary(ContentType),
+ Content = mochiweb_util:join(
+ ["--AaB03x",
+ "Content-Disposition: form-data; name=\"submit-name\"",
+ "",
+ "Larry",
+ "--AaB03x",
+ "Content-Disposition: form-data; name=\"files\";"
+ ++ "filename=\"file1.txt\"",
+ "Content-Type: text/plain",
+ "x-large-header: " ++ string:copies("%", 4096),
+ "",
+ "... contents of file1.txt ...",
+ "--AaB03x--",
+ ""], "\r\n"),
+ BinContent = iolist_to_binary(Content),
+ Expect = [{headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "submit-name"}]}}]},
+ {body, <<"Larry">>},
+ body_end,
+ {headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
+ {"content-type", {"text/plain", []}},
+ {"x-large-header", {string:copies("%", 4096), []}}
+ ]},
+ {body, <<"... contents of file1.txt ...">>},
+ body_end,
+ eof],
+ TestCallback = fun (Next) -> test_callback(Next, Expect) end,
+ ServerFun = fun (Socket) ->
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -404,10 +618,10 @@ test_parse() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_find_boundary() ->
+find_boundary_test() ->
B = <<"\r\n--X">>,
{next_boundary, 0, 7} = find_boundary(B, <<"\r\n--X\r\nRest">>),
{next_boundary, 1, 7} = find_boundary(B, <<"!\r\n--X\r\nRest">>),
@@ -422,9 +636,10 @@ test_find_boundary() ->
45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,
49,54,48,51,55,52,53,52,51,53,49>>,
{maybe, 30} = find_boundary(P, B0),
+ not_found = find_boundary(B, <<"\r\n--XJOPKE">>),
ok.
-test_find_in_binary() ->
+find_in_binary_test() ->
{exact, 0} = find_in_binary(<<"foo">>, <<"foobarbaz">>),
{exact, 1} = find_in_binary(<<"oo">>, <<"foobarbaz">>),
{exact, 8} = find_in_binary(<<"z">>, <<"foobarbaz">>),
@@ -435,7 +650,13 @@ test_find_in_binary() ->
{partial, 1, 3} = find_in_binary(<<"foobar">>, <<"afoo">>),
ok.
-test_flash_parse() ->
+flash_parse_http_test() ->
+ flash_parse(plain).
+
+flash_parse_https_test() ->
+ flash_parse(ssl).
+
+flash_parse(Transport) ->
ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
"----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
BinContent = <<"------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Filename\"\r\n\r\nhello.txt\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"success_action_status\"\r\n\r\n201\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"\r\nContent-Type: application/octet-stream\r\n\r\nhello\n\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Upload\"\r\n\r\nSubmit Query\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5--">>,
@@ -463,8 +684,8 @@ test_flash_parse() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -473,10 +694,16 @@ test_flash_parse() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_flash_parse2() ->
+flash_parse2_http_test() ->
+ flash_parse2(plain).
+
+flash_parse2_https_test() ->
+ flash_parse2(ssl).
+
+flash_parse2(Transport) ->
ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
"----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
Chunk = iolist_to_binary(string:copies("%", 4096)),
@@ -505,8 +732,8 @@ test_flash_parse2() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -515,16 +742,83 @@ test_flash_parse2() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_headers_test() ->
+ ?assertEqual([], parse_headers(<<>>)).
+
+flash_multipart_hack_test() ->
+ Buffer = <<"prefix-">>,
+ Prefix = <<"prefix">>,
+ State = #mp{length=0, buffer=Buffer, boundary=Prefix},
+ ?assertEqual(State,
+ flash_multipart_hack(State)).
+
+parts_to_body_single_test() ->
+ {HL, B} = parts_to_body([{0, 5, <<"01234">>}],
+ "text/plain",
+ 10),
+ [{"Content-Range", Range},
+ {"Content-Type", Type}] = lists:sort(HL),
+ ?assertEqual(
+ <<"bytes 0-5/10">>,
+ iolist_to_binary(Range)),
+ ?assertEqual(
+ <<"text/plain">>,
+ iolist_to_binary(Type)),
+ ?assertEqual(
+ <<"01234">>,
+ iolist_to_binary(B)),
+ ok.
+
+parts_to_body_multi_test() ->
+ {[{"Content-Type", Type}],
+ _B} = parts_to_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ 10),
+ ?assertMatch(
+ <<"multipart/byteranges; boundary=", _/binary>>,
+ iolist_to_binary(Type)),
ok.
-test() ->
- test_find_in_binary(),
- test_find_boundary(),
- test_parse(),
- test_parse2(),
- test_parse3(),
- test_parse_form(),
- test_flash_parse(),
- test_flash_parse2(),
+parts_to_multipart_body_test() ->
+ {[{"Content-Type", V}], B} = parts_to_multipart_body(
+ [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ 10,
+ "BOUNDARY"),
+ MB = multipart_body(
+ [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ "BOUNDARY",
+ 10),
+ ?assertEqual(
+ <<"multipart/byteranges; boundary=BOUNDARY">>,
+ iolist_to_binary(V)),
+ ?assertEqual(
+ iolist_to_binary(MB),
+ iolist_to_binary(B)),
ok.
+
+multipart_body_test() ->
+ ?assertEqual(
+ <<"--BOUNDARY--\r\n">>,
+ iolist_to_binary(multipart_body([], "text/plain", "BOUNDARY", 0))),
+ ?assertEqual(
+ <<"--BOUNDARY\r\n"
+ "Content-Type: text/plain\r\n"
+ "Content-Range: bytes 0-5/10\r\n\r\n"
+ "01234\r\n"
+ "--BOUNDARY\r\n"
+ "Content-Type: text/plain\r\n"
+ "Content-Range: bytes 5-10/10\r\n\r\n"
+ "56789\r\n"
+ "--BOUNDARY--\r\n">>,
+ iolist_to_binary(multipart_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ "BOUNDARY",
+ 10))),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_request.erl b/src/mochiweb/mochiweb_request.erl
index 5d7af26b..1cf96160 100644
--- a/src/mochiweb/mochiweb_request.erl
+++ b/src/mochiweb/mochiweb_request.erl
@@ -7,9 +7,9 @@
-author('bob@mochimedia.com').
-include_lib("kernel/include/file.hrl").
+-include("internal.hrl").
-define(QUIP, "Any of you quaids got a smint?").
--define(READ_SIZE, 8192).
-export([get_header_value/1, get_primary_header_value/1, get/1, dump/0]).
-export([send/1, recv/1, recv/2, recv_body/0, recv_body/1, stream_body/3]).
@@ -21,7 +21,6 @@
-export([parse_cookie/0, get_cookie_value/1]).
-export([serve_file/2, serve_file/3]).
-export([accepted_encodings/1]).
--export([test/0]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
@@ -40,8 +39,8 @@
%% @type response(). A mochiweb_response parameterized module instance.
%% @type ioheaders() = headers() | [{key(), value()}].
-% 5 minute default idle timeout
--define(IDLE_TIMEOUT, 300000).
+% 10 second default idle timeout
+-define(IDLE_TIMEOUT, 10000).
% Maximum recv_body() length of 1MB
-define(MAX_RECV_BODY, (1024*1024)).
@@ -54,12 +53,23 @@ get_header_value(K) ->
get_primary_header_value(K) ->
mochiweb_headers:get_primary_value(K, Headers).
-%% @type field() = socket | method | raw_path | version | headers | peer | path | body_length | range
+%% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
%% @spec get(field()) -> term()
-%% @doc Return the internal representation of the given field.
+%% @doc Return the internal representation of the given field. If
+%%      <code>socket</code> is requested on an HTTPS connection, then
+%%      an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
+%%      You can use <code>SslSocket</code> with the <code>ssl</code>
+%%      application, e.g. <code>ssl:peercert(SslSocket)</code>.
get(socket) ->
Socket;
+get(scheme) ->
+ case mochiweb_socket:type(Socket) of
+ plain ->
+ http;
+ ssl ->
+ https
+ end;
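A hedged sketch of how a handler might use the new scheme field together with the tagged ssl socket; loop/1, the response bodies, and the redirect host are hypothetical, and the call style assumes MochiWeb's parameterized request modules:

    loop(Req) ->
        case Req:get(scheme) of
            https ->
                {ssl, SslSocket} = Req:get(socket),
                %% ssl:peercert/1 only returns a certificate if the listener
                %% asked the client for one
                _Peer = ssl:peercert(SslSocket),
                Req:ok({"text/plain", "hello over TLS"});
            http ->
                Location = "https://example.org" ++ Req:get(raw_path),
                Req:respond({302, [{"Location", Location}], ""})
        end.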
get(method) ->
Method;
get(raw_path) ->
@@ -69,7 +79,7 @@ get(version) ->
get(headers) ->
Headers;
get(peer) ->
- case inet:peername(Socket) of
+ case mochiweb_socket:peername(Socket) of
{ok, {Addr={10, _, _, _}, _Port}} ->
case get_header_value("x-forwarded-for") of
undefined ->
@@ -85,7 +95,9 @@ get(peer) ->
string:strip(lists:last(string:tokens(Hosts, ",")))
end;
{ok, {Addr, _Port}} ->
- inet_parse:ntoa(Addr)
+ inet_parse:ntoa(Addr);
+ {error, enotconn} ->
+ exit(normal)
end;
get(path) ->
case erlang:get(?SAVE_PATH) of
@@ -98,13 +110,20 @@ get(path) ->
Cached
end;
get(body_length) ->
- erlang:get(?SAVE_BODY_LENGTH);
+ case erlang:get(?SAVE_BODY_LENGTH) of
+ undefined ->
+ BodyLength = body_length(),
+ put(?SAVE_BODY_LENGTH, {cached, BodyLength}),
+ BodyLength;
+ {cached, Cached} ->
+ Cached
+ end;
get(range) ->
case get_header_value(range) of
undefined ->
undefined;
RawRange ->
- parse_range_request(RawRange)
+ mochiweb_http:parse_range_request(RawRange)
end.
%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
@@ -119,7 +138,7 @@ dump() ->
%% @spec send(iodata()) -> ok
%% @doc Send data over the socket.
send(Data) ->
- case gen_tcp:send(Socket, Data) of
+ case mochiweb_socket:send(Socket, Data) of
ok ->
ok;
_ ->
@@ -136,7 +155,7 @@ recv(Length) ->
%% @doc Receive Length bytes from the client as a binary, with the given
%% Timeout in msec.
recv(Length, Timeout) ->
- case gen_tcp:recv(Socket, Length, Timeout) of
+ case mochiweb_socket:recv(Socket, Length, Timeout) of
{ok, Data} ->
put(?SAVE_RECV, true),
Data;
@@ -172,20 +191,24 @@ recv_body() ->
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%% Will receive up to MaxBody bytes.
recv_body(MaxBody) ->
- % we could use a sane constant for max chunk size
- Body = stream_body(?MAX_RECV_BODY, fun
- ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
- iolist_to_binary(lists:reverse(BinAcc));
- ({Length, Bin}, {LengthAcc, BinAcc}) ->
- NewLength = Length + LengthAcc,
- if NewLength > MaxBody ->
- exit({body_too_large, chunked});
- true ->
- {NewLength, [Bin | BinAcc]}
- end
- end, {0, []}, MaxBody),
- put(?SAVE_BODY, Body),
- Body.
+ case erlang:get(?SAVE_BODY) of
+ undefined ->
+ % we could use a sane constant for max chunk size
+ Body = stream_body(?MAX_RECV_BODY, fun
+ ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
+ iolist_to_binary(lists:reverse(BinAcc));
+ ({Length, Bin}, {LengthAcc, BinAcc}) ->
+ NewLength = Length + LengthAcc,
+ if NewLength > MaxBody ->
+ exit({body_too_large, chunked});
+ true ->
+ {NewLength, [Bin | BinAcc]}
+ end
+ end, {0, []}, MaxBody),
+ put(?SAVE_BODY, Body),
+ Body;
+ Cached -> Cached
+ end.
stream_body(MaxChunkSize, ChunkFun, FunState) ->
stream_body(MaxChunkSize, ChunkFun, FunState, undefined).
@@ -242,7 +265,7 @@ start_response({Code, ResponseHeaders}) ->
%% ResponseHeaders.
start_raw_response({Code, ResponseHeaders}) ->
F = fun ({K, V}, Acc) ->
- [make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
+ [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
end,
End = lists:foldl(F, [<<"\r\n">>],
mochiweb_headers:to_list(ResponseHeaders)),
@@ -266,13 +289,13 @@ start_response_length({Code, ResponseHeaders, Length}) ->
%% will be set by the Body length, and the server will insert header
%% defaults.
respond({Code, ResponseHeaders, {file, IoDevice}}) ->
- Length = iodevice_size(IoDevice),
+ Length = mochiweb_io:iodevice_size(IoDevice),
Response = start_response_length({Code, ResponseHeaders, Length}),
case Method of
'HEAD' ->
ok;
_ ->
- iodevice_stream(IoDevice)
+ mochiweb_io:iodevice_stream(fun send/1, IoDevice)
end,
Response;
respond({Code, ResponseHeaders, chunked}) ->
@@ -327,8 +350,12 @@ ok({ContentType, Body}) ->
ok({ContentType, ResponseHeaders, Body}) ->
HResponse = mochiweb_headers:make(ResponseHeaders),
case THIS:get(range) of
- X when X =:= undefined; X =:= fail ->
- HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
+ X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
+ %% http://code.google.com/p/mochiweb/issues/detail?id=54
+        %% Range headers are not supported for chunked responses; return
+        %% 200 and provide the full response.
+ HResponse1 = mochiweb_headers:enter("Content-Type", ContentType,
+ HResponse),
respond({200, HResponse1, Body});
Ranges ->
{PartList, Size} = range_parts(Body, Ranges),
@@ -341,7 +368,7 @@ ok({ContentType, ResponseHeaders, Body}) ->
respond({200, HResponse1, Body});
PartList ->
{RangeHeaders, RangeBody} =
- parts_to_body(PartList, ContentType, Size),
+ mochiweb_multipart:parts_to_body(PartList, ContentType, Size),
HResponse1 = mochiweb_headers:enter_from_list(
[{"Accept-Ranges", "bytes"} |
RangeHeaders],
@@ -458,26 +485,23 @@ stream_chunked_body(MaxChunkSize, Fun, FunState) ->
stream_unchunked_body(0, Fun, FunState) ->
Fun({0, <<>>}, FunState);
stream_unchunked_body(Length, Fun, FunState) when Length > 0 ->
- Bin = recv(0),
- BinSize = byte_size(Bin),
- if BinSize > Length ->
- <<OurBody:Length/binary, Extra/binary>> = Bin,
- gen_tcp:unrecv(Socket, Extra),
- NewState = Fun({Length, OurBody}, FunState),
- stream_unchunked_body(0, Fun, NewState);
- true ->
- NewState = Fun({BinSize, Bin}, FunState),
- stream_unchunked_body(Length - BinSize, Fun, NewState)
- end.
-
+ PktSize = case Length > ?RECBUF_SIZE of
+ true ->
+ ?RECBUF_SIZE;
+ false ->
+ Length
+ end,
+ Bin = recv(PktSize),
+ NewState = Fun({PktSize, Bin}, FunState),
+ stream_unchunked_body(Length - PktSize, Fun, NewState).
%% @spec read_chunk_length() -> integer()
%% @doc Read the length of the next HTTP chunk.
read_chunk_length() ->
- inet:setopts(Socket, [{packet, line}]),
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
+ case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
{ok, Header} ->
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
Splitter = fun (C) ->
C =/= $\r andalso C =/= $\n andalso C =/= $
end,
@@ -491,9 +515,9 @@ read_chunk_length() ->
 %% @doc Read in an HTTP chunk of the given length. If Length is 0, then read the
%% HTTP footers (as a list of binaries, since they're nominal).
read_chunk(0) ->
- inet:setopts(Socket, [{packet, line}]),
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
F = fun (F1, Acc) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
{ok, <<"\r\n">>} ->
Acc;
{ok, Footer} ->
@@ -503,10 +527,11 @@ read_chunk(0) ->
end
end,
Footers = F(F, []),
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ put(?SAVE_RECV, true),
Footers;
read_chunk(Length) ->
- case gen_tcp:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
{ok, <<Chunk:Length/binary, "\r\n">>} ->
Chunk;
_ ->
@@ -601,13 +626,6 @@ server_headers() ->
[{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
{"Date", httpd_util:rfc1123_date()}].
-make_io(Atom) when is_atom(Atom) ->
- atom_to_list(Atom);
-make_io(Integer) when is_integer(Integer) ->
- integer_to_list(Integer);
-make_io(Io) when is_list(Io); is_binary(Io) ->
- Io.
-
make_code(X) when is_integer(X) ->
[integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
@@ -618,56 +636,10 @@ make_version({1, 0}) ->
make_version(_) ->
<<"HTTP/1.1 ">>.
-iodevice_stream(IoDevice) ->
- case file:read(IoDevice, ?READ_SIZE) of
- eof ->
- ok;
- {ok, Data} ->
- ok = send(Data),
- iodevice_stream(IoDevice)
- end.
-
-
-parts_to_body([{Start, End, Body}], ContentType, Size) ->
- %% return body for a range reponse with a single body
- HeaderList = [{"Content-Type", ContentType},
- {"Content-Range",
- ["bytes ",
- make_io(Start), "-", make_io(End),
- "/", make_io(Size)]}],
- {HeaderList, Body};
-parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
- %% return
- %% header Content-Type: multipart/byteranges; boundary=441934886133bdee4
- %% and multipart body
- Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
- HeaderList = [{"Content-Type",
- ["multipart/byteranges; ",
- "boundary=", Boundary]}],
- MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
-
- {HeaderList, MultiPartBody}.
-
-multipart_body([], _ContentType, Boundary, _Size) ->
- ["--", Boundary, "--\r\n"];
-multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
- ["--", Boundary, "\r\n",
- "Content-Type: ", ContentType, "\r\n",
- "Content-Range: ",
- "bytes ", make_io(Start), "-", make_io(End),
- "/", make_io(Size), "\r\n\r\n",
- Body, "\r\n"
- | multipart_body(BodyList, ContentType, Boundary, Size)].
-
-iodevice_size(IoDevice) ->
- {ok, Size} = file:position(IoDevice, eof),
- {ok, 0} = file:position(IoDevice, bof),
- Size.
-
range_parts({file, IoDevice}, Ranges) ->
- Size = iodevice_size(IoDevice),
+ Size = mochiweb_io:iodevice_size(IoDevice),
F = fun (Spec, Acc) ->
- case range_skip_length(Spec, Size) of
+ case mochiweb_http:range_skip_length(Spec, Size) of
invalid_range ->
Acc;
V ->
@@ -685,7 +657,7 @@ range_parts(Body0, Ranges) ->
Body = iolist_to_binary(Body0),
Size = size(Body),
F = fun(Spec, Acc) ->
- case range_skip_length(Spec, Size) of
+ case mochiweb_http:range_skip_length(Spec, Size) of
invalid_range ->
Acc;
{Skip, Length} ->
@@ -695,45 +667,8 @@ range_parts(Body0, Ranges) ->
end,
{lists:foldr(F, [], Ranges), Size}.
-range_skip_length(Spec, Size) ->
- case Spec of
- {none, R} when R =< Size, R >= 0 ->
- {Size - R, R};
- {none, _OutOfRange} ->
- {0, Size};
- {R, none} when R >= 0, R < Size ->
- {R, Size - R};
- {_OutOfRange, none} ->
- invalid_range;
- {Start, End} when 0 =< Start, Start =< End, End < Size ->
- {Start, End - Start + 1};
- {_OutOfRange, _End} ->
- invalid_range
- end.
-
-parse_range_request(RawRange) when is_list(RawRange) ->
- try
- "bytes=" ++ RangeString = RawRange,
- Ranges = string:tokens(RangeString, ","),
- lists:map(fun ("-" ++ V) ->
- {none, list_to_integer(V)};
- (R) ->
- case string:tokens(R, "-") of
- [S1, S2] ->
- {list_to_integer(S1), list_to_integer(S2)};
- [S] ->
- {list_to_integer(S), none}
- end
- end,
- Ranges)
- catch
- _:_ ->
- fail
- end.
-
-%% @spec accepted_encodings([encoding()]) -> [encoding()] | error()
-%% @type encoding() -> string()
-%% @type error() -> bad_accept_encoding_value
+%% @spec accepted_encodings([encoding()]) -> [encoding()] | bad_accept_encoding_value
+%% @type encoding() = string().
%%
%% @doc Returns a list of encodings accepted by a request. Encodings that are
%% not supported by the server will not be included in the return list.
@@ -772,96 +707,9 @@ accepted_encodings(SupportedEncodings) ->
)
end.
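An illustrative caller for accepted_encodings/1 (handle/1 and the chosen status codes are only a sketch, again in the parameterized-module call style):

    handle(Req) ->
        case Req:accepted_encodings(["gzip", "identity"]) of
            bad_accept_encoding_value ->
                Req:respond({400, [], []});
            [] ->
                Req:respond({406, [], []});
            [Encoding | _] ->
                %% Encoding is an encoding both the client and the server
                %% accept, e.g. "gzip" for
                %% "Accept-Encoding: gzip;q=1.0, identity;q=0.5"
                Req:ok({"text/plain", [{"Content-Encoding", Encoding}],
                        <<"body goes here">>})
        end.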
-test() ->
- ok = test_range(),
- ok.
-
-test_range() ->
- %% valid, single ranges
- io:format("Testing parse_range_request with valid single ranges~n"),
- io:format("1"),
- [{20, 30}] = parse_range_request("bytes=20-30"),
- io:format("2"),
- [{20, none}] = parse_range_request("bytes=20-"),
- io:format("3"),
- [{none, 20}] = parse_range_request("bytes=-20"),
- io:format(".. ok ~n"),
-
- %% invalid, single ranges
- io:format("Testing parse_range_request with invalid ranges~n"),
- io:format("1"),
- fail = parse_range_request(""),
- io:format("2"),
- fail = parse_range_request("garbage"),
- io:format("3"),
- fail = parse_range_request("bytes=-20-30"),
- io:format(".. ok ~n"),
-
- %% valid, multiple range
- io:format("Testing parse_range_request with valid multiple ranges~n"),
- io:format("1"),
- [{20, 30}, {50, 100}, {110, 200}] =
- parse_range_request("bytes=20-30,50-100,110-200"),
- io:format("2"),
- [{20, none}, {50, 100}, {none, 200}] =
- parse_range_request("bytes=20-,50-100,-200"),
- io:format(".. ok~n"),
-
- %% no ranges
- io:format("Testing out parse_range_request with no ranges~n"),
- io:format("1"),
- [] = parse_range_request("bytes="),
- io:format(".. ok~n"),
-
- Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
- BodySize = byte_size(Body), %% 60
- BodySize = 60,
-
- %% these values assume BodySize =:= 60
- io:format("Testing out range_skip_length on valid ranges~n"),
- io:format("1"),
- {1,9} = range_skip_length({1,9}, BodySize), %% 1-9
- io:format("2"),
- {10,10} = range_skip_length({10,19}, BodySize), %% 10-19
- io:format("3"),
- {40, 20} = range_skip_length({none, 20}, BodySize), %% -20
- io:format("4"),
- {30, 30} = range_skip_length({30, none}, BodySize), %% 30-
- io:format(".. ok ~n"),
-
- %% valid edge cases for range_skip_length
- io:format("Testing out range_skip_length on valid edge case ranges~n"),
- io:format("1"),
- {BodySize, 0} = range_skip_length({none, 0}, BodySize),
- io:format("2"),
- {0, BodySize} = range_skip_length({none, BodySize}, BodySize),
- io:format("3"),
- {0, BodySize} = range_skip_length({0, none}, BodySize),
- BodySizeLess1 = BodySize - 1,
- io:format("4"),
- {BodySizeLess1, 1} = range_skip_length({BodySize - 1, none}, BodySize),
-
- %% out of range, return whole thing
- io:format("5"),
- {0, BodySize} = range_skip_length({none, BodySize + 1}, BodySize),
- io:format("6"),
- {0, BodySize} = range_skip_length({none, -1}, BodySize),
- io:format(".. ok ~n"),
-
- %% invalid ranges
- io:format("Testing out range_skip_length on invalid ranges~n"),
- io:format("1"),
- invalid_range = range_skip_length({-1, 30}, BodySize),
- io:format("2"),
- invalid_range = range_skip_length({0, BodySize + 1}, BodySize),
- io:format("3"),
- invalid_range = range_skip_length({-1, BodySize + 1}, BodySize),
- io:format("4"),
- invalid_range = range_skip_length({BodySize, 40}, BodySize),
- io:format("5"),
- invalid_range = range_skip_length({-1, none}, BodySize),
- io:format("6"),
- invalid_range = range_skip_length({BodySize, none}, BodySize),
- io:format(".. ok ~n"),
- ok.
-
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_response.erl b/src/mochiweb/mochiweb_response.erl
index 6285c4c4..ab8ee61c 100644
--- a/src/mochiweb/mochiweb_response.erl
+++ b/src/mochiweb/mochiweb_response.erl
@@ -54,3 +54,11 @@ write_chunk(Data) ->
_ ->
send(Data)
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_skel.erl b/src/mochiweb/mochiweb_skel.erl
index 36b48be5..76eefa60 100644
--- a/src/mochiweb/mochiweb_skel.erl
+++ b/src/mochiweb/mochiweb_skel.erl
@@ -14,10 +14,11 @@ skelcopy(DestDir, Name) ->
N + 1
end,
skelcopy(src(), DestDir, Name, LDst),
+ DestLink = filename:join([DestDir, Name, "deps", "mochiweb-src"]),
+ ok = filelib:ensure_dir(DestLink),
ok = file:make_symlink(
- filename:join(filename:dirname(code:which(?MODULE)), ".."),
- filename:join([DestDir, Name, "deps", "mochiweb-src"])).
-
+ filename:join(filename:dirname(code:which(?MODULE)), ".."),
+ DestLink).
%% Internal API
@@ -37,17 +38,22 @@ skelcopy(Src, DestDir, Name, LDst) ->
EDst = lists:nthtail(LDst, Dir),
ok = ensuredir(Dir),
ok = file:write_file_info(Dir, #file_info{mode=Mode}),
- {ok, Files} = file:list_dir(Src),
- io:format("~s/~n", [EDst]),
- lists:foreach(fun ("." ++ _) -> ok;
- (F) ->
- skelcopy(filename:join(Src, F),
- Dir,
- Name,
- LDst)
- end,
- Files),
- ok;
+ case filename:basename(Src) of
+ "ebin" ->
+ ok;
+ _ ->
+ {ok, Files} = file:list_dir(Src),
+ io:format("~s/~n", [EDst]),
+ lists:foreach(fun ("." ++ _) -> ok;
+ (F) ->
+ skelcopy(filename:join(Src, F),
+ Dir,
+ Name,
+ LDst)
+ end,
+ Files),
+ ok
+ end;
{ok, #file_info{type=regular, mode=Mode}} ->
OutFile = filename:join(DestDir, Dest),
{ok, B} = file:read_file(Src),
@@ -71,3 +77,10 @@ ensuredir(Dir) ->
E ->
E
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_socket.erl b/src/mochiweb/mochiweb_socket.erl
new file mode 100644
index 00000000..76b018c8
--- /dev/null
+++ b/src/mochiweb/mochiweb_socket.erl
@@ -0,0 +1,84 @@
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc MochiWeb socket - wrapper for plain and ssl sockets.
+
+-module(mochiweb_socket).
+
+-export([listen/4, accept/1, recv/3, send/2, close/1, port/1, peername/1,
+ setopts/2, type/1]).
+
+-define(ACCEPT_TIMEOUT, 2000).
+
+listen(Ssl, Port, Opts, SslOpts) ->
+ case Ssl of
+ true ->
+ case ssl:listen(Port, Opts ++ SslOpts) of
+ {ok, ListenSocket} ->
+ {ok, {ssl, ListenSocket}};
+ {error, _} = Err ->
+ Err
+ end;
+ false ->
+ gen_tcp:listen(Port, Opts)
+ end.
+
+accept({ssl, ListenSocket}) ->
+ % There's a bug in ssl:transport_accept/2 at the moment, which is the
+ % reason for the try...catch block. Should be fixed in OTP R14.
+ try ssl:transport_accept(ListenSocket) of
+ {ok, Socket} ->
+ case ssl:ssl_accept(Socket) of
+ ok ->
+ {ok, {ssl, Socket}};
+ {error, _} = Err ->
+ Err
+ end;
+ {error, _} = Err ->
+ Err
+ catch
+ error:{badmatch, {error, Reason}} ->
+ {error, Reason}
+ end;
+accept(ListenSocket) ->
+ gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT).
+
+recv({ssl, Socket}, Length, Timeout) ->
+ ssl:recv(Socket, Length, Timeout);
+recv(Socket, Length, Timeout) ->
+ gen_tcp:recv(Socket, Length, Timeout).
+
+send({ssl, Socket}, Data) ->
+ ssl:send(Socket, Data);
+send(Socket, Data) ->
+ gen_tcp:send(Socket, Data).
+
+close({ssl, Socket}) ->
+ ssl:close(Socket);
+close(Socket) ->
+ gen_tcp:close(Socket).
+
+port({ssl, Socket}) ->
+ case ssl:sockname(Socket) of
+ {ok, {_, Port}} ->
+ {ok, Port};
+ {error, _} = Err ->
+ Err
+ end;
+port(Socket) ->
+ inet:port(Socket).
+
+peername({ssl, Socket}) ->
+ ssl:peername(Socket);
+peername(Socket) ->
+ inet:peername(Socket).
+
+setopts({ssl, Socket}, Opts) ->
+ ssl:setopts(Socket, Opts);
+setopts(Socket, Opts) ->
+ inet:setopts(Socket, Opts).
+
+type({ssl, _}) ->
+ ssl;
+type(_) ->
+ plain.
+
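A small sketch of why this wrapper exists: handler code written against mochiweb_socket works for both transports, because ssl sockets travel as {ssl, Socket} tuples. echo/1 below is illustrative, in the spirit of mochiweb_echo:

    echo(Socket) ->
        mochiweb_socket:setopts(Socket, [{packet, raw}]),
        case mochiweb_socket:recv(Socket, 0, 30000) of
            {ok, Data} ->
                ok = mochiweb_socket:send(Socket, Data),
                echo(Socket);
            _Other ->
                mochiweb_socket:close(Socket)
        end.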
diff --git a/src/mochiweb/mochiweb_socket_server.erl b/src/mochiweb/mochiweb_socket_server.erl
index 7aafe290..1aae09ac 100644
--- a/src/mochiweb/mochiweb_socket_server.erl
+++ b/src/mochiweb/mochiweb_socket_server.erl
@@ -7,22 +7,28 @@
-author('bob@mochimedia.com').
-behaviour(gen_server).
+-include("internal.hrl").
+
-export([start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
handle_info/2]).
-export([get/2]).
--export([acceptor_loop/1]).
-
-record(mochiweb_socket_server,
{port,
loop,
name=undefined,
+ %% NOTE: This is currently ignored.
max=2048,
ip=any,
listen=null,
- acceptor=null,
- backlog=128}).
+ nodelay=false,
+ backlog=128,
+ active_sockets=0,
+ acceptor_pool_size=16,
+ ssl=false,
+ ssl_opts=[{ssl_imp, new}],
+ acceptor_pool=sets:new()}).
start(State=#mochiweb_socket_server{}) ->
start_server(State);
@@ -54,6 +60,8 @@ parse_options([], State) ->
parse_options([{name, L} | Rest], State) when is_list(L) ->
Name = {local, list_to_atom(L)},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
+parse_options([{name, A} | Rest], State) when A =:= undefined ->
+ parse_options(Rest, State#mochiweb_socket_server{name=A});
parse_options([{name, A} | Rest], State) when is_atom(A) ->
Name = {local, A},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
@@ -79,16 +87,32 @@ parse_options([{loop, Loop} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{loop=Loop});
parse_options([{backlog, Backlog} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
+parse_options([{nodelay, NoDelay} | Rest], State) ->
+ parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
+parse_options([{acceptor_pool_size, Max} | Rest], State) ->
+ MaxInt = ensure_int(Max),
+ parse_options(Rest,
+ State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
parse_options([{max, Max} | Rest], State) ->
- MaxInt = case Max of
- Max when is_list(Max) ->
- list_to_integer(Max);
- Max when is_integer(Max) ->
- Max
- end,
- parse_options(Rest, State#mochiweb_socket_server{max=MaxInt}).
-
-start_server(State=#mochiweb_socket_server{name=Name}) ->
+ error_logger:info_report([{warning, "TODO: max is currently unsupported"},
+ {max, Max}]),
+ MaxInt = ensure_int(Max),
+ parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
+parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
+ parse_options(Rest, State#mochiweb_socket_server{ssl=Ssl});
+parse_options([{ssl_opts, SslOpts} | Rest], State) when is_list(SslOpts) ->
+ SslOpts1 = [{ssl_imp, new} | proplists:delete(ssl_imp, SslOpts)],
+ parse_options(Rest, State#mochiweb_socket_server{ssl_opts=SslOpts1}).
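A sketch of the options these clauses enable, along the lines of ssl_cert_opts/0 in the mochiweb_multipart tests; the PEM file names and the trivial loop are placeholders:

    {ok, Server} = mochiweb_socket_server:start(
                     [{ip, "127.0.0.1"}, {port, 0},
                      {ssl, true},
                      {ssl_opts, [{certfile, "server_cert.pem"},
                                  {keyfile, "server_key.pem"}]},
                      {loop, fun (Socket) ->
                                 ok = mochiweb_socket:send(Socket, <<"hello\r\n">>)
                             end}]),
    Port = mochiweb_socket_server:get(Server, port),
    %% start_server/1 below starts the crypto, public_key and ssl
    %% applications when ssl is true.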
+
+start_server(State=#mochiweb_socket_server{ssl=Ssl, name=Name}) ->
+ case Ssl of
+ true ->
+ application:start(crypto),
+ application:start(public_key),
+ application:start(ssl);
+ false ->
+ void
+ end,
case Name of
undefined ->
gen_server:start_link(?MODULE, State, []);
@@ -96,6 +120,11 @@ start_server(State=#mochiweb_socket_server{name=Name}) ->
gen_server:start_link(Name, ?MODULE, State, [])
end.
+ensure_int(N) when is_integer(N) ->
+ N;
+ensure_int(S) when is_list(S) ->
+    list_to_integer(S).
+
ipv6_supported() ->
case (catch inet:getaddr("localhost", inet6)) of
{ok, _Addr} ->
@@ -104,15 +133,15 @@ ipv6_supported() ->
false
end.
-init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
+init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
process_flag(trap_exit, true),
BaseOpts = [binary,
{reuseaddr, true},
{packet, 0},
{backlog, Backlog},
- {recbuf, 8192},
+ {recbuf, ?RECBUF_SIZE},
{active, false},
- {nodelay, true}],
+ {nodelay, NoDelay}],
Opts = case Ip of
any ->
case ipv6_supported() of % IPv4, and IPv6 if supported
@@ -124,7 +153,7 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
{_, _, _, _, _, _, _, _} -> % IPv6
[inet6, {ip, Ip} | BaseOpts]
end,
- case gen_tcp_listen(Port, Opts, State) of
+ case listen(Port, Opts, State) of
{stop, eacces} ->
case Port < 1024 of
true ->
@@ -132,7 +161,7 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
{ok, _} ->
case fdsrv:bind_socket(tcp, Port) of
{ok, Fd} ->
- gen_tcp_listen(Port, [{fd, Fd} | Opts], State);
+ listen(Port, [{fd, Fd} | Opts], State);
_ ->
{stop, fdsrv_bind_failed}
end;
@@ -146,47 +175,33 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
Other
end.
-gen_tcp_listen(Port, Opts, State) ->
- case gen_tcp:listen(Port, Opts) of
+new_acceptor_pool(Listen,
+ State=#mochiweb_socket_server{acceptor_pool=Pool,
+ acceptor_pool_size=Size,
+ loop=Loop}) ->
+ F = fun (_, S) ->
+ Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
+ sets:add_element(Pid, S)
+ end,
+ Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
+ State#mochiweb_socket_server{acceptor_pool=Pool1}.
+
+listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
+ case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
{ok, Listen} ->
- {ok, ListenPort} = inet:port(Listen),
- {ok, new_acceptor(State#mochiweb_socket_server{listen=Listen,
- port=ListenPort})};
+ {ok, ListenPort} = mochiweb_socket:port(Listen),
+ {ok, new_acceptor_pool(
+ Listen,
+ State#mochiweb_socket_server{listen=Listen,
+ port=ListenPort})};
{error, Reason} ->
{stop, Reason}
end.
-new_acceptor(State=#mochiweb_socket_server{max=0}) ->
- io:format("Not accepting new connections~n"),
- State#mochiweb_socket_server{acceptor=null};
-new_acceptor(State=#mochiweb_socket_server{listen=Listen,loop=Loop}) ->
- Pid = proc_lib:spawn_link(?MODULE, acceptor_loop,
- [{self(), Listen, Loop}]),
- State#mochiweb_socket_server{acceptor=Pid}.
-
-call_loop({M, F}, Socket) ->
- M:F(Socket);
-call_loop(Loop, Socket) ->
- Loop(Socket).
-
-acceptor_loop({Server, Listen, Loop}) ->
- case catch gen_tcp:accept(Listen) of
- {ok, Socket} ->
- gen_server:cast(Server, {accepted, self()}),
- call_loop(Loop, Socket);
- {error, closed} ->
- exit({error, closed});
- Other ->
- error_logger:error_report(
- [{application, mochiweb},
- "Accept failed error",
- lists:flatten(io_lib:format("~p", [Other]))]),
- exit({error, accept_failed})
- end.
-
-
do_get(port, #mochiweb_socket_server{port=Port}) ->
- Port.
+ Port;
+do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
+ ActiveSockets.
handle_call({get, Property}, _From, State) ->
Res = do_get(Property, State),
@@ -195,16 +210,15 @@ handle_call(_Message, _From, State) ->
Res = error,
{reply, Res, State}.
-handle_cast({accepted, Pid},
- State=#mochiweb_socket_server{acceptor=Pid, max=Max}) ->
- % io:format("accepted ~p~n", [Pid]),
- State1 = State#mochiweb_socket_server{max=Max - 1},
- {noreply, new_acceptor(State1)};
+handle_cast({accepted, Pid, _Timing},
+ State=#mochiweb_socket_server{active_sockets=ActiveSockets}) ->
+ State1 = State#mochiweb_socket_server{active_sockets=1 + ActiveSockets},
+ {noreply, recycle_acceptor(Pid, State1)};
handle_cast(stop, State) ->
{stop, normal, State}.
terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
- gen_tcp:close(Listen),
+ mochiweb_socket:close(Listen),
case Port < 1024 of
true ->
catch fdsrv:stop(),
@@ -216,33 +230,43 @@ terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
code_change(_OldVsn, State, _Extra) ->
State.
-handle_info({'EXIT', Pid, normal},
- State=#mochiweb_socket_server{acceptor=Pid}) ->
- % io:format("normal acceptor down~n"),
- {noreply, new_acceptor(State)};
+recycle_acceptor(Pid, State=#mochiweb_socket_server{
+ acceptor_pool=Pool,
+ listen=Listen,
+ loop=Loop,
+ active_sockets=ActiveSockets}) ->
+ case sets:is_element(Pid, Pool) of
+ true ->
+ Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
+ Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
+ State#mochiweb_socket_server{acceptor_pool=Pool1};
+ false ->
+ State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
+ end.
+
+handle_info({'EXIT', Pid, normal}, State) ->
+ {noreply, recycle_acceptor(Pid, State)};
handle_info({'EXIT', Pid, Reason},
- State=#mochiweb_socket_server{acceptor=Pid}) ->
- error_logger:error_report({?MODULE, ?LINE,
- {acceptor_error, Reason}}),
- timer:sleep(100),
- {noreply, new_acceptor(State)};
-handle_info({'EXIT', _LoopPid, Reason},
- State=#mochiweb_socket_server{acceptor=Pid, max=Max}) ->
- case Reason of
- normal ->
- ok;
- _ ->
+ State=#mochiweb_socket_server{acceptor_pool=Pool}) ->
+ case sets:is_element(Pid, Pool) of
+ true ->
+ %% If there was an unexpected error accepting, log and sleep.
error_logger:error_report({?MODULE, ?LINE,
- {child_error, Reason}})
+ {acceptor_error, Reason}}),
+ timer:sleep(100);
+ false ->
+ ok
end,
- State1 = State#mochiweb_socket_server{max=Max + 1},
- State2 = case Pid of
- null ->
- new_acceptor(State1);
- _ ->
- State1
- end,
- {noreply, State2};
+ {noreply, recycle_acceptor(Pid, State)};
handle_info(Info, State) ->
error_logger:info_report([{'INFO', Info}, {'State', State}]),
{noreply, State}.
+
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_sup.erl b/src/mochiweb/mochiweb_sup.erl
index 5cb525b5..af7df9b3 100644
--- a/src/mochiweb/mochiweb_sup.erl
+++ b/src/mochiweb/mochiweb_sup.erl
@@ -32,3 +32,10 @@ upgrade() ->
init([]) ->
Processes = [],
{ok, {{one_for_one, 10, 10}, Processes}}.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_util.erl b/src/mochiweb/mochiweb_util.erl
index d8fc89d5..d1cc59de 100644
--- a/src/mochiweb/mochiweb_util.erl
+++ b/src/mochiweb/mochiweb_util.erl
@@ -9,11 +9,11 @@
-export([path_split/1]).
-export([urlsplit/1, urlsplit_path/1, urlunsplit/1, urlunsplit_path/1]).
-export([guess_mime/1, parse_header/1]).
--export([shell_quote/1, cmd/1, cmd_string/1, cmd_port/2]).
+-export([shell_quote/1, cmd/1, cmd_string/1, cmd_port/2, cmd_status/1]).
-export([record_to_proplist/2, record_to_proplist/3]).
-export([safe_relative_path/1, partition/2]).
-export([parse_qvalues/1, pick_accepted_encodings/3]).
--export([test/0]).
+-export([make_io/1]).
-define(PERCENT, 37). % $\%
-define(FULLSTOP, 46). % $\.
@@ -115,11 +115,32 @@ cmd(Argv) ->
%% @spec cmd_string([string()]) -> string()
%% @doc Create a shell quoted command string from a list of arguments.
cmd_string(Argv) ->
- join([shell_quote(X) || X <- Argv], " ").
+ string:join([shell_quote(X) || X <- Argv], " ").
+
+%% @spec cmd_status([string()]) -> {ExitStatus::integer(), Stdout::binary()}
+%% @doc Accumulate the output and exit status from the given application,
+%%      which will be spawned with cmd_port/2.
+cmd_status(Argv) ->
+ Port = cmd_port(Argv, [exit_status, stderr_to_stdout,
+ use_stdio, binary]),
+ try cmd_loop(Port, [])
+ after catch port_close(Port)
+ end.
+
+%% @spec cmd_loop(port(), list()) -> {ExitStatus::integer(), Stdout::binary()}
+%% @doc Accumulate the output and exit status from a port.
+cmd_loop(Port, Acc) ->
+ receive
+ {Port, {exit_status, Status}} ->
+ {Status, iolist_to_binary(lists:reverse(Acc))};
+ {Port, {data, Data}} ->
+ cmd_loop(Port, [Data | Acc])
+ end.
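For reference, a shell-session sketch of the new cmd_status/1 (the first call is essentially what cmd_status_test below exercises; it assumes echo and false executables on the PATH):

    1> mochiweb_util:cmd_status(["echo", "hello couch"]).
    {0,<<"hello couch\n">>}
    2> mochiweb_util:cmd_status(["false"]).
    {1,<<>>}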
-%% @spec join([string()], Separator) -> string()
-%% @doc Join a list of strings together with the given separator
-%% string or char.
+%% @spec join([iolist()], iolist()) -> iolist()
+%% @doc Join a list of strings or binaries together with the given separator
+%%      string, char, or binary. The output is flattened, but may be an
+%% iolist() instead of a string() if any of the inputs are binary().
join([], _Separator) ->
[];
join([S], _Separator) ->
@@ -160,10 +181,11 @@ quote_plus([C | Rest], Acc) ->
%% @spec urlencode([{Key, Value}]) -> string()
%% @doc URL encode the property list.
urlencode(Props) ->
- RevPairs = lists:foldl(fun ({K, V}, Acc) ->
- [[quote_plus(K), $=, quote_plus(V)] | Acc]
- end, [], Props),
- lists:flatten(revjoin(RevPairs, $&, [])).
+ Pairs = lists:foldr(
+ fun ({K, V}, Acc) ->
+ [quote_plus(K) ++ "=" ++ quote_plus(V) | Acc]
+ end, [], Props),
+ string:join(Pairs, "&").
%% @spec parse_qs(string() | binary()) -> [{Key, Value}]
%% @doc Parse a query string or application/x-www-form-urlencoded.
@@ -234,20 +256,31 @@ urlsplit(Url) ->
{Scheme, Netloc, Path, Query, Fragment}.
urlsplit_scheme(Url) ->
- urlsplit_scheme(Url, []).
+ case urlsplit_scheme(Url, []) of
+ no_scheme ->
+ {"", Url};
+ Res ->
+ Res
+ end.
-urlsplit_scheme([], Acc) ->
- {"", lists:reverse(Acc)};
-urlsplit_scheme(":" ++ Rest, Acc) ->
+urlsplit_scheme([C | Rest], Acc) when ((C >= $a andalso C =< $z) orelse
+ (C >= $A andalso C =< $Z) orelse
+ (C >= $0 andalso C =< $9) orelse
+ C =:= $+ orelse C =:= $- orelse
+ C =:= $.) ->
+ urlsplit_scheme(Rest, [C | Acc]);
+urlsplit_scheme([$: | Rest], Acc=[_ | _]) ->
{string:to_lower(lists:reverse(Acc)), Rest};
-urlsplit_scheme([C | Rest], Acc) ->
- urlsplit_scheme(Rest, [C | Acc]).
+urlsplit_scheme(_Rest, _Acc) ->
+ no_scheme.
urlsplit_netloc("//" ++ Rest) ->
urlsplit_netloc(Rest, []);
urlsplit_netloc(Path) ->
{"", Path}.
+urlsplit_netloc("", Acc) ->
+ {lists:reverse(Acc), ""};
urlsplit_netloc(Rest=[C | _], Acc) when C =:= $/; C =:= $?; C =:= $# ->
{lists:reverse(Acc), Rest};
urlsplit_netloc([C | Rest], Acc) ->
@@ -312,67 +345,11 @@ urlsplit_query([C | Rest], Acc) ->
%% @spec guess_mime(string()) -> string()
%% @doc Guess the mime type of a file by the extension of its filename.
guess_mime(File) ->
- case filename:extension(File) of
- ".html" ->
- "text/html";
- ".xhtml" ->
- "application/xhtml+xml";
- ".xml" ->
- "application/xml";
- ".css" ->
- "text/css";
- ".js" ->
- "application/x-javascript";
- ".jpg" ->
- "image/jpeg";
- ".gif" ->
- "image/gif";
- ".png" ->
- "image/png";
- ".swf" ->
- "application/x-shockwave-flash";
- ".zip" ->
- "application/zip";
- ".bz2" ->
- "application/x-bzip2";
- ".gz" ->
- "application/x-gzip";
- ".tar" ->
- "application/x-tar";
- ".tgz" ->
- "application/x-gzip";
- ".txt" ->
+ case mochiweb_mime:from_extension(filename:extension(File)) of
+ undefined ->
"text/plain";
- ".doc" ->
- "application/msword";
- ".pdf" ->
- "application/pdf";
- ".xls" ->
- "application/vnd.ms-excel";
- ".rtf" ->
- "application/rtf";
- ".mov" ->
- "video/quicktime";
- ".mp3" ->
- "audio/mpeg";
- ".z" ->
- "application/x-compress";
- ".wav" ->
- "audio/x-wav";
- ".ico" ->
- "image/x-icon";
- ".bmp" ->
- "image/bmp";
- ".m4a" ->
- "audio/mpeg";
- ".m3u" ->
- "audio/x-mpegurl";
- ".exe" ->
- "application/octet-stream";
- ".csv" ->
- "text/csv";
- _ ->
- "text/plain"
+ Mime ->
+ Mime
end.
%% @spec parse_header(string()) -> {Type, [{K, V}]}
@@ -436,11 +413,9 @@ shell_quote([C | Rest], Acc) when C =:= $\" orelse C =:= $\` orelse
shell_quote([C | Rest], Acc) ->
shell_quote(Rest, [C | Acc]).
-%% @spec parse_qvalues(string()) -> [qvalue()] | error()
-%% @type qvalue() -> {element(), q()}
-%% @type element() -> string()
-%% @type q() -> 0.0 .. 1.0
-%% @type error() -> invalid_qvalue_string
+%% @spec parse_qvalues(string()) -> [qvalue()] | invalid_qvalue_string
+%% @type qvalue() = {encoding(), float()}.
+%% @type encoding() = string().
%%
%% @doc Parses a list (given as a string) of elements with Q values associated
%% to them. Elements are separated by commas and each element is separated
@@ -489,11 +464,8 @@ parse_qvalues(QValuesStr) ->
invalid_qvalue_string
end.
-%% @spec pick_accepted_encodings(qvalues(), [encoding()], encoding()) ->
+%% @spec pick_accepted_encodings([qvalue()], [encoding()], encoding()) ->
%% [encoding()]
-%% @type qvalues() -> [ {encoding(), q()} ]
-%% @type encoding() -> string()
-%% @type q() -> 0.0 .. 1.0
%%
%% @doc Determines which encodings specified in the given Q values list are
%% valid according to a list of supported encodings and a default encoding.
@@ -566,46 +538,118 @@ pick_accepted_encodings(AcceptedEncs, SupportedEncs, DefaultEnc) ->
[E || E <- Accepted2, lists:member(E, SupportedEncs),
not lists:member(E, Refused1)].
-test() ->
- test_join(),
- test_quote_plus(),
- test_unquote(),
- test_urlencode(),
- test_parse_qs(),
- test_urlsplit_path(),
- test_urlunsplit_path(),
- test_urlsplit(),
- test_urlunsplit(),
- test_path_split(),
- test_guess_mime(),
- test_parse_header(),
- test_shell_quote(),
- test_cmd(),
- test_cmd_string(),
- test_partition(),
- test_safe_relative_path(),
- test_parse_qvalues(),
- test_pick_accepted_encodings(),
+make_io(Atom) when is_atom(Atom) ->
+ atom_to_list(Atom);
+make_io(Integer) when is_integer(Integer) ->
+ integer_to_list(Integer);
+make_io(Io) when is_list(Io); is_binary(Io) ->
+ Io.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+make_io_test() ->
+ ?assertEqual(
+ <<"atom">>,
+ iolist_to_binary(make_io(atom))),
+ ?assertEqual(
+ <<"20">>,
+ iolist_to_binary(make_io(20))),
+ ?assertEqual(
+ <<"list">>,
+ iolist_to_binary(make_io("list"))),
+ ?assertEqual(
+ <<"binary">>,
+ iolist_to_binary(make_io(<<"binary">>))),
+ ok.
+
+-record(test_record, {field1=f1, field2=f2}).
+record_to_proplist_test() ->
+ ?assertEqual(
+ [{'__record', test_record},
+ {field1, f1},
+ {field2, f2}],
+ record_to_proplist(#test_record{}, record_info(fields, test_record))),
+ ?assertEqual(
+ [{'typekey', test_record},
+ {field1, f1},
+ {field2, f2}],
+ record_to_proplist(#test_record{},
+ record_info(fields, test_record),
+ typekey)),
ok.
-test_shell_quote() ->
- "\"foo \\$bar\\\"\\`' baz\"" = shell_quote("foo $bar\"`' baz"),
+shell_quote_test() ->
+ ?assertEqual(
+ "\"foo \\$bar\\\"\\`' baz\"",
+ shell_quote("foo $bar\"`' baz")),
+ ok.
+
+cmd_port_test_spool(Port, Acc) ->
+ receive
+ {Port, eof} ->
+ Acc;
+ {Port, {data, {eol, Data}}} ->
+ cmd_port_test_spool(Port, ["\n", Data | Acc]);
+ {Port, Unknown} ->
+ throw({unknown, Unknown})
+ after 100 ->
+ throw(timeout)
+ end.
+
+cmd_port_test() ->
+ Port = cmd_port(["echo", "$bling$ `word`!"],
+ [eof, stream, {line, 4096}]),
+ Res = try lists:append(lists:reverse(cmd_port_test_spool(Port, [])))
+ after catch port_close(Port)
+ end,
+ self() ! {Port, wtf},
+ try cmd_port_test_spool(Port, [])
+ catch throw:{unknown, wtf} -> ok
+ end,
+ try cmd_port_test_spool(Port, [])
+ catch throw:timeout -> ok
+ end,
+ ?assertEqual(
+ "$bling$ `word`!\n",
+ Res).
+
+cmd_test() ->
+ ?assertEqual(
+ "$bling$ `word`!\n",
+ cmd(["echo", "$bling$ `word`!"])),
ok.
-test_cmd() ->
- "$bling$ `word`!\n" = cmd(["echo", "$bling$ `word`!"]),
+cmd_string_test() ->
+ ?assertEqual(
+ "\"echo\" \"\\$bling\\$ \\`word\\`!\"",
+ cmd_string(["echo", "$bling$ `word`!"])),
ok.
-test_cmd_string() ->
- "\"echo\" \"\\$bling\\$ \\`word\\`!\"" = cmd_string(["echo", "$bling$ `word`!"]),
+cmd_status_test() ->
+ ?assertEqual(
+ {0, <<"$bling$ `word`!\n">>},
+ cmd_status(["echo", "$bling$ `word`!"])),
ok.
-test_parse_header() ->
- {"multipart/form-data", [{"boundary", "AaB03x"}]} =
- parse_header("multipart/form-data; boundary=AaB03x"),
+
+parse_header_test() ->
+ ?assertEqual(
+ {"multipart/form-data", [{"boundary", "AaB03x"}]},
+ parse_header("multipart/form-data; boundary=AaB03x")),
+ %% This tests (currently) intentionally broken behavior
+ ?assertEqual(
+ {"multipart/form-data",
+ [{"b", ""},
+ {"cgi", "is"},
+ {"broken", "true\"e"}]},
+ parse_header("multipart/form-data;b=;cgi=\"i\\s;broken=true\"e;=z;z")),
ok.
-test_guess_mime() ->
+guess_mime_test() ->
"text/plain" = guess_mime(""),
"text/plain" = guess_mime(".text"),
"application/zip" = guess_mime(".zip"),
@@ -614,19 +658,22 @@ test_guess_mime() ->
"application/xhtml+xml" = guess_mime("x.xhtml"),
ok.
-test_path_split() ->
+path_split_test() ->
{"", "foo/bar"} = path_split("/foo/bar"),
{"foo", "bar"} = path_split("foo/bar"),
{"bar", ""} = path_split("bar"),
ok.
-test_urlsplit() ->
+urlsplit_test() ->
{"", "", "/foo", "", "bar?baz"} = urlsplit("/foo#bar?baz"),
{"http", "host:port", "/foo", "", "bar?baz"} =
urlsplit("http://host:port/foo#bar?baz"),
+ {"http", "host", "", "", ""} = urlsplit("http://host"),
+ {"", "", "/wiki/Category:Fruit", "", ""} =
+ urlsplit("/wiki/Category:Fruit"),
ok.
-test_urlsplit_path() ->
+urlsplit_path_test() ->
{"/foo/bar", "", ""} = urlsplit_path("/foo/bar"),
{"/foo", "baz", ""} = urlsplit_path("/foo?baz"),
{"/foo", "", "bar?baz"} = urlsplit_path("/foo#bar?baz"),
@@ -635,13 +682,13 @@ test_urlsplit_path() ->
{"/foo", "bar?baz", "baz"} = urlsplit_path("/foo?bar?baz#baz"),
ok.
-test_urlunsplit() ->
+urlunsplit_test() ->
"/foo#bar?baz" = urlunsplit({"", "", "/foo", "", "bar?baz"}),
"http://host:port/foo#bar?baz" =
urlunsplit({"http", "host:port", "/foo", "", "bar?baz"}),
ok.
-test_urlunsplit_path() ->
+urlunsplit_path_test() ->
"/foo/bar" = urlunsplit_path({"/foo/bar", "", ""}),
"/foo?baz" = urlunsplit_path({"/foo", "baz", ""}),
"/foo#bar?baz" = urlunsplit_path({"/foo", "", "bar?baz"}),
@@ -650,16 +697,28 @@ test_urlunsplit_path() ->
"/foo?bar?baz#baz" = urlunsplit_path({"/foo", "bar?baz", "baz"}),
ok.
-test_join() ->
- "foo,bar,baz" = join(["foo", "bar", "baz"], $,),
- "foo,bar,baz" = join(["foo", "bar", "baz"], ","),
- "foo bar" = join([["foo", " bar"]], ","),
- "foo bar,baz" = join([["foo", " bar"], "baz"], ","),
- "foo" = join(["foo"], ","),
- "foobarbaz" = join(["foo", "bar", "baz"], ""),
+join_test() ->
+ ?assertEqual("foo,bar,baz",
+ join(["foo", "bar", "baz"], $,)),
+ ?assertEqual("foo,bar,baz",
+ join(["foo", "bar", "baz"], ",")),
+ ?assertEqual("foo bar",
+ join([["foo", " bar"]], ",")),
+ ?assertEqual("foo bar,baz",
+ join([["foo", " bar"], "baz"], ",")),
+ ?assertEqual("foo",
+ join(["foo"], ",")),
+ ?assertEqual("foobarbaz",
+ join(["foo", "bar", "baz"], "")),
+ ?assertEqual("foo" ++ [<<>>] ++ "bar" ++ [<<>>] ++ "baz",
+ join(["foo", "bar", "baz"], <<>>)),
+ ?assertEqual("foobar" ++ [<<"baz">>],
+ join(["foo", "bar", <<"baz">>], "")),
+ ?assertEqual("",
+ join([], "any")),
ok.
-test_quote_plus() ->
+quote_plus_test() ->
"foo" = quote_plus(foo),
"1" = quote_plus(1),
"1.1" = quote_plus(1.1),
@@ -668,26 +727,45 @@ test_quote_plus() ->
"foo%0A" = quote_plus("foo\n"),
"foo%0A" = quote_plus("foo\n"),
"foo%3B%26%3D" = quote_plus("foo;&="),
+ "foo%3B%26%3D" = quote_plus(<<"foo;&=">>),
ok.
-test_unquote() ->
- "foo bar" = unquote("foo+bar"),
- "foo bar" = unquote("foo%20bar"),
- "foo\r\n" = unquote("foo%0D%0A"),
+unquote_test() ->
+ ?assertEqual("foo bar",
+ unquote("foo+bar")),
+ ?assertEqual("foo bar",
+ unquote("foo%20bar")),
+ ?assertEqual("foo\r\n",
+ unquote("foo%0D%0A")),
+ ?assertEqual("foo\r\n",
+ unquote(<<"foo%0D%0A">>)),
ok.
-test_urlencode() ->
+urlencode_test() ->
"foo=bar&baz=wibble+%0D%0A&z=1" = urlencode([{foo, "bar"},
{"baz", "wibble \r\n"},
{z, 1}]),
ok.
-test_parse_qs() ->
- [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}] =
- parse_qs("foo=bar&baz=wibble+%0D%0A&z=1"),
+parse_qs_test() ->
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
+ parse_qs("foo=bar&baz=wibble+%0D%0a&z=1")),
+ ?assertEqual(
+ [{"", "bar"}, {"baz", "wibble \r\n"}, {"z", ""}],
+ parse_qs("=bar&baz=wibble+%0D%0a&z=")),
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
+ parse_qs(<<"foo=bar&baz=wibble+%0D%0a&z=1">>)),
+ ?assertEqual(
+ [],
+ parse_qs("")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}, {"baz", ""}],
+ parse_qs("foo;bar&baz")),
ok.
-test_partition() ->
+partition_test() ->
{"foo", "", ""} = partition("foo", "/"),
{"foo", "/", "bar"} = partition("foo/bar", "/"),
{"foo", "/", ""} = partition("foo/", "/"),
@@ -695,7 +773,7 @@ test_partition() ->
{"f", "oo/ba", "r"} = partition("foo/bar", "oo/ba"),
ok.
-test_safe_relative_path() ->
+safe_relative_path_test() ->
"foo" = safe_relative_path("foo"),
"foo/" = safe_relative_path("foo/"),
"foo" = safe_relative_path("foo/bar/.."),
@@ -709,7 +787,7 @@ test_safe_relative_path() ->
undefined = safe_relative_path("foo//"),
ok.
-test_parse_qvalues() ->
+parse_qvalues_test() ->
[] = parse_qvalues(""),
[{"identity", 0.0}] = parse_qvalues("identity;q=0"),
[{"identity", 0.0}] = parse_qvalues("identity ;q=0"),
@@ -748,9 +826,10 @@ test_parse_qvalues() ->
invalid_qvalue_string = parse_qvalues("gzip; q=0.5, deflate;q=2"),
invalid_qvalue_string = parse_qvalues("gzip, deflate;q=AB"),
invalid_qvalue_string = parse_qvalues("gzip; q=2.1, deflate"),
+ invalid_qvalue_string = parse_qvalues("gzip; q=0.1234, deflate"),
ok.
-test_pick_accepted_encodings() ->
+pick_accepted_encodings_test() ->
["identity"] = pick_accepted_encodings(
[],
["gzip", "identity"],
@@ -857,3 +936,5 @@ test_pick_accepted_encodings() ->
"identity"
),
ok.
+
+-endif.
diff --git a/src/mochiweb/reloader.erl b/src/mochiweb/reloader.erl
index 6835f8f9..c0f5de88 100644
--- a/src/mochiweb/reloader.erl
+++ b/src/mochiweb/reloader.erl
@@ -13,7 +13,9 @@
-export([start/0, start_link/0]).
-export([stop/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-
+-export([all_changed/0]).
+-export([is_changed/1]).
+-export([reload_modules/1]).
-record(state, {last, tref}).
%% External API
@@ -74,8 +76,37 @@ terminate(_Reason, State) ->
code_change(_Vsn, State, _Extra) ->
{ok, State}.
+%% @spec reload_modules([atom()]) -> [{module, atom()} | {error, term()}]
+%% @doc code:purge/1 and code:load_file/1 the given list of modules in order,
+%% returning the results of code:load_file/1.
+reload_modules(Modules) ->
+ [begin code:purge(M), code:load_file(M) end || M <- Modules].
+
+%% @spec all_changed() -> [atom()]
+%% @doc Return a list of beam modules that have changed.
+all_changed() ->
+ [M || {M, Fn} <- code:all_loaded(), is_list(Fn), is_changed(M)].
+
+%% @spec is_changed(atom()) -> boolean()
+%% @doc Returns true if the loaded module is a beam whose vsn attribute
+%% does not match the on-disk beam file; returns false otherwise.
+is_changed(M) ->
+ try
+ module_vsn(M:module_info()) =/= module_vsn(code:get_object_code(M))
+ catch _:_ ->
+ false
+ end.
+
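The three functions exported above compose into the usual development loop; a minimal usage sketch (not part of the patch itself):

    %% Reload every module whose on-disk beam differs from what is loaded:
    reloader:reload_modules(reloader:all_changed()).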
%% Internal API
+module_vsn({M, Beam, _Fn}) ->
+ {ok, {M, Vsn}} = beam_lib:version(Beam),
+ Vsn;
+module_vsn(L) when is_list(L) ->
+ {_, Attrs} = lists:keyfind(attributes, 1, L),
+ {_, Vsn} = lists:keyfind(vsn, 1, Attrs),
+ Vsn.
+
doit(From, To) ->
[case file:read_file_info(Filename) of
{ok, #file_info{mtime = Mtime}} when Mtime >= From, Mtime < To ->
@@ -121,3 +152,10 @@ reload(Module) ->
stamp() ->
erlang:localtime().
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.