Diffstat (limited to 'test')
-rwxr-xr-x  test/etap/010-file-basics.t  18
-rwxr-xr-x  test/etap/021-btree-reductions.t  4
-rwxr-xr-x  test/etap/030-doc-from-json.t  16
-rwxr-xr-x  test/etap/031-doc-to-json.t  21
-rwxr-xr-x  test/etap/040-util.t  37
-rwxr-xr-x  test/etap/050-stream.t  12
-rwxr-xr-x  test/etap/060-kt-merging.t  97
-rwxr-xr-x  test/etap/070-couch-db.t  13
-rwxr-xr-x  test/etap/090-task-status.t  4
-rwxr-xr-x  test/etap/100-ref-counter.t  31
-rwxr-xr-x  test/etap/110-replication-httpc.t  6
-rwxr-xr-x  test/etap/111-replication-changes-feed.t  16
-rwxr-xr-x  test/etap/112-replication-missing-revs.t  21
-rwxr-xr-x  test/etap/113-replication-attachment-comp.t  317
-rwxr-xr-x  test/etap/121-stats-aggregates.t  4
-rwxr-xr-x  test/etap/130-attachments-md5.t  8
-rwxr-xr-x  test/etap/140-attachment-comp.t  715
-rwxr-xr-x  test/etap/150-invalid-view-seq.t  192
-rwxr-xr-x  test/etap/160-vhosts.t  290
-rwxr-xr-x  test/etap/170-os-daemons.es  26
-rwxr-xr-x  test/etap/170-os-daemons.t  114
-rwxr-xr-x  test/etap/171-os-daemons-config.es  83
-rwxr-xr-x  test/etap/171-os-daemons-config.t  74
-rw-r--r--  test/etap/172-os-daemon-errors.1.es  22
-rwxr-xr-x  test/etap/172-os-daemon-errors.2.es  16
-rwxr-xr-x  test/etap/172-os-daemon-errors.3.es  17
-rwxr-xr-x  test/etap/172-os-daemon-errors.4.es  17
-rwxr-xr-x  test/etap/172-os-daemon-errors.t  126
-rwxr-xr-x  test/etap/173-os-daemon-cfg-register.es  35
-rwxr-xr-x  test/etap/173-os-daemon-cfg-register.t  98
-rw-r--r--  test/etap/180-http-proxy.ini  20
-rwxr-xr-x  test/etap/180-http-proxy.t  357
-rw-r--r--  test/etap/Makefile.am  25
-rw-r--r--  test/etap/test_web.erl  99
-rw-r--r--  test/javascript/couch_http.js  13
-rw-r--r--  test/view_server/query_server_spec.rb  40
36 files changed, 2833 insertions(+), 171 deletions(-)
diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t
index 09b2f2b1..ed71f5e8 100755
--- a/test/etap/010-file-basics.t
+++ b/test/etap/010-file-basics.t
@@ -16,7 +16,7 @@ filename() -> test_util:build_file("test/etap/temp.010").
main(_) ->
test_util:init_code_path(),
- etap:plan(16),
+ etap:plan(19),
case (catch test()) of
ok ->
etap:end_tests();
@@ -67,11 +67,25 @@ test() ->
{ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
"Reading a term from a written binary term representation succeeds."),
+
+ BigBin = list_to_binary(lists:duplicate(100000, 0)),
+ {ok, BigBinPos} = couch_file:append_binary(Fd, BigBin),
+ etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
+ "Reading a large term from a written representation succeeds."),
+
+ ok = couch_file:write_header(Fd, hello),
+ etap:is({ok, hello}, couch_file:read_header(Fd),
+ "Reading a header succeeds."),
+
+ {ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin),
+ etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
+ "Reading a large term from a written representation succeeds 2."),
% append_binary == append_iolist?
% Possible bug in pread_iolist or iolist() -> append_binary
{ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
- etap:is({ok, [<<"foombam">>]}, couch_file:pread_iolist(Fd, IOLPos),
+ {ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
+ etap:is(<<"foombam">>, iolist_to_binary(IoList),
"Reading an results in a binary form of the written iolist()"),
% XXX: How does on test fsync?
diff --git a/test/etap/021-btree-reductions.t b/test/etap/021-btree-reductions.t
index 3e19c767..331e49af 100755
--- a/test/etap/021-btree-reductions.t
+++ b/test/etap/021-btree-reductions.t
@@ -106,7 +106,7 @@ test()->
(_) -> false
end,
couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]),
- "Reducing foward over first half works with a startkey and endkey."
+ "Reducing forward over first half works with a startkey and endkey."
),
etap:fun_is(
@@ -115,7 +115,7 @@ test()->
(_) -> false
end,
couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]),
- "Reducing foward over second half works with second startkey and endkey"
+ "Reducing forward over second half works with second startkey and endkey"
),
etap:fun_is(
diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t
index dc3327aa..c4ef649a 100755
--- a/test/etap/030-doc-from-json.t
+++ b/test/etap/030-doc-from-json.t
@@ -17,7 +17,11 @@
%% XXX: Figure out how to -include("couch_db.hrl")
-record(doc, {id= <<"">>, revs={0, []}, body={[]},
atts=[], deleted=false, meta=[]}).
--record(att, {name, type, len, md5= <<>>, revpos=0, data}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+ encoding=identity}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
main(_) ->
test_util:init_code_path(),
@@ -32,6 +36,8 @@ main(_) ->
ok.
test() ->
+ couch_config:start_link([default_config()]),
+ couch_config:set("attachments", "compression_level", "0"),
ok = test_from_json_success(),
ok = test_from_json_errors(),
ok.
@@ -85,13 +91,17 @@ test_from_json_success() ->
name = <<"my_attachment.fu">>,
data = stub,
type = <<"application/awesome">>,
- len = 45
+ att_len = 45,
+ disk_len = 45,
+ revpos = nil
},
#att{
name = <<"noahs_private_key.gpg">>,
data = <<"I have a pet fish!">>,
type = <<"application/pgp-signature">>,
- len = 18
+ att_len = 18,
+ disk_len = 18,
+ revpos = 0
}
]},
"Attachments are parsed correctly."
diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t
index 4e7a175f..605a6d00 100755
--- a/test/etap/031-doc-to-json.t
+++ b/test/etap/031-doc-to-json.t
@@ -17,7 +17,11 @@
%% XXX: Figure out how to -include("couch_db.hrl")
-record(doc, {id= <<"">>, revs={0, []}, body={[]},
atts=[], deleted=false, meta=[]}).
--record(att, {name, type, len, md5= <<>>, revpos=0, data}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+ encoding=identity}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
main(_) ->
test_util:init_code_path(),
@@ -32,6 +36,8 @@ main(_) ->
ok.
test() ->
+ couch_config:start_link([default_config()]),
+ couch_config:set("attachments", "compression_level", "0"),
ok = test_to_json_success(),
ok.
@@ -72,8 +78,8 @@ test_to_json_success() ->
},
{
#doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
- {[{<<"_id">>, <<>>}, {<<"_deleted">>, true}]},
- "Deleted docs drop body members."
+ {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
+ "Deleted docs no longer drop body members."
},
{
#doc{meta=[
@@ -116,14 +122,16 @@ test_to_json_success() ->
type = <<"xml/sucks">>,
data = fun() -> ok end,
revpos = 1,
- len = 400
+ att_len = 400,
+ disk_len = 400
},
#att{
name = <<"fast.json">>,
type = <<"json/ftw">>,
data = <<"{\"so\": \"there!\"}">>,
revpos = 1,
- len = 16
+ att_len = 16,
+ disk_len = 16
}
]},
{[
@@ -153,7 +161,8 @@ test_to_json_success() ->
type = <<"text/plain">>,
data = fun() -> <<"diet pepsi">> end,
revpos = 1,
- len = 10
+ att_len = 10,
+ disk_len = 10
},
#att{
name = <<"food.now">>,
diff --git a/test/etap/040-util.t b/test/etap/040-util.t
index 6d6da2c1..8f80db87 100755
--- a/test/etap/040-util.t
+++ b/test/etap/040-util.t
@@ -17,7 +17,7 @@ main(_) ->
test_util:init_code_path(),
application:start(crypto),
- etap:plan(11),
+ etap:plan(14),
case (catch test()) of
ok ->
etap:end_tests();
@@ -35,29 +35,6 @@ test() ->
etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"),
"A list of atoms is one munged atom."),
- % terminate_linked
- Self = self(),
-
- spawn(fun() ->
- SecondSelf = self(),
- ChildPid = spawn_link(fun() ->
- SecondSelf ! {child, started},
- receive shutdown -> ok end
- end),
- PidUp = receive
- {child, started} -> ok
- after 1000 ->
- {error, timeout}
- end,
- etap:is(ok, PidUp, "Started a linked process."),
- couch_util:terminate_linked(normal),
- Self ! {pid, ChildPid}
- end),
- receive
- {pid, Pid} ->
- etap:ok(not is_process_alive(Pid), "Linked process was killed.")
- end,
-
% implode
etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"),
"use & as separator in list."),
@@ -88,4 +65,16 @@ test() ->
etap:ok(not couch_util:should_flush(),
"Checking to flush invokes GC."),
+ % verify
+ etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"),
+ "String comparison."),
+ etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"),
+ "String comparison (unequal lengths)."),
+ etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
+ "Binary comparison."),
+ etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
+ "Binary comparison (unequal lengths)."),
+ etap:is(false, couch_util:verify(nil, <<"ahBase3r">>),
+ "Binary comparison with atom."),
+
ok.
diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t
index 9324916c..d30b524a 100755
--- a/test/etap/050-stream.t
+++ b/test/etap/050-stream.t
@@ -42,8 +42,8 @@ test() ->
etap:is(ok, couch_stream:write(Stream, <<>>),
"Writing an empty binary does nothing."),
- {Ptrs, Length, _} = couch_stream:close(Stream),
- etap:is(Ptrs, [0], "Close returns the file pointers."),
+ {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
+ etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
etap:is(Length, 8, "Close also returns the number of bytes written."),
etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
@@ -58,8 +58,8 @@ test() ->
etap:is(ok, couch_stream:write(Stream2, ZeroBits),
"Successfully wrote 80 0 bits."),
- {Ptrs2, Length2, _} = couch_stream:close(Stream2),
- etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."),
+ {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
+ etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
etap:is(Length2, 20, "Length written is 160 bytes."),
AllBits = iolist_to_binary([OneBits,ZeroBits]),
@@ -73,14 +73,14 @@ test() ->
couch_stream:write(Stream3, Data),
[Data | Acc]
end, [], lists:seq(1, 1024)),
- {Ptrs3, Length3, _} = couch_stream:close(Stream3),
+ {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
% 4095 because of 5 * 4096 rem 5 (last write before exceeding threshold)
% + 5 puts us over the threshold
% + 4 bytes for the term_to_binary adding a length header
% + 1 byte every 4K for tail append headers
SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
- etap:is(Ptrs3, [ExpPtr2, SecondPtr], "Pointers every 4K bytes."),
+ etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
couch_file:close(Fd),
diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t
index d6b13d6d..5a8571ac 100755
--- a/test/etap/060-kt-merging.t
+++ b/test/etap/060-kt-merging.t
@@ -15,7 +15,7 @@
main(_) ->
test_util:init_code_path(),
- etap:plan(16),
+ etap:plan(12),
case (catch test()) of
ok ->
etap:end_tests();
@@ -26,113 +26,88 @@ main(_) ->
ok.
test() ->
- EmptyTree = [],
- One = [{0, {"1","foo",[]}}],
+ One = {0, {"1","foo",[]}},
TwoSibs = [{0, {"1","foo",[]}},
{0, {"2","foo",[]}}],
- OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
- TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", []}]}}],
- TwoChildSibs2 = [{0, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", [{"1bb", "boo", []}]}]}}],
- Stemmed1b = [{1, {"1a", "bar", []}}],
- Stemmed1a = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- Stemmed1aa = [{2, {"1aa", "bar", []}}],
- Stemmed1bb = [{2, {"1bb", "boo", []}}],
-
- etap:is(
- {EmptyTree, no_conflicts},
- couch_key_tree:merge(EmptyTree, EmptyTree),
- "Merging two empty trees yields an empty tree."
- ),
-
- etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(EmptyTree, One),
+ OneChild = {0, {"1","foo",[{"1a", "bar", []}]}},
+ TwoChild = {0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ TwoChildSibs = {0, {"1","foo", [{"1a", "bar", []},
+ {"1b", "bar", []}]}},
+ TwoChildSibs2 = {0, {"1","foo", [{"1a", "bar", []},
+ {"1b", "bar", [{"1bb", "boo", []}]}]}},
+ Stemmed1b = {1, {"1a", "bar", []}},
+ Stemmed1a = {1, {"1a", "bar", [{"1aa", "bar", []}]}},
+ Stemmed1aa = {2, {"1aa", "bar", []}},
+ Stemmed1bb = {2, {"1bb", "boo", []}},
+
+ etap:is(
+ {[One], no_conflicts},
+ couch_key_tree:merge([], One),
"The empty tree is the identity for merge."
),
etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(One, EmptyTree),
- "Merging is commutative."
- ),
-
- etap:is(
{TwoSibs, no_conflicts},
- couch_key_tree:merge(One, TwoSibs),
+ couch_key_tree:merge(TwoSibs, One),
"Merging a prefix of a tree with the tree yields the tree."
),
etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(One, One),
+ {[One], no_conflicts},
+ couch_key_tree:merge([One], One),
"Merging is reflexive."
),
etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, TwoChild),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], TwoChild),
"Merging two children is still reflexive."
),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(TwoChildSibs, TwoChildSibs),
+ {[TwoChildSibs], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs], TwoChildSibs),
"Merging a tree to itself is itself."),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(TwoChildSibs, Stemmed1b),
+ {[TwoChildSibs], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs], Stemmed1b),
"Merging a tree with a stem."
),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(Stemmed1b, TwoChildSibs),
- "Merging in the opposite direction."
- ),
-
- etap:is(
- {TwoChildSibs2, no_conflicts},
- couch_key_tree:merge(TwoChildSibs2, Stemmed1bb),
+ {[TwoChildSibs2], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs2], Stemmed1bb),
"Merging a stem at a deeper level."
),
etap:is(
- {TwoChildSibs2, no_conflicts},
- couch_key_tree:merge(Stemmed1bb, TwoChildSibs2),
- "Merging a deeper level in opposite order."
- ),
-
- etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, Stemmed1aa),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], Stemmed1aa),
"Merging a single tree with a deeper stem."
),
etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, Stemmed1a),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], Stemmed1a),
"Merging a larger stem."
),
etap:is(
- {Stemmed1a, no_conflicts},
- couch_key_tree:merge(Stemmed1a, Stemmed1aa),
+ {[Stemmed1a], no_conflicts},
+ couch_key_tree:merge([Stemmed1a], Stemmed1aa),
"More merging."
),
- Expect1 = OneChild ++ Stemmed1aa,
+ Expect1 = [OneChild, Stemmed1aa],
etap:is(
{Expect1, conflicts},
- couch_key_tree:merge(OneChild, Stemmed1aa),
+ couch_key_tree:merge([OneChild], Stemmed1aa),
"Merging should create conflicts."
),
etap:is(
- {TwoChild, no_conflicts},
+ {[TwoChild], no_conflicts},
couch_key_tree:merge(Expect1, TwoChild),
"Merge should have no conflicts."
),
diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t
index bf20dc0a..4b14aba6 100755
--- a/test/etap/070-couch-db.t
+++ b/test/etap/070-couch-db.t
@@ -41,6 +41,7 @@ test() ->
etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2),
"Database was deleted."),
+ gen_server:call(couch_server, {set_max_dbs_open, 3}),
MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end,
lists:foreach(fun(Int) ->
@@ -51,24 +52,24 @@ test() ->
end,
{ok, Db} = couch_db:create(MkDbName(Int), []),
ok = couch_db:close(Db)
- end, lists:seq(1, 200)),
+ end, lists:seq(1, 6)),
{ok, AllDbs3} = couch_server:all_databases(),
NumCreated = lists:foldl(fun(Int, Acc) ->
true = lists:member(MkDbName(Int), AllDbs3),
Acc+1
- end, 0, lists:seq(1, 200)),
- etap:is(200, NumCreated, "Created all databases."),
+ end, 0, lists:seq(1, 6)),
+ etap:is(6, NumCreated, "Created all databases."),
lists:foreach(fun(Int) ->
ok = couch_server:delete(MkDbName(Int), [])
- end, lists:seq(1, 200)),
+ end, lists:seq(1, 6)),
{ok, AllDbs4} = couch_server:all_databases(),
NumDeleted = lists:foldl(fun(Int, Acc) ->
false = lists:member(MkDbName(Int), AllDbs4),
Acc+1
- end, 0, lists:seq(1, 200)),
- etap:is(200, NumDeleted, "Deleted all databases."),
+ end, 0, lists:seq(1, 6)),
+ etap:is(6, NumDeleted, "Deleted all databases."),
ok.
diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t
index b6ebbe4c..b278de7f 100755
--- a/test/etap/090-task-status.t
+++ b/test/etap/090-task-status.t
@@ -29,7 +29,7 @@ check_status(Pid,ListPropLists) ->
From = list_to_binary(pid_to_list(Pid)),
Element = lists:foldl(
fun(PropList,Acc) ->
- case proplists:get_value(pid,PropList) of
+ case couch_util:get_value(pid,PropList) of
From ->
[PropList | Acc];
_ ->
@@ -38,7 +38,7 @@ check_status(Pid,ListPropLists) ->
end,
[], ListPropLists
),
- proplists:get_value(status,hd(Element)).
+ couch_util:get_value(status,hd(Element)).
loop() ->
receive
diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t
index 6f18d828..8f996d04 100755
--- a/test/etap/100-ref-counter.t
+++ b/test/etap/100-ref-counter.t
@@ -27,17 +27,14 @@ main(_) ->
loop() ->
receive
- {ping, From} ->
- From ! pong
+ close -> ok
end.
wait() ->
receive
- _ ->
- ok
- after
- 1000 ->
- throw(timeout_error)
+ {'DOWN', _, _, _, _} -> ok
+ after 1000 ->
+ throw(timeout_error)
end.
test() ->
@@ -94,11 +91,23 @@ test() ->
"Sanity checking that the Pid was re-added."
),
- ChildPid1 ! {ping, self()},
+ erlang:monitor(process, ChildPid1),
+ ChildPid1 ! close,
wait(),
- etap:is(
- couch_ref_counter:count(RefCtr),
- 1,
+
+ CheckFun = fun
+ (Iter, nil) ->
+ case couch_ref_counter:count(RefCtr) of
+ 1 -> Iter;
+ _ -> nil
+ end;
+ (_, Acc) ->
+ Acc
+ end,
+ Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)),
+ etap:isnt(
+ Result,
+ nil,
"The referer count was decremented automatically on process exit."
),
diff --git a/test/etap/110-replication-httpc.t b/test/etap/110-replication-httpc.t
index 492732bc..529239c5 100755
--- a/test/etap/110-replication-httpc.t
+++ b/test/etap/110-replication-httpc.t
@@ -19,7 +19,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -107,8 +107,8 @@ test_put() ->
method = put
},
{Resp} = couch_rep_httpc:request(Req),
- etap:ok(proplists:get_value(<<"ok">>, Resp), "ok:true on upload"),
- etap:is(<<"test_put">>, proplists:get_value(<<"id">>, Resp), "id is correct").
+ etap:ok(couch_util:get_value(<<"ok">>, Resp), "ok:true on upload"),
+ etap:is(<<"test_put">>, couch_util:get_value(<<"id">>, Resp), "id is correct").
test_qs() ->
Req = #http_db{
diff --git a/test/etap/111-replication-changes-feed.t b/test/etap/111-replication-changes-feed.t
index b03c1ac7..778b99dd 100755
--- a/test/etap/111-replication-changes-feed.t
+++ b/test/etap/111-replication-changes-feed.t
@@ -22,7 +22,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -153,11 +153,11 @@ test_deleted_conflicts(Type) ->
{ExpectProps} = generate_conflict(),
%% delete the conflict revision
- Id = proplists:get_value(<<"id">>, ExpectProps),
- [Win, {[{<<"rev">>, Lose}]}] = proplists:get_value(<<"changes">>, ExpectProps),
+ Id = couch_util:get_value(<<"id">>, ExpectProps),
+ [Win, {[{<<"rev">>, Lose}]}] = couch_util:get_value(<<"changes">>, ExpectProps),
Doc = couch_doc:from_json_obj({[
{<<"_id">>, Id},
- {<<"_rev">>, couch_doc:rev_to_str(Lose)},
+ {<<"_rev">>, Lose},
{<<"_deleted">>, true}
]}),
Db = get_db(),
@@ -167,7 +167,7 @@ test_deleted_conflicts(Type) ->
Expect = {[
{<<"seq">>, get_update_seq()},
{<<"id">>, Id},
- {<<"changes">>, [Win, {[{<<"rev">>, Rev}]}]}
+ {<<"changes">>, [Win, {[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
]},
{ok, Pid} = start_changes_feed(Type, Since, false),
@@ -210,7 +210,7 @@ generate_change(Id, EJson) ->
{[
{<<"seq">>, get_update_seq()},
{<<"id">>, Id},
- {<<"changes">>, [{[{<<"rev">>, Rev}]}]}
+ {<<"changes">>, [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
]}.
generate_conflict() ->
@@ -220,9 +220,9 @@ generate_conflict() ->
Doc2 = (couch_doc:from_json_obj({[<<"foo">>, <<"baz">>]}))#doc{id = Id},
{ok, Rev1} = couch_db:update_doc(Db, Doc1, [full_commit]),
{ok, Rev2} = couch_db:update_doc(Db, Doc2, [full_commit, all_or_nothing]),
-
+
%% relies on undocumented CouchDB conflict winner algo and revision sorting!
- RevList = [{[{<<"rev">>, R}]} || R
+ RevList = [{[{<<"rev">>, couch_doc:rev_to_str(R)}]} || R
<- lists:sort(fun(A,B) -> B<A end, [Rev1,Rev2])],
{[
{<<"seq">>, get_update_seq()},
diff --git a/test/etap/112-replication-missing-revs.t b/test/etap/112-replication-missing-revs.t
index 8aabfd37..71971088 100755
--- a/test/etap/112-replication-missing-revs.t
+++ b/test/etap/112-replication-missing-revs.t
@@ -23,7 +23,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -188,8 +188,21 @@ start_changes_feed(remote, Since, Continuous) ->
Db = #http_db{url = "http://127.0.0.1:5984/etap-test-source/"},
couch_rep_changes_feed:start_link(self(), Db, Since, Props).
+couch_rep_pid(Db) ->
+ spawn(fun() -> couch_rep_pid_loop(Db) end).
+
+couch_rep_pid_loop(Db) ->
+ receive
+ {'$gen_call', From, get_target_db} ->
+ gen_server:reply(From, {ok, Db})
+ end,
+ couch_rep_pid_loop(Db).
+
start_missing_revs(local, Changes) ->
- couch_rep_missing_revs:start_link(self(), get_db(target), Changes, []);
+ TargetDb = get_db(target),
+ MainPid = couch_rep_pid(TargetDb),
+ couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []);
start_missing_revs(remote, Changes) ->
- Db = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"},
- couch_rep_missing_revs:start_link(self(), Db, Changes, []).
+ TargetDb = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"},
+ MainPid = couch_rep_pid(TargetDb),
+ couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []).
diff --git a/test/etap/113-replication-attachment-comp.t b/test/etap/113-replication-attachment-comp.t
new file mode 100755
index 00000000..19c48fc6
--- /dev/null
+++ b/test/etap/113-replication-attachment-comp.t
@@ -0,0 +1,317 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_a_name() ->
+ <<"couch_test_rep_att_comp_a">>.
+
+test_db_b_name() ->
+ <<"couch_test_rep_att_comp_b">>.
+
+main(_) ->
+ test_util:init_code_path(),
+ etap:plan(45),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+ ibrowse:start(),
+ timer:sleep(1000),
+
+ %
+ % test pull replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do pull replication
+ do_pull_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ %
+ % test push replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do push replication
+ do_push_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ %
+ % test local replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do local-local replication
+ do_local_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ timer:sleep(3000), % to avoid mochiweb socket closed exceptions
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ couch_server_sup:stop(),
+ ok.
+
+put_text_att(DbName) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url(DbName) ++ "/testdoc1/readme.txt", [],
+ "text/plain", test_text_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment"),
+ ok.
+
+do_pull_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, list_to_binary(db_url(SourceDbName))},
+ {<<"target">>, TargetDbName}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Pull replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Pull replication completed with success"),
+ ok.
+
+do_push_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, SourceDbName},
+ {<<"target">>, list_to_binary(db_url(TargetDbName))}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Push replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Push replication completed with success"),
+ ok.
+
+do_local_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, SourceDbName},
+ {<<"target">>, TargetDbName}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Local replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Local replication completed with success"),
+ ok.
+
+check_att_is_compressed(DbName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url(DbName) ++ "/testdoc1/readme.txt",
+ [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code for the attachment request is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, true, "The attachment was received in compressed form"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "The attachment content is valid after decompression at the client side"
+ ),
+ ok.
+
+check_server_can_decompress_att(DbName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url(DbName) ++ "/testdoc1/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code for the attachment request is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(
+ Gziped, false, "The attachment was not received in compressed form"
+ ),
+ etap:is(
+ Body,
+ test_text_data(),
+ "The attachment content is valid after server decompression"
+ ),
+ ok.
+
+check_att_stubs(SourceDbName, TargetDbName) ->
+ {ok, {{_, Code1, _}, _Headers1, Body1}} = http:request(
+ get,
+ {db_url(SourceDbName) ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code1,
+ 200,
+ "HTTP response code is 200 for the source DB doc request"
+ ),
+ Json1 = couch_util:json_decode(Body1),
+ SourceAttStub = couch_util:get_nested_json_value(
+ Json1,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ {ok, {{_, Code2, _}, _Headers2, Body2}} = http:request(
+ get,
+ {db_url(TargetDbName) ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code2,
+ 200,
+ "HTTP response code is 200 for the target DB doc request"
+ ),
+ Json2 = couch_util:json_decode(Body2),
+ TargetAttStub = couch_util:get_nested_json_value(
+ Json2,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ IdenticalStubs = (SourceAttStub =:= TargetAttStub),
+ etap:is(IdenticalStubs, true, "Attachment stubs are identical"),
+ TargetAttStubLength = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"length">>]
+ ),
+ TargetAttStubEnc = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"encoding">>]
+ ),
+ etap:is(
+ TargetAttStubEnc,
+ <<"gzip">>,
+ "Attachment stub has encoding property set to gzip"
+ ),
+ TargetAttStubEncLength = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"encoded_length">>]
+ ),
+ EncLengthDefined = is_integer(TargetAttStubEncLength),
+ etap:is(
+ EncLengthDefined,
+ true,
+ "Stubs have the encoded_length field properly defined"
+ ),
+ EncLengthSmaller = (TargetAttStubEncLength < TargetAttStubLength),
+ etap:is(
+ EncLengthSmaller,
+ true,
+ "Stubs have the encoded_length field smaller than their length field"
+ ),
+ ok.
+
+admin_user_ctx() ->
+ {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db(DbName) ->
+ {ok, _} = couch_db:create(DbName, [admin_user_ctx()]).
+
+delete_db(DbName) ->
+ couch_server:delete(DbName, [admin_user_ctx()]).
+
+db_url(DbName) ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(DbName).
+
+rep_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/_replicate".
+
+test_text_data() ->
+ {ok, Data} = file:read_file(test_util:source_file("README")),
+ binary_to_list(Data).
diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t
index cd6b1430..d678aa9d 100755
--- a/test/etap/121-stats-aggregates.t
+++ b/test/etap/121-stats-aggregates.t
@@ -46,13 +46,13 @@ test_all_empty() ->
etap:is(length(Aggs), 2, "There are only two aggregate types in testing."),
etap:is(
- proplists:get_value(testing, Aggs),
+ couch_util:get_value(testing, Aggs),
{[{stuff, make_agg(<<"yay description">>,
null, null, null, null, null)}]},
"{testing, stuff} is empty at start."
),
etap:is(
- proplists:get_value(number, Aggs),
+ couch_util:get_value(number, Aggs),
{[{'11', make_agg(<<"randomosity">>,
null, null, null, null, null)}]},
"{number, '11'} is empty at start."
diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t
index fe6732d6..4c40f83a 100755
--- a/test/etap/130-attachments-md5.t
+++ b/test/etap/130-attachments-md5.t
@@ -103,7 +103,7 @@ test_identity_with_valid_md5() ->
"PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
"Content-Type: text/plain\r\n",
"Content-Length: 34\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n",
AttData],
@@ -118,7 +118,7 @@ test_chunked_with_valid_md5_header() ->
"PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
"Content-Type: text/plain\r\n",
"Transfer-Encoding: chunked\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n",
to_hex(size(Part1)), "\r\n",
Part1, "\r\n",
@@ -145,7 +145,7 @@ test_chunked_with_valid_md5_trailer() ->
to_hex(size(Part2)), "\r\n",
Part2, "\r\n",
"0\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n"],
{Code, Json} = do_request(Data),
@@ -227,7 +227,7 @@ get_socket() ->
do_request(Request) ->
Sock = get_socket(),
gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
- timer:sleep(100),
+ timer:sleep(1000),
{ok, R} = gen_tcp:recv(Sock, 0),
gen_tcp:close(Sock),
[Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t
new file mode 100755
index 00000000..1a90bf0b
--- /dev/null
+++ b/test/etap/140-attachment-comp.t
@@ -0,0 +1,715 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_name() ->
+ <<"couch_test_atts_compression">>.
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(78),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+ timer:sleep(1000),
+ couch_server:delete(test_db_name(), []),
+ couch_db:create(test_db_name(), []),
+
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ create_1st_text_att(),
+ create_1st_png_att(),
+ create_2nd_text_att(),
+ create_2nd_png_att(),
+
+ tests_for_1st_text_att(),
+ tests_for_1st_png_att(),
+ tests_for_2nd_text_att(),
+ tests_for_2nd_png_att(),
+
+ create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+ test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+
+ test_create_already_compressed_att_with_invalid_content_encoding(
+ db_url() ++ "/doc_att_deflate",
+ "readme.txt",
+ zlib:compress(test_text_data()),
+ "deflate"
+ ),
+
+ test_create_already_compressed_att_with_invalid_content_encoding(
+ db_url() ++ "/doc_att_compress",
+ "readme.txt",
+ % Note: As of OTP R13B04, it seems there's no LZW compression
+ % (i.e. UNIX compress utility implementation) lib in OTP.
+ % However there's a simple working Erlang implementation at:
+ % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
+ test_text_data(),
+ "compress"
+ ),
+
+ timer:sleep(3000), % to avoid mochiweb socket closed exceptions
+ couch_server:delete(test_db_name(), []),
+ couch_server_sup:stop(),
+ ok.
+
+db_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(test_db_name()).
+
+create_1st_text_att() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc1/readme.txt", [],
+ "text/plain", test_text_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment using the standalone api"),
+ ok.
+
+create_1st_png_att() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc2/icon.png", [],
+ "image/png", test_png_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created png attachment using the standalone api"),
+ ok.
+
+% create a text attachment using the non-standalone attachment api
+create_2nd_text_att() ->
+ DocJson = {[
+ {<<"_attachments">>, {[
+ {<<"readme.txt">>, {[
+ {<<"content_type">>, <<"text/plain">>},
+ {<<"data">>, base64:encode(test_text_data())}
+ ]}
+ }]}}
+ ]},
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc3", [],
+ "application/json", list_to_binary(couch_util:json_encode(DocJson))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment using the non-standalone api"),
+ ok.
+
+% create a png attachment using the non-standalone attachment api
+create_2nd_png_att() ->
+ DocJson = {[
+ {<<"_attachments">>, {[
+ {<<"icon.png">>, {[
+ {<<"content_type">>, <<"image/png">>},
+ {<<"data">>, base64:encode(test_png_data())}
+ ]}
+ }]}}
+ ]},
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc4", [],
+ "application/json", list_to_binary(couch_util:json_encode(DocJson))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created png attachment using the non-standalone api"),
+ ok.
+
+create_already_compressed_att(DocUri, AttName) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {DocUri ++ "/" ++ AttName, [{"Content-Encoding", "gzip"}],
+ "text/plain", zlib:gzip(test_text_data())},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 201,
+ "Created already compressed attachment using the standalone api"
+ ),
+ ok.
+
+tests_for_1st_text_att() ->
+ test_get_1st_text_att_with_accept_encoding_gzip(),
+ test_get_1st_text_att_without_accept_encoding_header(),
+ test_get_1st_text_att_with_accept_encoding_deflate(),
+ test_get_1st_text_att_with_accept_encoding_deflate_only(),
+ test_get_doc_with_1st_text_att(),
+ test_1st_text_att_stub().
+
+tests_for_1st_png_att() ->
+ test_get_1st_png_att_without_accept_encoding_header(),
+ test_get_1st_png_att_with_accept_encoding_gzip(),
+ test_get_1st_png_att_with_accept_encoding_deflate(),
+ test_get_doc_with_1st_png_att(),
+ test_1st_png_att_stub().
+
+tests_for_2nd_text_att() ->
+ test_get_2nd_text_att_with_accept_encoding_gzip(),
+ test_get_2nd_text_att_without_accept_encoding_header(),
+ test_get_doc_with_2nd_text_att(),
+ test_2nd_text_att_stub().
+
+tests_for_2nd_png_att() ->
+ test_get_2nd_png_att_without_accept_encoding_header(),
+ test_get_2nd_png_att_with_accept_encoding_gzip(),
+ test_get_doc_with_2nd_png_att(),
+ test_2nd_png_att_stub().
+
+test_get_1st_text_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, true, "received body is gziped"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
+test_get_1st_text_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
+test_get_1st_text_att_with_accept_encoding_deflate() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "deflate"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ Deflated = lists:member({"content-encoding", "deflate"}, Headers),
+ etap:is(Deflated, false, "received body is not deflated"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
+test_get_1st_text_att_with_accept_encoding_deflate_only() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt",
+ [{"Accept-Encoding", "deflate, *;q=0"}]},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 406,
+ "HTTP response code is 406 for an unsupported content encoding request"
+ ),
+ ok.
+
+test_get_1st_png_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_1st_png_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_1st_png_att_with_accept_encoding_deflate() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "deflate"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Deflated = lists:member({"content-encoding", "deflate"}, Headers),
+ etap:is(Deflated, false, "received body is not deflated"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_doc_with_1st_text_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ TextAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttType = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"content_type">>]
+ ),
+ TextAttData = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"data">>]
+ ),
+ etap:is(
+ TextAttType,
+ <<"text/plain">>,
+ "1st text attachment has type text/plain"
+ ),
+ %% check the attachment's data is the base64 encoding of the plain text
+ %% and not the base64 encoding of the gziped plain text
+ etap:is(
+ TextAttData,
+ base64:encode(test_text_data()),
+ "1st text attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_1st_text_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {TextAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+ etap:is(
+ TextAttLength,
+ length(test_text_data()),
+ "1st text attachment stub length matches the uncompressed length"
+ ),
+ TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+ etap:is(
+ TextAttEncoding,
+ <<"gzip">>,
+ "1st text attachment stub has the encoding field set to gzip"
+ ),
+ TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+ etap:is(
+ TextAttEncLength,
+ iolist_size(zlib:gzip(test_text_data())),
+ "1st text attachment stub encoded_length matches the compressed length"
+ ),
+ ok.
+
+test_get_doc_with_1st_png_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ PngAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttType = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"content_type">>]
+ ),
+ PngAttData = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"data">>]
+ ),
+ etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+ etap:is(
+ PngAttData,
+ base64:encode(test_png_data()),
+ "1st png attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_1st_png_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {PngAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+ etap:is(
+ PngAttLength,
+ length(test_png_data()),
+ "1st png attachment stub length matches the uncompressed length"
+ ),
+ PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+ etap:is(
+ PngEncoding,
+ undefined,
+ "1st png attachment stub doesn't have an encoding field"
+ ),
+ PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+ etap:is(
+ PngEncLength,
+ undefined,
+ "1st png attachment stub doesn't have an encoded_length field"
+ ),
+ ok.
+
+test_get_2nd_text_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3/readme.txt", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, true, "received body is gziped"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "received data for the 2nd text attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_text_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 2nd text attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_png_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4/icon.png", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 2nd png attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_png_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4/icon.png", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 2nd png attachment is ok"
+ ),
+ ok.
+
+test_get_doc_with_2nd_text_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ TextAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttType = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"content_type">>]
+ ),
+ TextAttData = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"data">>]
+ ),
+ etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
+ %% check the attachment's data is the base64 encoding of the plain text
+ %% and not the base64 encoding of the gziped plain text
+ etap:is(
+ TextAttData,
+ base64:encode(test_text_data()),
+ "2nd text attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_2nd_text_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {TextAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+ etap:is(
+ TextAttLength,
+ length(test_text_data()),
+ "2nd text attachment stub length matches the uncompressed length"
+ ),
+ TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+ etap:is(
+ TextAttEncoding,
+ <<"gzip">>,
+ "2nd text attachment stub has the encoding field set to gzip"
+ ),
+ TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+ etap:is(
+ TextAttEncLength,
+ iolist_size(zlib:gzip(test_text_data())),
+ "2nd text attachment stub encoded_length matches the compressed length"
+ ),
+ ok.
+
+test_get_doc_with_2nd_png_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ PngAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttType = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"content_type">>]
+ ),
+ PngAttData = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"data">>]
+ ),
+ etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+ etap:is(
+ PngAttData,
+ base64:encode(test_png_data()),
+ "2nd png attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_2nd_png_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {PngAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+ etap:is(
+ PngAttLength,
+ length(test_png_data()),
+ "2nd png attachment stub length matches the uncompressed length"
+ ),
+ PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+ etap:is(
+ PngEncoding,
+ undefined,
+ "2nd png attachment stub doesn't have an encoding field"
+ ),
+ PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+ etap:is(
+ PngEncLength,
+ undefined,
+ "2nd png attachment stub doesn't have an encoded_length field"
+ ),
+ ok.
+
+test_already_compressed_att(DocUri, AttName) ->
+ test_get_already_compressed_att_with_accept_gzip(DocUri, AttName),
+ test_get_already_compressed_att_without_accept(DocUri, AttName),
+ test_get_already_compressed_att_stub(DocUri, AttName).
+
+test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "/" ++ AttName, [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, true, "received body is gziped"),
+ etap:is(
+ iolist_to_binary(Body),
+ iolist_to_binary(zlib:gzip(test_text_data())),
+ "received data for the already compressed attachment is ok"
+ ),
+ ok.
+
+test_get_already_compressed_att_without_accept(DocUri, AttName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "/" ++ AttName, []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, false, "received body is not gziped"),
+ etap:is(
+ iolist_to_binary(Body),
+ iolist_to_binary(test_text_data()),
+ "received data for the already compressed attachment is ok"
+ ),
+ ok.
+
+test_get_already_compressed_att_stub(DocUri, AttName) ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {AttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, iolist_to_binary(AttName)]
+ ),
+ AttLength = couch_util:get_value(<<"length">>, AttJson),
+ etap:is(
+ AttLength,
+ iolist_size((zlib:gzip(test_text_data()))),
+ "Already compressed attachment stub length matches the "
+ "compressed length"
+ ),
+ Encoding = couch_util:get_value(<<"encoding">>, AttJson),
+ etap:is(
+ Encoding,
+ <<"gzip">>,
+ "Already compressed attachment stub has the encoding field set to gzip"
+ ),
+ EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
+ etap:is(
+ EncLength,
+ AttLength,
+ "Already compressed attachment stub encoded_length matches the "
+ "length field value"
+ ),
+ ok.
+
+test_create_already_compressed_att_with_invalid_content_encoding(
+ DocUri, AttName, AttData, Encoding) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {DocUri ++ "/" ++ AttName, [{"Content-Encoding", Encoding}],
+ "text/plain", AttData},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 415,
+ "Couldn't create an already compressed attachment using the "
+ "unsupported encoding '" ++ Encoding ++ "'"
+ ),
+ ok.
+
+test_png_data() ->
+ {ok, Data} = file:read_file(
+ test_util:source_file("share/www/image/logo.png")
+ ),
+ binary_to_list(Data).
+
+test_text_data() ->
+ {ok, Data} = file:read_file(
+ test_util:source_file("README")
+ ),
+ binary_to_list(Data).
diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t
new file mode 100755
index 00000000..0664c116
--- /dev/null
+++ b/test/etap/150-invalid-view-seq.t
@@ -0,0 +1,192 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_name() ->
+ <<"couch_test_invalid_view_seq">>.
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(10),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+%% NOTE: since during the test we stop the server,
+%% a huge and ugly but harmless stack trace is sent to stderr
+%%
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ timer:sleep(1000),
+ delete_db(),
+ create_db(),
+
+ create_docs(),
+ create_design_doc(),
+
+ % make DB file backup
+ backup_db_file(),
+
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+
+ create_new_doc(),
+ query_view_before_restore_backup(),
+
+ % restore DB file backup after querying view
+ restore_backup_db_file(),
+
+ query_view_after_restore_backup(),
+
+ delete_db(),
+ couch_server_sup:stop(),
+ ok.
+
+admin_user_ctx() ->
+ {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db() ->
+ {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
+
+delete_db() ->
+ couch_server:delete(test_db_name(), [admin_user_ctx()]).
+
+create_docs() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ Doc1 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc1">>},
+ {<<"value">>, 1}
+ ]}),
+ Doc2 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc2">>},
+ {<<"value">>, 2}
+ ]}),
+ Doc3 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc3">>},
+ {<<"value">>, 3}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+create_design_doc() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ DDoc = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"_design/foo">>},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>, {[
+ {<<"bar">>, {[
+ {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+ ]}}
+ ]}}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [DDoc]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+backup_db_file() ->
+ DbFile = test_util:build_file("tmp/lib/" ++
+ binary_to_list(test_db_name()) ++ ".couch"),
+ {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
+ ok.
+
+create_new_doc() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ Doc666 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc666">>},
+ {<<"value">>, 999}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [Doc666]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+db_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(test_db_name()).
+
+query_view_before_restore_backup() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/_design/foo/_view/bar", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Got view response before restoring backup."),
+ ViewJson = couch_util:json_decode(Body),
+ Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+ HasDoc1 = has_doc("doc1", Rows),
+ HasDoc2 = has_doc("doc2", Rows),
+ HasDoc3 = has_doc("doc3", Rows),
+ HasDoc666 = has_doc("doc666", Rows),
+ etap:is(HasDoc1, true, "Before backup restore, view has doc1"),
+ etap:is(HasDoc2, true, "Before backup restore, view has doc2"),
+ etap:is(HasDoc3, true, "Before backup restore, view has doc3"),
+ etap:is(HasDoc666, true, "Before backup restore, view has doc666"),
+ ok.
+
+has_doc(DocId1, Rows) ->
+ DocId = iolist_to_binary(DocId1),
+ lists:any(
+ fun({R}) -> lists:member({<<"id">>, DocId}, R) end,
+ Rows
+ ).
+
+restore_backup_db_file() ->
+ couch_server_sup:stop(),
+ timer:sleep(3000),
+ DbFile = test_util:build_file("tmp/lib/" ++
+ binary_to_list(test_db_name()) ++ ".couch"),
+ ok = file:delete(DbFile),
+ ok = file:rename(DbFile ++ ".backup", DbFile),
+ couch_server_sup:start_link([default_config()]),
+ timer:sleep(1000),
+ ok.
+
+query_view_after_restore_backup() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/_design/foo/_view/bar", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Got view response after restoring backup."),
+ ViewJson = couch_util:json_decode(Body),
+ Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+ HasDoc1 = has_doc("doc1", Rows),
+ HasDoc2 = has_doc("doc2", Rows),
+ HasDoc3 = has_doc("doc3", Rows),
+ HasDoc666 = has_doc("doc666", Rows),
+ etap:is(HasDoc1, true, "After backup restore, view has doc1"),
+ etap:is(HasDoc2, true, "After backup restore, view has doc2"),
+ etap:is(HasDoc3, true, "After backup restore, view has doc3"),
+ etap:is(HasDoc666, false, "After backup restore, view does not have doc666"),
+ ok.
diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t
new file mode 100755
index 00000000..f4bd5e27
--- /dev/null
+++ b/test/etap/160-vhosts.t
@@ -0,0 +1,290 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% XXX: Figure out how to -include("couch_rep.hrl")
+-record(http_db, {
+ url,
+ auth = [],
+ resource = "",
+ headers = [
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
+ {"Accept", "application/json"},
+ {"Accept-Encoding", "gzip"}
+ ],
+ qs = [],
+ method = get,
+ body = nil,
+ options = [
+ {response_format,binary},
+ {inactivity_timeout, 30000}
+ ],
+ retries = 10,
+ pause = 1,
+ conn = nil
+}).
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+server() -> "http://127.0.0.1:5984/".
+dbname() -> "etap-test-db".
+admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini",
+ "etc/couchdb/local_dev.ini"
+ ]).
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(14),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link(config_files()),
+ ibrowse:start(),
+ crypto:start(),
+
+ timer:sleep(1000),
+ couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
+ {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
+
+ Doc = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc1">>},
+ {<<"value">>, 666}
+ ]}),
+
+ Doc1 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"_design/doc1">>},
+ {<<"shows">>, {[
+ {<<"test">>, <<"function(doc, req) {
+ return { json: {
+ requested_path: '/' + req.requested_path.join('/'),
+ path: '/' + req.path.join('/')
+ }};
+}">>}
+ ]}},
+ {<<"rewrites">>, [
+ {[
+ {<<"from">>, <<"/">>},
+ {<<"to">>, <<"_show/test">>}
+ ]}
+ ]}
+ ]}),
+
+ {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
+
+ couch_db:ensure_full_commit(Db),
+
+ %% end boilerplate, start test
+
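+    %% Virtual host entries map a Host header (optionally containing * wildcards
+    %% and :variables) to a path inside the server; matched wildcards and
+    %% variables are substituted into the target path before dispatch.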
+ ok = couch_config:set("vhosts", "example.com", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "*.example.com",
+ "/etap-test-db/_design/doc1/_rewrite", false),
+ ok = couch_config:set("vhosts", "example.com/test", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "example1.com",
+ "/etap-test-db/_design/doc1/_rewrite/", false),
+ ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
+ "/:dbname/_design/:appname/_rewrite/", false),
+ ok = couch_config:set("vhosts", ":dbname.example1.com", "/:dbname", false),
+
+ ok = couch_config:set("vhosts", "*.example2.com", "/*", false),
+ ok = couch_config:set("vhosts", "*/test", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
+ ok = couch_config:set("vhosts", "*/test1",
+ "/etap-test-db/_design/doc1/_show/test", false),
+
+ test_regular_request(),
+ test_vhost_request(),
+ test_vhost_request_with_qs(),
+ test_vhost_request_with_global(),
+ test_vhost_requested_path(),
+ test_vhost_requested_path_path(),
+ test_vhost_request_wildcard(),
+ test_vhost_request_replace_var(),
+ test_vhost_request_replace_var1(),
+ test_vhost_request_replace_wildcard(),
+ test_vhost_request_path(),
+ test_vhost_request_path1(),
+ test_vhost_request_path2(),
+ test_vhost_request_path3(),
+
+ %% restart boilerplate
+ couch_db:close(Db),
+ timer:sleep(3000),
+ couch_server_sup:stop(),
+
+ ok.
+
+test_regular_request() ->
+ Result = case ibrowse:send_req(server(), [], get, []) of
+ {ok, _, _, Body} ->
+ {[{<<"couchdb">>, <<"Welcome">>},
+ {<<"version">>,_}
+ ]} = couch_util:json_decode(Body),
+ etap:is(true, true, "should return server info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_with_qs() ->
+ Url = server() ++ "doc1?revs_info=true",
+ case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {JsonProps} = couch_util:json_decode(Body),
+ HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+ etap:is(HasRevsInfo, true, "should return _revs_info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_with_global() ->
+ Url2 = server() ++ "_utils/index.html",
+ case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body2} ->
+ "<!DOCTYPE" ++ _Foo = Body2,
+ etap:is(true, true, "should serve /_utils even inside vhosts");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_requested_path() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"requested_path">>, Json) of
+ <<"/">> -> true;
+ _ -> false
+ end, true, <<"requested path in req ok">>);
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_requested_path_path() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"path in req ok">>);
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_wildcard()->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "test.example.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"wildcard ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_var() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example1.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_var1() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "doc1.etap-test-db.example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"wildcard ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_wildcard() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path() ->
+ Uri = server() ++ "test",
+ case ibrowse:send_req(Uri, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path1() ->
+ Url = server() ++ "test/doc1?revs_info=true",
+ case ibrowse:send_req(Url, [], get, [], []) of
+ {ok, _, _, Body} ->
+ {JsonProps} = couch_util:json_decode(Body),
+ HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+ etap:is(HasRevsInfo, true, "should return _revs_info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path2() ->
+ Uri = server() ++ "test",
+ case ibrowse:send_req(Uri, [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path3() ->
+ Uri = server() ++ "test1",
+ case ibrowse:send_req(Uri, [], get, [], []) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"path in req ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
diff --git a/test/etap/170-os-daemons.es b/test/etap/170-os-daemons.es
new file mode 100755
index 00000000..73974e90
--- /dev/null
+++ b/test/etap/170-os-daemons.es
@@ -0,0 +1,26 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
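+% Minimal OS daemon used by the tests: it blocks reading stdin and exits when
+% the port is closed (eof), so couch_os_daemons fully controls its lifetime.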
+loop() ->
+ loop(io:read("")).
+
+loop({ok, _}) ->
+ loop(io:read(""));
+loop(eof) ->
+ stop;
+loop({error, Reason}) ->
+ throw({error, Reason}).
+
+main([]) ->
+ loop().
diff --git a/test/etap/170-os-daemons.t b/test/etap/170-os-daemons.t
new file mode 100755
index 00000000..6feaa1bf
--- /dev/null
+++ b/test/etap/170-os-daemons.t
@@ -0,0 +1,114 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/170-os-daemons.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(49),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ etap:diag("Daemons boot after configuration added."),
+ couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+ timer:sleep(1000),
+
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, "foo"),
+
+ % Check table form
+ {ok, Tab1} = couch_os_daemons:info(),
+ [T1] = ets:tab2list(Tab1),
+ check_daemon(T1, "foo"),
+
+ etap:diag("Daemons stop after configuration removed."),
+ couch_config:delete("os_daemons", "foo", false),
+ timer:sleep(500),
+
+ {ok, []} = couch_os_daemons:info([table]),
+ {ok, Tab2} = couch_os_daemons:info(),
+ etap:is(ets:tab2list(Tab2), [], "As table returns empty table."),
+
+ etap:diag("Adding multiple daemons causes both to boot."),
+ couch_config:set("os_daemons", "bar", daemon_cmd(), false),
+ couch_config:set("os_daemons", "baz", daemon_cmd(), false),
+ timer:sleep(500),
+ {ok, Daemons} = couch_os_daemons:info([table]),
+ lists:foreach(fun(D) ->
+ check_daemon(D)
+ end, Daemons),
+
+ {ok, Tab3} = couch_os_daemons:info(),
+ lists:foreach(fun(D) ->
+ check_daemon(D)
+ end, ets:tab2list(Tab3)),
+
+ etap:diag("Removing one daemon leaves the other alive."),
+ couch_config:delete("os_daemons", "bar", false),
+ timer:sleep(500),
+
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, "baz"),
+
+ % Check table version
+ {ok, Tab4} = couch_os_daemons:info(),
+ [T4] = ets:tab2list(Tab4),
+ check_daemon(T4, "baz"),
+
+ ok.
+
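+% The configured command is an absolute path to the daemon script, so only its
+% trailing basename is compared against the expected script name.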
+check_daemon(D) ->
+ check_daemon(D, D#daemon.name).
+
+check_daemon(D, Name) ->
+ BaseName = "170-os-daemons.es",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
diff --git a/test/etap/171-os-daemons-config.es b/test/etap/171-os-daemons-config.es
new file mode 100755
index 00000000..1f68ddc6
--- /dev/null
+++ b/test/etap/171-os-daemons-config.es
@@ -0,0 +1,83 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+filename() ->
+ list_to_binary(test_util:source_file("test/etap/171-os-daemons-config.es")).
+
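+% The OS daemon protocol is line-oriented JSON over stdio: each request is
+% written out as a JSON array followed by a newline, and each reply is read
+% back as a single JSON line.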
+read() ->
+ case io:get_line('') of
+ eof ->
+ stop;
+ Data ->
+ couch_util:json_decode(Data)
+ end.
+
+write(Mesg) ->
+ Data = iolist_to_binary(couch_util:json_encode(Mesg)),
+ io:format(binary_to_list(Data) ++ "\n", []).
+
+get_cfg(Section) ->
+ write([<<"get">>, Section]),
+ read().
+
+get_cfg(Section, Name) ->
+ write([<<"get">>, Section, Name]),
+ read().
+
+log(Mesg) ->
+ write([<<"log">>, Mesg]).
+
+log(Mesg, Level) ->
+ write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
+
+test_get_cfg1() ->
+ FileName = filename(),
+ {[{<<"foo">>, FileName}]} = get_cfg(<<"os_daemons">>).
+
+test_get_cfg2() ->
+ FileName = filename(),
+ FileName = get_cfg(<<"os_daemons">>, <<"foo">>),
+ <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
+
+test_get_unknown_cfg() ->
+ {[]} = get_cfg(<<"aal;3p4">>),
+ null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
+
+test_log() ->
+ log(<<"foobar!">>),
+ log(<<"some stuff!">>, <<"debug">>),
+ log(2),
+ log(true),
+ write([<<"log">>, <<"stuff">>, 2]),
+ write([<<"log">>, 3, null]),
+ write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
+ write([<<"log">>, <<"true">>, {[]}]).
+
+do_tests() ->
+ test_get_cfg1(),
+ test_get_cfg2(),
+ test_get_unknown_cfg(),
+ test_log(),
+ loop(io:read("")).
+
+loop({ok, _}) ->
+ loop(io:read(""));
+loop(eof) ->
+ init:stop();
+loop({error, _Reason}) ->
+ init:stop().
+
+main([]) ->
+ test_util:init_code_path(),
+ do_tests().
diff --git a/test/etap/171-os-daemons-config.t b/test/etap/171-os-daemons-config.t
new file mode 100755
index 00000000..e9dc3f32
--- /dev/null
+++ b/test/etap/171-os-daemons-config.t
@@ -0,0 +1,74 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/171-os-daemons-config.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(6),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_config:set("log", "level", "debug", false),
+ couch_log:start_link(),
+ couch_os_daemons:start_link(),
+
+    % The daemon name "foo" is required by the companion
+    % 171-os-daemons-config.es script.
+ couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+ timer:sleep(1000),
+
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, "foo"),
+
+ ok.
+
+check_daemon(D, Name) ->
+ BaseName = "171-os-daemons-config.es",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
diff --git a/test/etap/172-os-daemon-errors.1.es b/test/etap/172-os-daemon-errors.1.es
new file mode 100644
index 00000000..a9defba1
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.1.es
@@ -0,0 +1,22 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+% Please do not make this file executable as that's the error being tested.
+
+loop() ->
+ timer:sleep(5000),
+ loop().
+
+main([]) ->
+ loop().
diff --git a/test/etap/172-os-daemon-errors.2.es b/test/etap/172-os-daemon-errors.2.es
new file mode 100755
index 00000000..52de0401
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.2.es
@@ -0,0 +1,16 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.3.es b/test/etap/172-os-daemon-errors.3.es
new file mode 100755
index 00000000..64229800
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.3.es
@@ -0,0 +1,17 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ timer:sleep(1000),
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.4.es b/test/etap/172-os-daemon-errors.4.es
new file mode 100755
index 00000000..577f3410
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.4.es
@@ -0,0 +1,17 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ timer:sleep(2000),
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.t b/test/etap/172-os-daemon-errors.t
new file mode 100755
index 00000000..287a0812
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.t
@@ -0,0 +1,126 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+bad_perms() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.1.es").
+
+die_on_boot() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.2.es").
+
+die_quickly() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.3.es").
+
+can_reboot() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.4.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(36),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ etap:diag("Daemon not executable."),
+ test_halts("foo", bad_perms(), 1000),
+
+ etap:diag("Daemon dies on boot."),
+ test_halts("bar", die_on_boot(), 1000),
+
+ etap:diag("Daemon dies quickly after boot."),
+ test_halts("baz", die_quickly(), 4000),
+
+ etap:diag("Daemon dies, but not quickly enough to be halted."),
+ test_runs("bam", can_reboot()),
+
+ ok.
+
+test_halts(Name, Cmd, Time) ->
+ couch_config:set("os_daemons", Name, Cmd ++ " 2> /dev/null", false),
+ timer:sleep(Time),
+ {ok, [D]} = couch_os_daemons:info([table]),
+ check_dead(D, Name, Cmd),
+ couch_config:delete("os_daemons", Name, false).
+
+test_runs(Name, Cmd) ->
+ couch_config:set("os_daemons", Name, Cmd, false),
+
+ timer:sleep(1000),
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, Name, Cmd, 0),
+
+ % Should reboot every two seconds. We're at 1s, so wait
+    % until 3s to be in the middle of the next invocation's
+ % life span.
+ timer:sleep(2000),
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, Name, Cmd, 1),
+
+ % If the kill command changed, that means we rebooted the process.
+ etap:isnt(D1#daemon.kill, D2#daemon.kill, "Kill command changed.").
+
+check_dead(D, Name, Cmd) ->
+ BaseName = filename:basename(Cmd) ++ " 2> /dev/null",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, halted, "Daemon has been halted."),
+ etap:is(D#daemon.errors, nil, "Errors have been disabled."),
+ etap:is(D#daemon.buf, nil, "Buffer has been switched off.").
+
+check_daemon(D, Name, Cmd, Errs) ->
+ BaseName = filename:basename(Cmd),
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, running, "Daemon still running."),
+ etap:is(length(D#daemon.errors), Errs, "Found expected number of errors."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
+
diff --git a/test/etap/173-os-daemon-cfg-register.es b/test/etap/173-os-daemon-cfg-register.es
new file mode 100755
index 00000000..3d536dc7
--- /dev/null
+++ b/test/etap/173-os-daemon-cfg-register.es
@@ -0,0 +1,35 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+write(Mesg) ->
+ Data = iolist_to_binary(couch_util:json_encode(Mesg)),
+ io:format(binary_to_list(Data) ++ "\n", []).
+
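+% Registers interest in config sections/keys via the OS daemon protocol, so
+% couch_os_daemons restarts this daemon whenever those settings change.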
+cfg_register(Section) ->
+ write([<<"register">>, Section]).
+
+cfg_register(Section, Key) ->
+ write([<<"register">>, Section, Key]).
+
+wait(_) ->
+ init:stop().
+
+do_tests() ->
+ cfg_register(<<"s1">>),
+ cfg_register(<<"s2">>, <<"k">>),
+ wait(io:read("")).
+
+main([]) ->
+ test_util:init_code_path(),
+ do_tests().
diff --git a/test/etap/173-os-daemon-cfg-register.t b/test/etap/173-os-daemon-cfg-register.t
new file mode 100755
index 00000000..3ee2969a
--- /dev/null
+++ b/test/etap/173-os-daemon-cfg-register.t
@@ -0,0 +1,98 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_name() ->
+ "wheee".
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/173-os-daemon-cfg-register.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(27),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ DaemonCmd = daemon_cmd() ++ " 2> /dev/null",
+
+ etap:diag("Booting the daemon"),
+ couch_config:set("os_daemons", daemon_name(), DaemonCmd, false),
+ timer:sleep(1000),
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, running),
+
+ etap:diag("Daemon restarts when section changes."),
+ couch_config:set("s1", "k", "foo", false),
+ timer:sleep(1000),
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, running),
+ etap:isnt(D2#daemon.kill, D1#daemon.kill, "Kill command shows restart."),
+
+ etap:diag("Daemon doesn't restart for ignored section key."),
+ couch_config:set("s2", "k2", "baz", false),
+ timer:sleep(1000),
+ {ok, [D3]} = couch_os_daemons:info([table]),
+ etap:is(D3, D2, "Same daemon info after ignored config change."),
+
+ etap:diag("Daemon restarts for specific section/key pairs."),
+ couch_config:set("s2", "k", "bingo", false),
+ timer:sleep(1000),
+ {ok, [D4]} = couch_os_daemons:info([table]),
+ check_daemon(D4, running),
+ etap:isnt(D4#daemon.kill, D3#daemon.kill, "Kill command changed again."),
+
+ ok.
+
+check_daemon(D, Status) ->
+ BaseName = filename:basename(daemon_cmd()) ++ " 2> /dev/null",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, daemon_name(), "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, Status, "Daemon status is correct."),
+ etap:is(D#daemon.cfg_patterns, [{"s1"}, {"s2", "k"}], "Cfg patterns set"),
+ etap:is(D#daemon.errors, [], "No errors have occurred."),
+ etap:isnt(D#daemon.buf, nil, "Buffer is active.").
diff --git a/test/etap/180-http-proxy.ini b/test/etap/180-http-proxy.ini
new file mode 100644
index 00000000..72a63f66
--- /dev/null
+++ b/test/etap/180-http-proxy.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements. See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership. The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License. You may obtain a copy of the License at
+;
+; http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied. See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[httpd_global_handlers]
+_test = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5985/">>}
+_error = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5986/">>} \ No newline at end of file
diff --git a/test/etap/180-http-proxy.t b/test/etap/180-http-proxy.t
new file mode 100755
index 00000000..b91d901b
--- /dev/null
+++ b/test/etap/180-http-proxy.t
@@ -0,0 +1,357 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(req, {method=get, path="", headers=[], body="", opts=[]}).
+
+default_config() ->
+ [
+ test_util:build_file("etc/couchdb/default_dev.ini"),
+ test_util:source_file("test/etap/180-http-proxy.ini")
+ ].
+
+server() -> "http://127.0.0.1:5984/_test/".
+proxy() -> "http://127.0.0.1:5985/".
+external() -> "https://www.google.com/".
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(61),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag("Test died abnormally: ~p", [Other]),
+ etap:bail("Bad return value.")
+ end,
+ ok.
+
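+% Remote is the assertion installed into the fake backend (test_web) for this
+% request; Local is a predicate applied to the ibrowse response seen by the
+% client. Either side can be skipped by passing no_remote or no_local.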
+check_request(Name, Req, Remote, Local) ->
+ case Remote of
+ no_remote -> ok;
+ _ -> test_web:set_assert(Remote)
+ end,
+ Url = case proplists:lookup(url, Req#req.opts) of
+ none -> server() ++ Req#req.path;
+ {url, DestUrl} -> DestUrl
+ end,
+ Opts = [{headers_as_is, true} | Req#req.opts],
+    Resp = ibrowse:send_req(
+ Url, Req#req.headers, Req#req.method, Req#req.body, Opts
+ ),
+ %etap:diag("ibrowse response: ~p", [Resp]),
+ case Local of
+ no_local -> ok;
+ _ -> etap:fun_is(Local, Resp, Name)
+ end,
+ case {Remote, Local} of
+ {no_remote, _} ->
+ ok;
+ {_, no_local} ->
+ ok;
+ _ ->
+ etap:is(test_web:check_last(), was_ok, Name ++ " - request handled")
+ end,
+ Resp.
+
+test() ->
+ couch_server_sup:start_link(default_config()),
+ ibrowse:start(),
+ crypto:start(),
+ test_web:start_link(),
+
+ test_basic(),
+ test_alternate_status(),
+ test_trailing_slash(),
+ test_passes_header(),
+ test_passes_host_header(),
+ test_passes_header_back(),
+ test_rewrites_location_headers(),
+ test_doesnt_rewrite_external_locations(),
+ test_rewrites_relative_location(),
+ test_uses_same_version(),
+ test_passes_body(),
+ test_passes_eof_body_back(),
+ test_passes_chunked_body(),
+ test_passes_chunked_body_back(),
+
+ test_connect_error(),
+
+ ok.
+
+test_basic() ->
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/" = Req:get(path),
+ undefined = Req:get(body_length),
+ undefined = Req:recv_body(),
+ {ok, {200, [{"Content-Type", "text/plain"}], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ check_request("Basic proxy test", #req{}, Remote, Local).
+
+test_alternate_status() ->
+ Remote = fun(Req) ->
+ "/alternate_status" = Req:get(path),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{path="alternate_status"},
+ check_request("Alternate status", Req, Remote, Local).
+
+test_trailing_slash() ->
+ Remote = fun(Req) ->
+ "/trailing_slash/" = Req:get(path),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{path="trailing_slash/"},
+ check_request("Trailing slash", Req, Remote, Local).
+
+test_passes_header() ->
+ Remote = fun(Req) ->
+ "/passes_header" = Req:get(path),
+ "plankton" = Req:get_header_value("X-CouchDB-Ralph"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="passes_header",
+ headers=[{"X-CouchDB-Ralph", "plankton"}]
+ },
+ check_request("Passes header", Req, Remote, Local).
+
+test_passes_host_header() ->
+ Remote = fun(Req) ->
+ "/passes_host_header" = Req:get(path),
+ "www.google.com" = Req:get_header_value("Host"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="passes_host_header",
+ headers=[{"Host", "www.google.com"}]
+ },
+ check_request("Passes host header", Req, Remote, Local).
+
+test_passes_header_back() ->
+ Remote = fun(Req) ->
+ "/passes_header_back" = Req:get(path),
+ {ok, {200, [{"X-CouchDB-Plankton", "ralph"}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", Headers, "ok"}) ->
+ lists:member({"X-CouchDB-Plankton", "ralph"}, Headers);
+ (_) ->
+ false
+ end,
+ Req = #req{path="passes_header_back"},
+ check_request("Passes header back", Req, Remote, Local).
+
+test_rewrites_location_headers() ->
+ etap:diag("Testing location header rewrites."),
+ do_rewrite_tests([
+ {"Location", proxy() ++ "foo/bar", server() ++ "foo/bar"},
+ {"Content-Location", proxy() ++ "bing?q=2", server() ++ "bing?q=2"},
+ {"Uri", proxy() ++ "zip#frag", server() ++ "zip#frag"},
+ {"Destination", proxy(), server()}
+ ]).
+
+test_doesnt_rewrite_external_locations() ->
+ etap:diag("Testing no rewrite of external locations."),
+ do_rewrite_tests([
+ {"Location", external() ++ "search", external() ++ "search"},
+ {"Content-Location", external() ++ "s?q=2", external() ++ "s?q=2"},
+ {"Uri", external() ++ "f#f", external() ++ "f#f"},
+ {"Destination", external() ++ "f?q=2#f", external() ++ "f?q=2#f"}
+ ]).
+
+test_rewrites_relative_location() ->
+ etap:diag("Testing relative rewrites."),
+ do_rewrite_tests([
+ {"Location", "/foo", server() ++ "foo"},
+ {"Content-Location", "bar", server() ++ "bar"},
+ {"Uri", "/zing?q=3", server() ++ "zing?q=3"},
+ {"Destination", "bing?q=stuff#yay", server() ++ "bing?q=stuff#yay"}
+ ]).
+
+do_rewrite_tests(Tests) ->
+ lists:foreach(fun({Header, Location, Url}) ->
+ do_rewrite_test(Header, Location, Url)
+ end, Tests).
+
+do_rewrite_test(Header, Location, Url) ->
+ Remote = fun(Req) ->
+ "/rewrite_test" = Req:get(path),
+ {ok, {302, [{Header, Location}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "302", Headers, "ok"}) ->
+ etap:is(
+ couch_util:get_value(Header, Headers),
+ Url,
+ "Header rewritten correctly."
+ ),
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{path="rewrite_test"},
+ Label = "Rewrite test for ",
+ check_request(Label ++ Header, Req, Remote, Local).
+
+test_uses_same_version() ->
+ Remote = fun(Req) ->
+ "/uses_same_version" = Req:get(path),
+ {1, 0} = Req:get(version),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="uses_same_version",
+ opts=[{http_vsn, {1, 0}}]
+ },
+ check_request("Uses same version", Req, Remote, Local).
+
+test_passes_body() ->
+ Remote = fun(Req) ->
+ 'PUT' = Req:get(method),
+ "/passes_body" = Req:get(path),
+ <<"Hooray!">> = Req:recv_body(),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ method=put,
+ path="passes_body",
+ body="Hooray!"
+ },
+ check_request("Passes body", Req, Remote, Local).
+
+test_passes_eof_body_back() ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_eof_body" = Req:get(path),
+ {raw, {200, [{"Connection", "close"}], BodyChunks}}
+ end,
+ Local = fun({ok, "200", _, "foobarbazinga"}) -> true; (_) -> false end,
+ Req = #req{path="passes_eof_body"},
+ check_request("Passes eof body", Req, Remote, Local).
+
+test_passes_chunked_body() ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'POST' = Req:get(method),
+ "/passes_chunked_body" = Req:get(path),
+ RecvBody = fun
+ ({Length, Chunk}, [Chunk | Rest]) ->
+ Length = size(Chunk),
+ Rest;
+ ({0, []}, []) ->
+ ok
+ end,
+ ok = Req:stream_body(1024*1024, RecvBody, BodyChunks),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ method=post,
+ path="passes_chunked_body",
+ headers=[{"Transfer-Encoding", "chunked"}],
+ body=mk_chunked_body(BodyChunks)
+ },
+ check_request("Passes chunked body", Req, Remote, Local).
+
+test_passes_chunked_body_back() ->
+ Name = "Passes chunked body back",
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_chunked_body_back" = Req:get(path),
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ {chunked, {200, [{"Transfer-Encoding", "chunked"}], BodyChunks}}
+ end,
+ Req = #req{
+ path="passes_chunked_body_back",
+ opts=[{stream_to, self()}]
+ },
+
+ Resp = check_request(Name, Req, Remote, no_local),
+
+ etap:fun_is(
+ fun({ibrowse_req_id, _}) -> true; (_) -> false end,
+ Resp,
+ "Received an ibrowse request id."
+ ),
+ {_, ReqId} = Resp,
+
+ % Grab headers from response
+ receive
+ {ibrowse_async_headers, ReqId, "200", Headers} ->
+ etap:is(
+ proplists:get_value("Transfer-Encoding", Headers),
+ "chunked",
+ "Response included the Transfer-Encoding: chunked header"
+ ),
+ ibrowse:stream_next(ReqId)
+ after 1000 ->
+ throw({error, timeout})
+ end,
+
+ % Check body received
+ % TODO: When we upgrade to ibrowse >= 2.0.0 this check needs to
+ % check that the chunks returned are what we sent from the
+ % Remote test.
+ etap:diag("TODO: UPGRADE IBROWSE"),
+ etap:is(recv_body(ReqId, []), <<"foobarbazinga">>, "Decoded chunked body."),
+
+ % Check test_web server.
+ etap:is(test_web:check_last(), was_ok, Name ++ " - request handled").
+
+test_connect_error() ->
+ Local = fun({ok, "500", _Headers, _Body}) -> true; (_) -> false end,
+ Req = #req{opts=[{url, "http://127.0.0.1:5984/_error"}]},
+ check_request("Connect error", Req, no_remote, Local).
+
+
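+% Builds an HTTP/1.1 chunked transfer-encoded body by hand: each chunk is
+% emitted as its size in hex, CRLF, the data, CRLF, and the body is terminated
+% with "0\r\n\r\n".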
+mk_chunked_body(Chunks) ->
+ mk_chunked_body(Chunks, []).
+
+mk_chunked_body([], Acc) ->
+ iolist_to_binary(lists:reverse(Acc, "0\r\n\r\n"));
+mk_chunked_body([Chunk | Rest], Acc) ->
+ Size = to_hex(size(Chunk)),
+ mk_chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
+
+to_hex(Val) ->
+ to_hex(Val, []).
+
+to_hex(0, Acc) ->
+ Acc;
+to_hex(Val, Acc) ->
+ to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
+recv_body(ReqId, Acc) ->
+ receive
+ {ibrowse_async_response, ReqId, Data} ->
+ recv_body(ReqId, [Data | Acc]);
+ {ibrowse_async_response_end, ReqId} ->
+ iolist_to_binary(lists:reverse(Acc));
+ Else ->
+ throw({error, unexpected_mesg, Else})
+ after 5000 ->
+ throw({error, timeout})
+ end.
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index d0f751f8..59d21cda 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -11,10 +11,10 @@
## the License.
noinst_SCRIPTS = run
-noinst_DATA = test_util.beam
+noinst_DATA = test_util.beam test_web.beam
%.beam: %.erl
- erlc $<
+ $(ERLC) $<
run: run.tpl
sed -e "s|%abs_top_srcdir%|@abs_top_srcdir@|g" \
@@ -27,6 +27,7 @@ DISTCLEANFILES = temp.*
EXTRA_DIST = \
run.tpl \
+ test_web.erl \
001-load.t \
002-icu-driver.t \
010-file-basics.t \
@@ -58,7 +59,25 @@ EXTRA_DIST = \
110-replication-httpc.t \
111-replication-changes-feed.t \
112-replication-missing-revs.t \
+ 113-replication-attachment-comp.t \
120-stats-collect.t \
121-stats-aggregates.cfg \
121-stats-aggregates.ini \
- 121-stats-aggregates.t
+ 121-stats-aggregates.t \
+ 130-attachments-md5.t \
+ 140-attachment-comp.t \
+ 150-invalid-view-seq.t \
+ 160-vhosts.t \
+ 170-os-daemons.es \
+ 170-os-daemons.t \
+ 171-os-daemons-config.es \
+ 171-os-daemons-config.t \
+ 172-os-daemon-errors.1.es \
+ 172-os-daemon-errors.2.es \
+ 172-os-daemon-errors.3.es \
+ 172-os-daemon-errors.4.es \
+ 172-os-daemon-errors.t \
+ 173-os-daemon-cfg-register.es \
+ 173-os-daemon-cfg-register.t \
+ 180-http-proxy.ini \
+ 180-http-proxy.t
diff --git a/test/etap/test_web.erl b/test/etap/test_web.erl
new file mode 100644
index 00000000..16438b31
--- /dev/null
+++ b/test/etap/test_web.erl
@@ -0,0 +1,99 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(test_web).
+-behaviour(gen_server).
+
+-export([start_link/0, loop/1, get_port/0, set_assert/1, check_last/0]).
+-export([init/1, terminate/2, code_change/3]).
+-export([handle_call/3, handle_cast/2, handle_info/2]).
+
+-define(SERVER, test_web_server).
+-define(HANDLER, test_web_handler).
+
+start_link() ->
+ gen_server:start({local, ?HANDLER}, ?MODULE, [], []),
+ mochiweb_http:start([
+ {name, ?SERVER},
+ {loop, {?MODULE, loop}},
+ {port, 5985}
+ ]).
+
+loop(Req) ->
+ %etap:diag("Handling request: ~p", [Req]),
+ case gen_server:call(?HANDLER, {check_request, Req}) of
+ {ok, RespInfo} ->
+ {ok, Req:respond(RespInfo)};
+ {raw, {Status, Headers, BodyChunks}} ->
+ Resp = Req:start_response({Status, Headers}),
+ lists:foreach(fun(C) -> Resp:send(C) end, BodyChunks),
+ erlang:put(mochiweb_request_force_close, true),
+ {ok, Resp};
+ {chunked, {Status, Headers, BodyChunks}} ->
+ Resp = Req:respond({Status, Headers, chunked}),
+ timer:sleep(500),
+ lists:foreach(fun(C) -> Resp:write_chunk(C) end, BodyChunks),
+ Resp:write_chunk([]),
+ {ok, Resp};
+ {error, Reason} ->
+ etap:diag("Error: ~p", [Reason]),
+ Body = lists:flatten(io_lib:format("Error: ~p", [Reason])),
+ {ok, Req:respond({200, [], Body})}
+ end.
+
+get_port() ->
+ mochiweb_socket_server:get(?SERVER, port).
+
+set_assert(Fun) ->
+ ok = gen_server:call(?HANDLER, {set_assert, Fun}).
+
+check_last() ->
+ gen_server:call(?HANDLER, last_status).
+
+init(_) ->
+ {ok, nil}.
+
+terminate(_Reason, _State) ->
+ ok.
+
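+% The server state is either nil (no assertion installed), a fun/1 assertion
+% set via set_assert/1, or an atom (was_ok | not_ok) recording how the last
+% request was handled; check_last/0 reads that status and resets it to nil.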
+handle_call({check_request, Req}, _From, State) when is_function(State, 1) ->
+ Resp2 = case (catch State(Req)) of
+ {ok, Resp} -> {reply, {ok, Resp}, was_ok};
+ {raw, Resp} -> {reply, {raw, Resp}, was_ok};
+ {chunked, Resp} -> {reply, {chunked, Resp}, was_ok};
+ Error -> {reply, {error, Error}, not_ok}
+ end,
+ Req:cleanup(),
+ Resp2;
+handle_call({check_request, _Req}, _From, _State) ->
+ {reply, {error, no_assert_function}, not_ok};
+handle_call(last_status, _From, State) when is_atom(State) ->
+ {reply, State, nil};
+handle_call(last_status, _From, State) ->
+ {reply, {error, not_checked}, State};
+handle_call({set_assert, Fun}, _From, nil) ->
+ {reply, ok, Fun};
+handle_call({set_assert, _}, _From, State) ->
+ {reply, {error, assert_function_set}, State};
+handle_call(Msg, _From, State) ->
+ {reply, {ignored, Msg}, State}.
+
+handle_cast(Msg, State) ->
+ etap:diag("Ignoring cast message: ~p", [Msg]),
+ {noreply, State}.
+
+handle_info(Msg, State) ->
+ etap:diag("Ignoring info message: ~p", [Msg]),
+ {noreply, State}.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js
index f92cf119..5f4716d2 100644
--- a/test/javascript/couch_http.js
+++ b/test/javascript/couch_http.js
@@ -15,11 +15,15 @@
if(typeof(CouchHTTP) != "undefined") {
CouchHTTP.prototype.open = function(method, url, async) {
- if(/^\s*http:\/\//.test(url)) {
- return this._open(method, url, async);
- } else {
- return this._open(method, this.base_url + url, async);
+ if(!/^\s*http:\/\//.test(url)) {
+ if(/^[^\/]/.test(url)) {
+ url = this.base_url + "/" + url;
+ } else {
+ url = this.base_url + url;
+ }
}
+
+ return this._open(method, url, async);
};
CouchHTTP.prototype.setRequestHeader = function(name, value) {
@@ -52,6 +56,7 @@
}
})();
+CouchDB.urlPrefix = "";
CouchDB.newXhr = function() {
return new CouchHTTP();
};
diff --git a/test/view_server/query_server_spec.rb b/test/view_server/query_server_spec.rb
index 1de8e5bc..de1df5c1 100644
--- a/test/view_server/query_server_spec.rb
+++ b/test/view_server/query_server_spec.rb
@@ -139,7 +139,7 @@ functions = {
"js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}},
"erlang" => <<-ERLANG
fun({Doc}) ->
- A = proplists:get_value(<<"a">>, Doc, null),
+ A = couch_util:get_value(<<"a">>, Doc, null),
Emit(<<"foo">>, A),
Emit(<<"bar">>, A)
end.
@@ -153,7 +153,7 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}) ->
- A = proplists:get_value(<<"a">>, Doc, null),
+ A = couch_util:get_value(<<"a">>, Doc, null),
Emit(<<"baz">>, A)
end.
ERLANG
@@ -175,7 +175,7 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({NewDoc}, _OldDoc, _UserCtx) ->
- case proplists:get_value(<<"bad">>, NewDoc) of
+ case couch_util:get_value(<<"bad">>, NewDoc) of
undefined -> 1;
_ -> {[{forbidden, <<"bad doc">>}]}
end
@@ -191,8 +191,8 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}, Req) ->
- Title = proplists:get_value(<<"title">>, Doc),
- Body = proplists:get_value(<<"body">>, Doc),
+ Title = couch_util:get_value(<<"title">>, Doc),
+ Body = couch_util:get_value(<<"body">>, Doc),
Resp = <<Title/binary, " - ", Body/binary>>,
{[{<<"body">>, Resp}]}
end.
@@ -208,8 +208,8 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}, Req) ->
- Title = proplists:get_value(<<"title">>, Doc),
- Body = proplists:get_value(<<"body">>, Doc),
+ Title = couch_util:get_value(<<"title">>, Doc),
+ Body = couch_util:get_value(<<"body">>, Doc),
Resp = <<Title/binary, " - ", Body/binary>>,
{[
{<<"code">>, 200},
@@ -256,9 +256,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
{ok, _} = FoldRows(Fun, nil),
@@ -283,7 +283,7 @@ functions = {
fun(Head, Req) ->
Send(<<"bacon">>),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
Send(<<"eggs">>),
{ok, nil}
end,
@@ -307,9 +307,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
FoldRows(Fun, nil),
@@ -335,13 +335,13 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun
({Row}, Count) when Count < 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, Count+1};
({Row}, Count) when Count == 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{stop, <<"early tail">>}
end,
{ok, Tail} = FoldRows(Fun, 0),
@@ -380,10 +380,10 @@ functions = {
Send(<<"bacon">>),
Fun = fun
({Row}, Count) when Count < 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, Count+1};
({Row}, Count) when Count == 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{stop, <<"early">>}
end,
{ok, Tail} = FoldRows(Fun, 0),
@@ -408,9 +408,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
FoldRows(Fun, nil),
@@ -428,7 +428,7 @@ functions = {
JS
"erlang" => <<-ERLANG,
fun({Doc}, Req) ->
- proplists:get_value(<<"good">>, Doc)
+ couch_util:get_value(<<"good">>, Doc)
end.
ERLANG
},