diff options
Diffstat (limited to 'test')
-rwxr-xr-x | test/etap/010-file-basics.t | 15 | ||||
-rwxr-xr-x | test/etap/030-doc-from-json.t | 16 | ||||
-rwxr-xr-x | test/etap/031-doc-to-json.t | 21 | ||||
-rwxr-xr-x | test/etap/040-util.t | 37 | ||||
-rwxr-xr-x | test/etap/050-stream.t | 6 | ||||
-rwxr-xr-x | test/etap/060-kt-merging.t | 97 | ||||
-rwxr-xr-x | test/etap/070-couch-db.t | 13 | ||||
-rwxr-xr-x | test/etap/090-task-status.t | 4 | ||||
-rwxr-xr-x | test/etap/100-ref-counter.t | 31 | ||||
-rwxr-xr-x | test/etap/110-replication-httpc.t | 6 | ||||
-rwxr-xr-x | test/etap/111-replication-changes-feed.t | 16 | ||||
-rwxr-xr-x | test/etap/112-replication-missing-revs.t | 21 | ||||
-rwxr-xr-x | test/etap/113-replication-attachment-comp.t | 317 | ||||
-rwxr-xr-x | test/etap/121-stats-aggregates.t | 4 | ||||
-rwxr-xr-x | test/etap/130-attachments-md5.t | 8 | ||||
-rwxr-xr-x | test/etap/140-attachment-comp.t | 711 | ||||
-rwxr-xr-x | test/etap/150-invalid-view-seq.t | 192 | ||||
-rwxr-xr-x | test/etap/160-vhosts.t | 131 | ||||
-rw-r--r-- | test/etap/Makefile.am | 9 | ||||
-rw-r--r-- | test/javascript/couch_http.js | 13 | ||||
-rw-r--r-- | test/view_server/query_server_spec.rb | 40 |
21 files changed, 1544 insertions, 164 deletions
diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t index 09b2f2b1..a3599f1a 100755 --- a/test/etap/010-file-basics.t +++ b/test/etap/010-file-basics.t @@ -16,7 +16,7 @@ filename() -> test_util:build_file("test/etap/temp.010"). main(_) -> test_util:init_code_path(), - etap:plan(16), + etap:plan(19), case (catch test()) of ok -> etap:end_tests(); @@ -67,6 +67,19 @@ test() -> {ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>), etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos), "Reading a term from a written binary term representation succeeds."), + + BigBin = list_to_binary(lists:duplicate(100000, 0)), + {ok, BigBinPos} = couch_file:append_binary(Fd, BigBin), + etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos), + "Reading a large term from a written representation succeeds."), + + ok = couch_file:write_header(Fd, hello), + etap:is({ok, hello}, couch_file:read_header(Fd), + "Reading a header succeeds."), + + {ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin), + etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2), + "Reading a large term from a written representation succeeds 2."), % append_binary == append_iolist? % Possible bug in pread_iolist or iolist() -> append_binary diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t index dc3327aa..c4ef649a 100755 --- a/test/etap/030-doc-from-json.t +++ b/test/etap/030-doc-from-json.t @@ -17,7 +17,11 @@ %% XXX: Figure out how to -include("couch_db.hrl") -record(doc, {id= <<"">>, revs={0, []}, body={[]}, atts=[], deleted=false, meta=[]}). --record(att, {name, type, len, md5= <<>>, revpos=0, data}). +-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, + encoding=identity}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). main(_) -> test_util:init_code_path(), @@ -32,6 +36,8 @@ main(_) -> ok. 
test() -> + couch_config:start_link([default_config()]), + couch_config:set("attachments", "compression_level", "0"), ok = test_from_json_success(), ok = test_from_json_errors(), ok. @@ -85,13 +91,17 @@ test_from_json_success() -> name = <<"my_attachment.fu">>, data = stub, type = <<"application/awesome">>, - len = 45 + att_len = 45, + disk_len = 45, + revpos = nil }, #att{ name = <<"noahs_private_key.gpg">>, data = <<"I have a pet fish!">>, type = <<"application/pgp-signature">>, - len = 18 + att_len = 18, + disk_len = 18, + revpos = 0 } ]}, "Attachments are parsed correctly." diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t index 4e7a175f..605a6d00 100755 --- a/test/etap/031-doc-to-json.t +++ b/test/etap/031-doc-to-json.t @@ -17,7 +17,11 @@ %% XXX: Figure out how to -include("couch_db.hrl") -record(doc, {id= <<"">>, revs={0, []}, body={[]}, atts=[], deleted=false, meta=[]}). --record(att, {name, type, len, md5= <<>>, revpos=0, data}). +-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, + encoding=identity}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). main(_) -> test_util:init_code_path(), @@ -32,6 +36,8 @@ main(_) -> ok. test() -> + couch_config:start_link([default_config()]), + couch_config:set("attachments", "compression_level", "0"), ok = test_to_json_success(), ok. @@ -72,8 +78,8 @@ test_to_json_success() -> }, { #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}}, - {[{<<"_id">>, <<>>}, {<<"_deleted">>, true}]}, - "Deleted docs drop body members." + {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]}, + "Deleted docs no longer drop body members." 
}, { #doc{meta=[ @@ -116,14 +122,16 @@ test_to_json_success() -> type = <<"xml/sucks">>, data = fun() -> ok end, revpos = 1, - len = 400 + att_len = 400, + disk_len = 400 }, #att{ name = <<"fast.json">>, type = <<"json/ftw">>, data = <<"{\"so\": \"there!\"}">>, revpos = 1, - len = 16 + att_len = 16, + disk_len = 16 } ]}, {[ @@ -153,7 +161,8 @@ test_to_json_success() -> type = <<"text/plain">>, data = fun() -> <<"diet pepsi">> end, revpos = 1, - len = 10 + att_len = 10, + disk_len = 10 }, #att{ name = <<"food.now">>, diff --git a/test/etap/040-util.t b/test/etap/040-util.t index 6d6da2c1..8f80db87 100755 --- a/test/etap/040-util.t +++ b/test/etap/040-util.t @@ -17,7 +17,7 @@ main(_) -> test_util:init_code_path(), application:start(crypto), - etap:plan(11), + etap:plan(14), case (catch test()) of ok -> etap:end_tests(); @@ -35,29 +35,6 @@ test() -> etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"), "A list of atoms is one munged atom."), - % terminate_linked - Self = self(), - - spawn(fun() -> - SecondSelf = self(), - ChildPid = spawn_link(fun() -> - SecondSelf ! {child, started}, - receive shutdown -> ok end - end), - PidUp = receive - {child, started} -> ok - after 1000 -> - {error, timeout} - end, - etap:is(ok, PidUp, "Started a linked process."), - couch_util:terminate_linked(normal), - Self ! 
{pid, ChildPid} - end), - receive - {pid, Pid} -> - etap:ok(not is_process_alive(Pid), "Linked process was killed.") - end, - % implode etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"), "use & as separator in list."), @@ -88,4 +65,16 @@ test() -> etap:ok(not couch_util:should_flush(), "Checking to flush invokes GC."), + % verify + etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"), + "String comparison."), + etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"), + "String comparison (unequal lengths)."), + etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>), + "Binary comparison."), + etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>), + "Binary comparison (unequal lengths)."), + etap:is(false, couch_util:verify(nil, <<"ahBase3r">>), + "Binary comparison with atom."), + ok. diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t index 9324916c..545dd524 100755 --- a/test/etap/050-stream.t +++ b/test/etap/050-stream.t @@ -42,7 +42,7 @@ test() -> etap:is(ok, couch_stream:write(Stream, <<>>), "Writing an empty binary does nothing."), - {Ptrs, Length, _} = couch_stream:close(Stream), + {Ptrs, Length, _, _, _} = couch_stream:close(Stream), etap:is(Ptrs, [0], "Close returns the file pointers."), etap:is(Length, 8, "Close also returns the number of bytes written."), etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."), @@ -58,7 +58,7 @@ test() -> etap:is(ok, couch_stream:write(Stream2, ZeroBits), "Successfully wrote 80 0 bits."), - {Ptrs2, Length2, _} = couch_stream:close(Stream2), + {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2), etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."), etap:is(Length2, 20, "Length written is 160 bytes."), @@ -73,7 +73,7 @@ test() -> couch_stream:write(Stream3, Data), [Data | Acc] end, [], lists:seq(1, 1024)), - {Ptrs3, Length3, _} = couch_stream:close(Stream3), + {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3), % 4095 because of 5 * 
4096 rem 5 (last write before exceeding threshold) % + 5 puts us over the threshold diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t index d6b13d6d..5a8571ac 100755 --- a/test/etap/060-kt-merging.t +++ b/test/etap/060-kt-merging.t @@ -15,7 +15,7 @@ main(_) -> test_util:init_code_path(), - etap:plan(16), + etap:plan(12), case (catch test()) of ok -> etap:end_tests(); @@ -26,113 +26,88 @@ main(_) -> ok. test() -> - EmptyTree = [], - One = [{0, {"1","foo",[]}}], + One = {0, {"1","foo",[]}}, TwoSibs = [{0, {"1","foo",[]}}, {0, {"2","foo",[]}}], - OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}], - TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}], - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, - {"1b", "bar", []}]}}], - TwoChildSibs2 = [{0, {"1","foo", [{"1a", "bar", []}, - {"1b", "bar", [{"1bb", "boo", []}]}]}}], - Stemmed1b = [{1, {"1a", "bar", []}}], - Stemmed1a = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - Stemmed1aa = [{2, {"1aa", "bar", []}}], - Stemmed1bb = [{2, {"1bb", "boo", []}}], - - etap:is( - {EmptyTree, no_conflicts}, - couch_key_tree:merge(EmptyTree, EmptyTree), - "Merging two empty trees yields an empty tree." - ), - - etap:is( - {One, no_conflicts}, - couch_key_tree:merge(EmptyTree, One), + OneChild = {0, {"1","foo",[{"1a", "bar", []}]}}, + TwoChild = {0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}, + TwoChildSibs = {0, {"1","foo", [{"1a", "bar", []}, + {"1b", "bar", []}]}}, + TwoChildSibs2 = {0, {"1","foo", [{"1a", "bar", []}, + {"1b", "bar", [{"1bb", "boo", []}]}]}}, + Stemmed1b = {1, {"1a", "bar", []}}, + Stemmed1a = {1, {"1a", "bar", [{"1aa", "bar", []}]}}, + Stemmed1aa = {2, {"1aa", "bar", []}}, + Stemmed1bb = {2, {"1bb", "boo", []}}, + + etap:is( + {[One], no_conflicts}, + couch_key_tree:merge([], One), "The empty tree is the identity for merge." ), etap:is( - {One, no_conflicts}, - couch_key_tree:merge(One, EmptyTree), - "Merging is commutative." 
- ), - - etap:is( {TwoSibs, no_conflicts}, - couch_key_tree:merge(One, TwoSibs), + couch_key_tree:merge(TwoSibs, One), "Merging a prefix of a tree with the tree yields the tree." ), etap:is( - {One, no_conflicts}, - couch_key_tree:merge(One, One), + {[One], no_conflicts}, + couch_key_tree:merge([One], One), "Merging is reflexive." ), etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, TwoChild), + {[TwoChild], no_conflicts}, + couch_key_tree:merge([TwoChild], TwoChild), "Merging two children is still reflexive." ), etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(TwoChildSibs, TwoChildSibs), + {[TwoChildSibs], no_conflicts}, + couch_key_tree:merge([TwoChildSibs], TwoChildSibs), "Merging a tree to itself is itself."), etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(TwoChildSibs, Stemmed1b), + {[TwoChildSibs], no_conflicts}, + couch_key_tree:merge([TwoChildSibs], Stemmed1b), "Merging a tree with a stem." ), etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(Stemmed1b, TwoChildSibs), - "Merging in the opposite direction." - ), - - etap:is( - {TwoChildSibs2, no_conflicts}, - couch_key_tree:merge(TwoChildSibs2, Stemmed1bb), + {[TwoChildSibs2], no_conflicts}, + couch_key_tree:merge([TwoChildSibs2], Stemmed1bb), "Merging a stem at a deeper level." ), etap:is( - {TwoChildSibs2, no_conflicts}, - couch_key_tree:merge(Stemmed1bb, TwoChildSibs2), - "Merging a deeper level in opposite order." - ), - - etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, Stemmed1aa), + {[TwoChild], no_conflicts}, + couch_key_tree:merge([TwoChild], Stemmed1aa), "Merging a single tree with a deeper stem." ), etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, Stemmed1a), + {[TwoChild], no_conflicts}, + couch_key_tree:merge([TwoChild], Stemmed1a), "Merging a larger stem." 
), etap:is( - {Stemmed1a, no_conflicts}, - couch_key_tree:merge(Stemmed1a, Stemmed1aa), + {[Stemmed1a], no_conflicts}, + couch_key_tree:merge([Stemmed1a], Stemmed1aa), "More merging." ), - Expect1 = OneChild ++ Stemmed1aa, + Expect1 = [OneChild, Stemmed1aa], etap:is( {Expect1, conflicts}, - couch_key_tree:merge(OneChild, Stemmed1aa), + couch_key_tree:merge([OneChild], Stemmed1aa), "Merging should create conflicts." ), etap:is( - {TwoChild, no_conflicts}, + {[TwoChild], no_conflicts}, couch_key_tree:merge(Expect1, TwoChild), "Merge should have no conflicts." ), diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t index bf20dc0a..4b14aba6 100755 --- a/test/etap/070-couch-db.t +++ b/test/etap/070-couch-db.t @@ -41,6 +41,7 @@ test() -> etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2), "Database was deleted."), + gen_server:call(couch_server, {set_max_dbs_open, 3}), MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end, lists:foreach(fun(Int) -> @@ -51,24 +52,24 @@ test() -> end, {ok, Db} = couch_db:create(MkDbName(Int), []), ok = couch_db:close(Db) - end, lists:seq(1, 200)), + end, lists:seq(1, 6)), {ok, AllDbs3} = couch_server:all_databases(), NumCreated = lists:foldl(fun(Int, Acc) -> true = lists:member(MkDbName(Int), AllDbs3), Acc+1 - end, 0, lists:seq(1, 200)), - etap:is(200, NumCreated, "Created all databases."), + end, 0, lists:seq(1, 6)), + etap:is(6, NumCreated, "Created all databases."), lists:foreach(fun(Int) -> ok = couch_server:delete(MkDbName(Int), []) - end, lists:seq(1, 200)), + end, lists:seq(1, 6)), {ok, AllDbs4} = couch_server:all_databases(), NumDeleted = lists:foldl(fun(Int, Acc) -> false = lists:member(MkDbName(Int), AllDbs4), Acc+1 - end, 0, lists:seq(1, 200)), - etap:is(200, NumDeleted, "Deleted all databases."), + end, 0, lists:seq(1, 6)), + etap:is(6, NumDeleted, "Deleted all databases."), ok. 
diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t index b6ebbe4c..b278de7f 100755 --- a/test/etap/090-task-status.t +++ b/test/etap/090-task-status.t @@ -29,7 +29,7 @@ check_status(Pid,ListPropLists) -> From = list_to_binary(pid_to_list(Pid)), Element = lists:foldl( fun(PropList,Acc) -> - case proplists:get_value(pid,PropList) of + case couch_util:get_value(pid,PropList) of From -> [PropList | Acc]; _ -> @@ -38,7 +38,7 @@ check_status(Pid,ListPropLists) -> end, [], ListPropLists ), - proplists:get_value(status,hd(Element)). + couch_util:get_value(status,hd(Element)). loop() -> receive diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t index 6f18d828..8f996d04 100755 --- a/test/etap/100-ref-counter.t +++ b/test/etap/100-ref-counter.t @@ -27,17 +27,14 @@ main(_) -> loop() -> receive - {ping, From} -> - From ! pong + close -> ok end. wait() -> receive - _ -> - ok - after - 1000 -> - throw(timeout_error) + {'DOWN', _, _, _, _} -> ok + after 1000 -> + throw(timeout_error) end. test() -> @@ -94,11 +91,23 @@ test() -> "Sanity checking that the Pid was re-added." ), - ChildPid1 ! {ping, self()}, + erlang:monitor(process, ChildPid1), + ChildPid1 ! close, wait(), - etap:is( - couch_ref_counter:count(RefCtr), - 1, + + CheckFun = fun + (Iter, nil) -> + case couch_ref_counter:count(RefCtr) of + 1 -> Iter; + _ -> nil + end; + (_, Acc) -> + Acc + end, + Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)), + etap:isnt( + Result, + nil, "The referer count was decremented automatically on process exit." 
), diff --git a/test/etap/110-replication-httpc.t b/test/etap/110-replication-httpc.t index 492732bc..529239c5 100755 --- a/test/etap/110-replication-httpc.t +++ b/test/etap/110-replication-httpc.t @@ -19,7 +19,7 @@ auth = [], resource = "", headers = [ - {"User-Agent", "CouchDb/"++couch_server:get_version()}, + {"User-Agent", "CouchDB/"++couch_server:get_version()}, {"Accept", "application/json"}, {"Accept-Encoding", "gzip"} ], @@ -107,8 +107,8 @@ test_put() -> method = put }, {Resp} = couch_rep_httpc:request(Req), - etap:ok(proplists:get_value(<<"ok">>, Resp), "ok:true on upload"), - etap:is(<<"test_put">>, proplists:get_value(<<"id">>, Resp), "id is correct"). + etap:ok(couch_util:get_value(<<"ok">>, Resp), "ok:true on upload"), + etap:is(<<"test_put">>, couch_util:get_value(<<"id">>, Resp), "id is correct"). test_qs() -> Req = #http_db{ diff --git a/test/etap/111-replication-changes-feed.t b/test/etap/111-replication-changes-feed.t index b03c1ac7..778b99dd 100755 --- a/test/etap/111-replication-changes-feed.t +++ b/test/etap/111-replication-changes-feed.t @@ -22,7 +22,7 @@ auth = [], resource = "", headers = [ - {"User-Agent", "CouchDb/"++couch_server:get_version()}, + {"User-Agent", "CouchDB/"++couch_server:get_version()}, {"Accept", "application/json"}, {"Accept-Encoding", "gzip"} ], @@ -153,11 +153,11 @@ test_deleted_conflicts(Type) -> {ExpectProps} = generate_conflict(), %% delete the conflict revision - Id = proplists:get_value(<<"id">>, ExpectProps), - [Win, {[{<<"rev">>, Lose}]}] = proplists:get_value(<<"changes">>, ExpectProps), + Id = couch_util:get_value(<<"id">>, ExpectProps), + [Win, {[{<<"rev">>, Lose}]}] = couch_util:get_value(<<"changes">>, ExpectProps), Doc = couch_doc:from_json_obj({[ {<<"_id">>, Id}, - {<<"_rev">>, couch_doc:rev_to_str(Lose)}, + {<<"_rev">>, Lose}, {<<"_deleted">>, true} ]}), Db = get_db(), @@ -167,7 +167,7 @@ test_deleted_conflicts(Type) -> Expect = {[ {<<"seq">>, get_update_seq()}, {<<"id">>, Id}, - {<<"changes">>, [Win, 
{[{<<"rev">>, Rev}]}]} + {<<"changes">>, [Win, {[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]} ]}, {ok, Pid} = start_changes_feed(Type, Since, false), @@ -210,7 +210,7 @@ generate_change(Id, EJson) -> {[ {<<"seq">>, get_update_seq()}, {<<"id">>, Id}, - {<<"changes">>, [{[{<<"rev">>, Rev}]}]} + {<<"changes">>, [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]} ]}. generate_conflict() -> @@ -220,9 +220,9 @@ generate_conflict() -> Doc2 = (couch_doc:from_json_obj({[<<"foo">>, <<"baz">>]}))#doc{id = Id}, {ok, Rev1} = couch_db:update_doc(Db, Doc1, [full_commit]), {ok, Rev2} = couch_db:update_doc(Db, Doc2, [full_commit, all_or_nothing]), - + %% relies on undocumented CouchDB conflict winner algo and revision sorting! - RevList = [{[{<<"rev">>, R}]} || R + RevList = [{[{<<"rev">>, couch_doc:rev_to_str(R)}]} || R <- lists:sort(fun(A,B) -> B<A end, [Rev1,Rev2])], {[ {<<"seq">>, get_update_seq()}, diff --git a/test/etap/112-replication-missing-revs.t b/test/etap/112-replication-missing-revs.t index 8aabfd37..71971088 100755 --- a/test/etap/112-replication-missing-revs.t +++ b/test/etap/112-replication-missing-revs.t @@ -23,7 +23,7 @@ auth = [], resource = "", headers = [ - {"User-Agent", "CouchDb/"++couch_server:get_version()}, + {"User-Agent", "CouchDB/"++couch_server:get_version()}, {"Accept", "application/json"}, {"Accept-Encoding", "gzip"} ], @@ -188,8 +188,21 @@ start_changes_feed(remote, Since, Continuous) -> Db = #http_db{url = "http://127.0.0.1:5984/etap-test-source/"}, couch_rep_changes_feed:start_link(self(), Db, Since, Props). +couch_rep_pid(Db) -> + spawn(fun() -> couch_rep_pid_loop(Db) end). + +couch_rep_pid_loop(Db) -> + receive + {'$gen_call', From, get_target_db} -> + gen_server:reply(From, {ok, Db}) + end, + couch_rep_pid_loop(Db). 
+ start_missing_revs(local, Changes) -> - couch_rep_missing_revs:start_link(self(), get_db(target), Changes, []); + TargetDb = get_db(target), + MainPid = couch_rep_pid(TargetDb), + couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []); start_missing_revs(remote, Changes) -> - Db = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"}, - couch_rep_missing_revs:start_link(self(), Db, Changes, []). + TargetDb = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"}, + MainPid = couch_rep_pid(TargetDb), + couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []). diff --git a/test/etap/113-replication-attachment-comp.t b/test/etap/113-replication-attachment-comp.t new file mode 100755 index 00000000..19c48fc6 --- /dev/null +++ b/test/etap/113-replication-attachment-comp.t @@ -0,0 +1,317 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-record(user_ctx, { + name = null, + roles = [], + handler +}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_a_name() -> + <<"couch_test_rep_att_comp_a">>. + +test_db_b_name() -> + <<"couch_test_rep_att_comp_b">>. + +main(_) -> + test_util:init_code_path(), + etap:plan(45), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
+ +test() -> + couch_server_sup:start_link([default_config()]), + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + ibrowse:start(), + timer:sleep(1000), + + % + % test pull replication + % + + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + create_db(test_db_a_name()), + create_db(test_db_b_name()), + + % enable compression + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + % store doc with text attachment in DB A + put_text_att(test_db_a_name()), + + % disable attachment compression + couch_config:set("attachments", "compression_level", "0"), + + % do pull replication + do_pull_replication(test_db_a_name(), test_db_b_name()), + + % verify that DB B has the attachment stored in compressed form + check_att_is_compressed(test_db_b_name()), + check_server_can_decompress_att(test_db_b_name()), + check_att_stubs(test_db_a_name(), test_db_b_name()), + + % + % test push replication + % + + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + create_db(test_db_a_name()), + create_db(test_db_b_name()), + + % enable compression + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + % store doc with text attachment in DB A + put_text_att(test_db_a_name()), + + % disable attachment compression + couch_config:set("attachments", "compression_level", "0"), + + % do push replication + do_push_replication(test_db_a_name(), test_db_b_name()), + + % verify that DB B has the attachment stored in compressed form + check_att_is_compressed(test_db_b_name()), + check_server_can_decompress_att(test_db_b_name()), + check_att_stubs(test_db_a_name(), test_db_b_name()), + + % + % test local replication + % + + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + create_db(test_db_a_name()), + 
create_db(test_db_b_name()), + + % enable compression + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + % store doc with text attachment in DB A + put_text_att(test_db_a_name()), + + % disable attachment compression + couch_config:set("attachments", "compression_level", "0"), + + % do local-local replication + do_local_replication(test_db_a_name(), test_db_b_name()), + + % verify that DB B has the attachment stored in compressed form + check_att_is_compressed(test_db_b_name()), + check_server_can_decompress_att(test_db_b_name()), + check_att_stubs(test_db_a_name(), test_db_b_name()), + + timer:sleep(3000), % to avoid mochiweb socket closed exceptions + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + couch_server_sup:stop(), + ok. + +put_text_att(DbName) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url(DbName) ++ "/testdoc1/readme.txt", [], + "text/plain", test_text_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment"), + ok. + +do_pull_replication(SourceDbName, TargetDbName) -> + RepObj = {[ + {<<"source">>, list_to_binary(db_url(SourceDbName))}, + {<<"target">>, TargetDbName} + ]}, + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + post, + {rep_url(), [], + "application/json", list_to_binary(couch_util:json_encode(RepObj))}, + [], + [{sync, true}]), + etap:is(Code, 200, "Pull replication successfully triggered"), + Json = couch_util:json_decode(Body), + RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), + etap:is(RepOk, true, "Pull replication completed with success"), + ok. 
+ +do_push_replication(SourceDbName, TargetDbName) -> + RepObj = {[ + {<<"source">>, SourceDbName}, + {<<"target">>, list_to_binary(db_url(TargetDbName))} + ]}, + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + post, + {rep_url(), [], + "application/json", list_to_binary(couch_util:json_encode(RepObj))}, + [], + [{sync, true}]), + etap:is(Code, 200, "Push replication successfully triggered"), + Json = couch_util:json_decode(Body), + RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), + etap:is(RepOk, true, "Push replication completed with success"), + ok. + +do_local_replication(SourceDbName, TargetDbName) -> + RepObj = {[ + {<<"source">>, SourceDbName}, + {<<"target">>, TargetDbName} + ]}, + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + post, + {rep_url(), [], + "application/json", list_to_binary(couch_util:json_encode(RepObj))}, + [], + [{sync, true}]), + etap:is(Code, 200, "Local replication successfully triggered"), + Json = couch_util:json_decode(Body), + RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), + etap:is(RepOk, true, "Local replication completed with success"), + ok. + +check_att_is_compressed(DbName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url(DbName) ++ "/testdoc1/readme.txt", + [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code for the attachment request is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "The attachment was received in compressed form"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "The attachment content is valid after decompression at the client side" + ), + ok. 
+ +check_server_can_decompress_att(DbName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url(DbName) ++ "/testdoc1/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code for the attachment request is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is( + Gziped, false, "The attachment was not received in compressed form" + ), + etap:is( + Body, + test_text_data(), + "The attachment content is valid after server decompression" + ), + ok. + +check_att_stubs(SourceDbName, TargetDbName) -> + {ok, {{_, Code1, _}, _Headers1, Body1}} = http:request( + get, + {db_url(SourceDbName) ++ "/testdoc1?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is( + Code1, + 200, + "HTTP response code is 200 for the source DB doc request" + ), + Json1 = couch_util:json_decode(Body1), + SourceAttStub = couch_util:get_nested_json_value( + Json1, + [<<"_attachments">>, <<"readme.txt">>] + ), + {ok, {{_, Code2, _}, _Headers2, Body2}} = http:request( + get, + {db_url(TargetDbName) ++ "/testdoc1?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is( + Code2, + 200, + "HTTP response code is 200 for the target DB doc request" + ), + Json2 = couch_util:json_decode(Body2), + TargetAttStub = couch_util:get_nested_json_value( + Json2, + [<<"_attachments">>, <<"readme.txt">>] + ), + IdenticalStubs = (SourceAttStub =:= TargetAttStub), + etap:is(IdenticalStubs, true, "Attachment stubs are identical"), + TargetAttStubLength = couch_util:get_nested_json_value( + TargetAttStub, + [<<"length">>] + ), + TargetAttStubEnc = couch_util:get_nested_json_value( + TargetAttStub, + [<<"encoding">>] + ), + etap:is( + TargetAttStubEnc, + <<"gzip">>, + "Attachment stub has encoding property set to gzip" + ), + TargetAttStubEncLength = couch_util:get_nested_json_value( + TargetAttStub, + [<<"encoded_length">>] + ), + EncLengthDefined = is_integer(TargetAttStubEncLength), + etap:is( + EncLengthDefined, + true, + "Stubs 
have the encoded_length field properly defined" + ), + EncLengthSmaller = (TargetAttStubEncLength < TargetAttStubLength), + etap:is( + EncLengthSmaller, + true, + "Stubs have the encoded_length field smaller than their length field" + ), + ok. + +admin_user_ctx() -> + {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. + +create_db(DbName) -> + {ok, _} = couch_db:create(DbName, [admin_user_ctx()]). + +delete_db(DbName) -> + couch_server:delete(DbName, [admin_user_ctx()]). + +db_url(DbName) -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(DbName). + +rep_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/_replicate". + +test_text_data() -> + {ok, Data} = file:read_file(test_util:source_file("README")), + binary_to_list(Data). diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t index cd6b1430..d678aa9d 100755 --- a/test/etap/121-stats-aggregates.t +++ b/test/etap/121-stats-aggregates.t @@ -46,13 +46,13 @@ test_all_empty() -> etap:is(length(Aggs), 2, "There are only two aggregate types in testing."), etap:is( - proplists:get_value(testing, Aggs), + couch_util:get_value(testing, Aggs), {[{stuff, make_agg(<<"yay description">>, null, null, null, null, null)}]}, "{testing, stuff} is empty at start." ), etap:is( - proplists:get_value(number, Aggs), + couch_util:get_value(number, Aggs), {[{'11', make_agg(<<"randomosity">>, null, null, null, null, null)}]}, "{number, '11'} is empty at start." 
diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t index fe6732d6..4c40f83a 100755 --- a/test/etap/130-attachments-md5.t +++ b/test/etap/130-attachments-md5.t @@ -103,7 +103,7 @@ test_identity_with_valid_md5() -> "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", "Content-Type: text/plain\r\n", "Content-Length: 34\r\n", - "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n", + "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", "\r\n", AttData], @@ -118,7 +118,7 @@ test_chunked_with_valid_md5_header() -> "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", "Content-Type: text/plain\r\n", "Transfer-Encoding: chunked\r\n", - "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n", + "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", "\r\n", to_hex(size(Part1)), "\r\n", Part1, "\r\n", @@ -145,7 +145,7 @@ test_chunked_with_valid_md5_trailer() -> to_hex(size(Part2)), "\r\n", Part2, "\r\n", "0\r\n", - "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n", + "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", "\r\n"], {Code, Json} = do_request(Data), @@ -227,7 +227,7 @@ get_socket() -> do_request(Request) -> Sock = get_socket(), gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))), - timer:sleep(100), + timer:sleep(1000), {ok, R} = gen_tcp:recv(Sock, 0), gen_tcp:close(Sock), [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]), diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t new file mode 100755 index 00000000..98d37abc --- /dev/null +++ b/test/etap/140-attachment-comp.t @@ -0,0 +1,711 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_name() -> + <<"couch_test_atts_compression">>. + +main(_) -> + test_util:init_code_path(), + + etap:plan(78), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link([default_config()]), + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + timer:sleep(1000), + couch_server:delete(test_db_name(), []), + couch_db:create(test_db_name(), []), + + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + create_1st_text_att(), + create_1st_png_att(), + create_2nd_text_att(), + create_2nd_png_att(), + + tests_for_1st_text_att(), + tests_for_1st_png_att(), + tests_for_2nd_text_att(), + tests_for_2nd_png_att(), + + create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), + test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), + + test_create_already_compressed_att_with_invalid_content_encoding( + db_url() ++ "/doc_att_deflate", + "readme.txt", + zlib:compress(test_text_data()), + "deflate" + ), + + test_create_already_compressed_att_with_invalid_content_encoding( + db_url() ++ "/doc_att_compress", + "readme.txt", + % Note: As of OTP R13B04, it seems there's no LZW compression + % (i.e. 
UNIX compress utility implementation) lib in OTP. + % However there's a simple working Erlang implementation at: + % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php + test_text_data(), + "compress" + ), + + timer:sleep(3000), % to avoid mochiweb socket closed exceptions + couch_server:delete(test_db_name(), []), + couch_server_sup:stop(), + ok. + +db_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(test_db_name()). + +create_1st_text_att() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc1/readme.txt", [], + "text/plain", test_text_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment using the standalone api"), + ok. + +create_1st_png_att() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc2/icon.png", [], + "image/png", test_png_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created png attachment using the standalone api"), + ok. + +% create a text attachment using the non-standalone attachment api +create_2nd_text_att() -> + DocJson = {[ + {<<"_attachments">>, {[ + {<<"readme.txt">>, {[ + {<<"content_type">>, <<"text/plain">>}, + {<<"data">>, base64:encode(test_text_data())} + ]} + }]}} + ]}, + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc3", [], + "application/json", list_to_binary(couch_util:json_encode(DocJson))}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment using the non-standalone api"), + ok. 
+ +% create a png attachment using the non-standalone attachment api +create_2nd_png_att() -> + DocJson = {[ + {<<"_attachments">>, {[ + {<<"icon.png">>, {[ + {<<"content_type">>, <<"image/png">>}, + {<<"data">>, base64:encode(test_png_data())} + ]} + }]}} + ]}, + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc4", [], + "application/json", list_to_binary(couch_util:json_encode(DocJson))}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created png attachment using the non-standalone api"), + ok. + +create_already_compressed_att(DocUri, AttName) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {DocUri ++ "/" ++ AttName, [{"Content-Encoding", "gzip"}], + "text/plain", zlib:gzip(test_text_data())}, + [], + [{sync, true}]), + etap:is( + Code, + 201, + "Created already compressed attachment using the standalone api" + ), + ok. + +tests_for_1st_text_att() -> + test_get_1st_text_att_with_accept_encoding_gzip(), + test_get_1st_text_att_without_accept_encoding_header(), + test_get_1st_text_att_with_accept_encoding_deflate(), + test_get_1st_text_att_with_accept_encoding_deflate_only(), + test_get_doc_with_1st_text_att(), + test_1st_text_att_stub(). + +tests_for_1st_png_att() -> + test_get_1st_png_att_without_accept_encoding_header(), + test_get_1st_png_att_with_accept_encoding_gzip(), + test_get_1st_png_att_with_accept_encoding_deflate(), + test_get_doc_with_1st_png_att(), + test_1st_png_att_stub(). + +tests_for_2nd_text_att() -> + test_get_2nd_text_att_with_accept_encoding_gzip(), + test_get_2nd_text_att_without_accept_encoding_header(), + test_get_doc_with_2nd_text_att(), + test_2nd_text_att_stub(). + +tests_for_2nd_png_att() -> + test_get_2nd_png_att_without_accept_encoding_header(), + test_get_2nd_png_att_with_accept_encoding_gzip(), + test_get_doc_with_2nd_png_att(), + test_2nd_png_att_stub(). 
+ +test_get_1st_text_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. + +test_get_1st_text_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. + +test_get_1st_text_att_with_accept_encoding_deflate() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "deflate"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + Deflated = lists:member({"content-encoding", "deflate"}, Headers), + etap:is(Deflated, false, "received body is not deflated"), + etap:is( + Body, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. + +test_get_1st_text_att_with_accept_encoding_deflate_only() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", + [{"Accept-Encoding", "deflate, *;q=0"}]}, + [], + [{sync, true}]), + etap:is( + Code, + 406, + "HTTP response code is 406 for an unsupported content encoding request" + ), + ok. 
+ +test_get_1st_png_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. + +test_get_1st_png_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. + +test_get_1st_png_att_with_accept_encoding_deflate() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "deflate"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Deflated = lists:member({"content-encoding", "deflate"}, Headers), + etap:is(Deflated, false, "received body is not deflated"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. 
+
+test_get_doc_with_1st_text_att() ->
+    {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+        get,
+        {db_url() ++ "/testdoc1?attachments=true", []},
+        [],
+        [{sync, true}]),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = couch_util:json_decode(Body),
+    TextAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttType = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"content_type">>]
+    ),
+    TextAttData = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"data">>]
+    ),
+    etap:is(
+        TextAttType,
+        <<"text/plain">>,
+        "1st text attachment has type text/plain"
+    ),
+    %% check the attachment's data is the base64 encoding of the plain text
+    %% and not the base64 encoding of the gzipped plain text
+    etap:is(
+        TextAttData,
+        base64:encode(test_text_data()),
+        "1st text attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_1st_text_att_stub() ->
+    {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+        get,
+        {db_url() ++ "/testdoc1?att_encoding_info=true", []},
+        [],
+        [{sync, true}]),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = couch_util:json_decode(Body),
+    {TextAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+    etap:is(
+        TextAttLength,
+        length(test_text_data()),
+        "1st text attachment stub length matches the uncompressed length"
+    ),
+    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+    etap:is(
+        TextAttEncoding,
+        <<"gzip">>,
+        "1st text attachment stub has the encoding field set to gzip"
+    ),
+    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+    etap:is(
+        TextAttEncLength,
+        iolist_size(zlib:gzip(test_text_data())),
+        "1st text attachment stub encoded_length matches the compressed length"
+    ),
+    ok.
+ +test_get_doc_with_1st_png_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + PngAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttType = couch_util:get_nested_json_value( + PngAttJson, + [<<"content_type">>] + ), + PngAttData = couch_util:get_nested_json_value( + PngAttJson, + [<<"data">>] + ), + etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), + etap:is( + PngAttData, + base64:encode(test_png_data()), + "1st png attachment data is properly base64 encoded" + ), + ok. + +test_1st_png_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {PngAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), + etap:is( + PngAttLength, + length(test_png_data()), + "1st png attachment stub length matches the uncompressed length" + ), + PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), + etap:is( + PngEncoding, + undefined, + "1st png attachment stub doesn't have an encoding field" + ), + PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), + etap:is( + PngEncLength, + undefined, + "1st png attachment stub doesn't have an encoded_length field" + ), + ok. 
+ +test_get_2nd_text_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3/readme.txt", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "received data for the 2nd text attachment is ok" + ), + ok. + +test_get_2nd_text_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_text_data(), + "received data for the 2nd text attachment is ok" + ), + ok. + +test_get_2nd_png_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4/icon.png", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 2nd png attachment is ok" + ), + ok. + +test_get_2nd_png_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4/icon.png", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 2nd png attachment is ok" + ), + ok. 
+
+test_get_doc_with_2nd_text_att() ->
+    {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+        get,
+        {db_url() ++ "/testdoc3?attachments=true", []},
+        [],
+        [{sync, true}]),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = couch_util:json_decode(Body),
+    TextAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttType = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"content_type">>]
+    ),
+    TextAttData = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"data">>]
+    ),
+    etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
+    %% check the attachment's data is the base64 encoding of the plain text
+    %% and not the base64 encoding of the gzipped plain text
+    etap:is(
+        TextAttData,
+        base64:encode(test_text_data()),
+        "2nd text attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_2nd_text_att_stub() ->
+    {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+        get,
+        {db_url() ++ "/testdoc3?att_encoding_info=true", []},
+        [],
+        [{sync, true}]),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = couch_util:json_decode(Body),
+    {TextAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+    etap:is(
+        TextAttLength,
+        length(test_text_data()),
+        "2nd text attachment stub length matches the uncompressed length"
+    ),
+    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+    etap:is(
+        TextAttEncoding,
+        <<"gzip">>,
+        "2nd text attachment stub has the encoding field set to gzip"
+    ),
+    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+    etap:is(
+        TextAttEncLength,
+        iolist_size(zlib:gzip(test_text_data())),
+        "2nd text attachment stub encoded_length matches the compressed length"
+    ),
+    ok.
+ +test_get_doc_with_2nd_png_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + PngAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttType = couch_util:get_nested_json_value( + PngAttJson, + [<<"content_type">>] + ), + PngAttData = couch_util:get_nested_json_value( + PngAttJson, + [<<"data">>] + ), + etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), + etap:is( + PngAttData, + base64:encode(test_png_data()), + "2nd png attachment data is properly base64 encoded" + ), + ok. + +test_2nd_png_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {PngAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), + etap:is( + PngAttLength, + length(test_png_data()), + "2nd png attachment stub length matches the uncompressed length" + ), + PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), + etap:is( + PngEncoding, + undefined, + "2nd png attachment stub doesn't have an encoding field" + ), + PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), + etap:is( + PngEncLength, + undefined, + "2nd png attachment stub doesn't have an encoded_length field" + ), + ok. + +test_already_compressed_att(DocUri, AttName) -> + test_get_already_compressed_att_with_accept_gzip(DocUri, AttName), + test_get_already_compressed_att_without_accept(DocUri, AttName), + test_get_already_compressed_att_stub(DocUri, AttName). 
+ +test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {DocUri ++ "/" ++ AttName, [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + etap:is( + iolist_to_binary(Body), + iolist_to_binary(zlib:gzip(test_text_data())), + "received data for the already compressed attachment is ok" + ), + ok. + +test_get_already_compressed_att_without_accept(DocUri, AttName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {DocUri ++ "/" ++ AttName, []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + iolist_to_binary(Body), + iolist_to_binary(test_text_data()), + "received data for the already compressed attachment is ok" + ), + ok. 
+ +test_get_already_compressed_att_stub(DocUri, AttName) -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {DocUri ++ "?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {AttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, iolist_to_binary(AttName)] + ), + AttLength = couch_util:get_value(<<"length">>, AttJson), + etap:is( + AttLength, + iolist_size((zlib:gzip(test_text_data()))), + "Already compressed attachment stub length matches the " + "compressed length" + ), + Encoding = couch_util:get_value(<<"encoding">>, AttJson), + etap:is( + Encoding, + <<"gzip">>, + "Already compressed attachment stub has the encoding field set to gzip" + ), + EncLength = couch_util:get_value(<<"encoded_length">>, AttJson), + etap:is( + EncLength, + AttLength, + "Already compressed attachment stub encoded_length matches the " + "length field value" + ), + ok. + +test_create_already_compressed_att_with_invalid_content_encoding( + DocUri, AttName, AttData, Encoding) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {DocUri ++ "/" ++ AttName, [{"Content-Encoding", Encoding}], + "text/plain", AttData}, + [], + [{sync, true}]), + etap:is( + Code, + 415, + "Couldn't create an already compressed attachment using the " + "unsupported encoding '" ++ Encoding ++ "'" + ), + ok. + +test_png_data() -> + {ok, Data} = file:read_file( + test_util:source_file("share/www/image/logo.png") + ), + binary_to_list(Data). + +test_text_data() -> + {ok, Data} = file:read_file( + test_util:source_file("README") + ), + binary_to_list(Data). 
diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t new file mode 100755 index 00000000..0664c116 --- /dev/null +++ b/test/etap/150-invalid-view-seq.t @@ -0,0 +1,192 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-record(user_ctx, { + name = null, + roles = [], + handler +}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_name() -> + <<"couch_test_invalid_view_seq">>. + +main(_) -> + test_util:init_code_path(), + + etap:plan(10), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +%% NOTE: since during the test we stop the server, +%% a huge and ugly but harmless stack trace is sent to stderr +%% +test() -> + couch_server_sup:start_link([default_config()]), + timer:sleep(1000), + delete_db(), + create_db(), + + create_docs(), + create_design_doc(), + + % make DB file backup + backup_db_file(), + + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + + create_new_doc(), + query_view_before_restore_backup(), + + % restore DB file backup after querying view + restore_backup_db_file(), + + query_view_after_restore_backup(), + + delete_db(), + couch_server_sup:stop(), + ok. + +admin_user_ctx() -> + {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. 
+ +create_db() -> + {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]). + +delete_db() -> + couch_server:delete(test_db_name(), [admin_user_ctx()]). + +create_docs() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + Doc1 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc1">>}, + {<<"value">>, 1} + + ]}), + Doc2 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc2">>}, + {<<"value">>, 2} + + ]}), + Doc3 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc3">>}, + {<<"value">>, 3} + + ]}), + {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +create_design_doc() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + DDoc = couch_doc:from_json_obj({[ + {<<"_id">>, <<"_design/foo">>}, + {<<"language">>, <<"javascript">>}, + {<<"views">>, {[ + {<<"bar">>, {[ + {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>} + ]}} + ]}} + ]}), + {ok, _} = couch_db:update_docs(Db, [DDoc]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +backup_db_file() -> + DbFile = test_util:build_file("tmp/lib/" ++ + binary_to_list(test_db_name()) ++ ".couch"), + {ok, _} = file:copy(DbFile, DbFile ++ ".backup"), + ok. + +create_new_doc() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + Doc666 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc666">>}, + {<<"value">>, 999} + + ]}), + {ok, _} = couch_db:update_docs(Db, [Doc666]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +db_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(test_db_name()). 
+ +query_view_before_restore_backup() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/_design/foo/_view/bar", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "Got view response before restoring backup."), + ViewJson = couch_util:json_decode(Body), + Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), + HasDoc1 = has_doc("doc1", Rows), + HasDoc2 = has_doc("doc2", Rows), + HasDoc3 = has_doc("doc3", Rows), + HasDoc666 = has_doc("doc666", Rows), + etap:is(HasDoc1, true, "Before backup restore, view has doc1"), + etap:is(HasDoc2, true, "Before backup restore, view has doc2"), + etap:is(HasDoc3, true, "Before backup restore, view has doc3"), + etap:is(HasDoc666, true, "Before backup restore, view has doc666"), + ok. + +has_doc(DocId1, Rows) -> + DocId = iolist_to_binary(DocId1), + lists:any( + fun({R}) -> lists:member({<<"id">>, DocId}, R) end, + Rows + ). + +restore_backup_db_file() -> + couch_server_sup:stop(), + timer:sleep(3000), + DbFile = test_util:build_file("tmp/lib/" ++ + binary_to_list(test_db_name()) ++ ".couch"), + ok = file:delete(DbFile), + ok = file:rename(DbFile ++ ".backup", DbFile), + couch_server_sup:start_link([default_config()]), + timer:sleep(1000), + ok. 
+ +query_view_after_restore_backup() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/_design/foo/_view/bar", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "Got view response after restoring backup."), + ViewJson = couch_util:json_decode(Body), + Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), + HasDoc1 = has_doc("doc1", Rows), + HasDoc2 = has_doc("doc2", Rows), + HasDoc3 = has_doc("doc3", Rows), + HasDoc666 = has_doc("doc666", Rows), + etap:is(HasDoc1, true, "After backup restore, view has doc1"), + etap:is(HasDoc2, true, "After backup restore, view has doc2"), + etap:is(HasDoc3, true, "After backup restore, view has doc3"), + etap:is(HasDoc666, false, "After backup restore, view does not have doc666"), + ok. diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t new file mode 100755 index 00000000..eb704d31 --- /dev/null +++ b/test/etap/160-vhosts.t @@ -0,0 +1,131 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_rep.hrl") +-record(http_db, { + url, + auth = [], + resource = "", + headers = [ + {"User-Agent", "CouchDB/"++couch_server:get_version()}, + {"Accept", "application/json"}, + {"Accept-Encoding", "gzip"} + ], + qs = [], + method = get, + body = nil, + options = [ + {response_format,binary}, + {inactivity_timeout, 30000} + ], + retries = 10, + pause = 1, + conn = nil +}). 
+ +-record(user_ctx, { + name = null, + roles = [], + handler +}). + +server() -> "http://127.0.0.1:5984/". +dbname() -> "etap-test-db". +admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. + +config_files() -> + lists:map(fun test_util:build_file/1, [ + "etc/couchdb/default_dev.ini", + "etc/couchdb/local_dev.ini" + ]). + +main(_) -> + test_util:init_code_path(), + + etap:plan(4), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link(config_files()), + ibrowse:start(), + crypto:start(), + + couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]), + {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]), + + Doc = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc1">>}, + {<<"value">>, 666} + ]}), + {ok, _} = couch_db:update_docs(Db, [Doc]), + couch_db:ensure_full_commit(Db), + + %% end boilerplate, start test + + couch_config:set("vhosts", "example.com", "/etap-test-db", false), + test_regular_request(), + test_vhost_request(), + test_vhost_request_with_qs(), + test_vhost_request_with_global(), + + %% restart boilerplate + couch_db:close(Db), + couch_server:delete(list_to_binary(dbname()), []), + ok. + +test_regular_request() -> + case ibrowse:send_req(server(), [], get, []) of + {ok, _, _, Body} -> + {[{<<"couchdb">>, <<"Welcome">>}, + {<<"version">>,_} + ]} = couch_util:json_decode(Body), + etap:is(true, true, "should return server info"); + _Else -> false + end. + +test_vhost_request() -> + case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of + {ok, _, _, Body} -> + {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]} + = couch_util:json_decode(Body), + etap:is(true, true, "should return database info"); + _Else -> false + end. 
+ +test_vhost_request_with_qs() -> + Url = server() ++ "doc1?revs_info=true", + case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of + {ok, _, _, Body} -> + {JsonProps} = couch_util:json_decode(Body), + HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps), + etap:is(HasRevsInfo, true, "should return _revs_info"); + _Else -> false + end. + +test_vhost_request_with_global() -> + Url2 = server() ++ "_utils/index.html", + case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of + {ok, _, _, Body2} -> + "<!DOCTYPE" ++ _Foo = Body2, + etap:is(true, true, "should serve /_utils even inside vhosts"); + _Else -> false + end. diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am index d0f751f8..bdab95aa 100644 --- a/test/etap/Makefile.am +++ b/test/etap/Makefile.am @@ -14,7 +14,7 @@ noinst_SCRIPTS = run noinst_DATA = test_util.beam %.beam: %.erl - erlc $< + $(ERLC) $< run: run.tpl sed -e "s|%abs_top_srcdir%|@abs_top_srcdir@|g" \ @@ -58,7 +58,12 @@ EXTRA_DIST = \ 110-replication-httpc.t \ 111-replication-changes-feed.t \ 112-replication-missing-revs.t \ + 113-replication-attachment-comp.t \ 120-stats-collect.t \ 121-stats-aggregates.cfg \ 121-stats-aggregates.ini \ - 121-stats-aggregates.t + 121-stats-aggregates.t \ + 130-attachments-md5.t \ + 140-attachment-comp.t \ + 150-invalid-view-seq.t \ + 160-vhosts.t diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js index f92cf119..5f4716d2 100644 --- a/test/javascript/couch_http.js +++ b/test/javascript/couch_http.js @@ -15,11 +15,15 @@ if(typeof(CouchHTTP) != "undefined") { CouchHTTP.prototype.open = function(method, url, async) { - if(/^\s*http:\/\//.test(url)) { - return this._open(method, url, async); - } else { - return this._open(method, this.base_url + url, async); + if(!/^\s*http:\/\//.test(url)) { + if(/^[^\/]/.test(url)) { + url = this.base_url + "/" + url; + } else { + url = this.base_url + url; + } } + + return this._open(method, url, async); 
}; CouchHTTP.prototype.setRequestHeader = function(name, value) { @@ -52,6 +56,7 @@ } })(); +CouchDB.urlPrefix = ""; CouchDB.newXhr = function() { return new CouchHTTP(); }; diff --git a/test/view_server/query_server_spec.rb b/test/view_server/query_server_spec.rb index 1de8e5bc..de1df5c1 100644 --- a/test/view_server/query_server_spec.rb +++ b/test/view_server/query_server_spec.rb @@ -139,7 +139,7 @@ functions = { "js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}}, "erlang" => <<-ERLANG fun({Doc}) -> - A = proplists:get_value(<<"a">>, Doc, null), + A = couch_util:get_value(<<"a">>, Doc, null), Emit(<<"foo">>, A), Emit(<<"bar">>, A) end. @@ -153,7 +153,7 @@ functions = { JS "erlang" => <<-ERLANG fun({Doc}) -> - A = proplists:get_value(<<"a">>, Doc, null), + A = couch_util:get_value(<<"a">>, Doc, null), Emit(<<"baz">>, A) end. ERLANG @@ -175,7 +175,7 @@ functions = { JS "erlang" => <<-ERLANG fun({NewDoc}, _OldDoc, _UserCtx) -> - case proplists:get_value(<<"bad">>, NewDoc) of + case couch_util:get_value(<<"bad">>, NewDoc) of undefined -> 1; _ -> {[{forbidden, <<"bad doc">>}]} end @@ -191,8 +191,8 @@ functions = { JS "erlang" => <<-ERLANG fun({Doc}, Req) -> - Title = proplists:get_value(<<"title">>, Doc), - Body = proplists:get_value(<<"body">>, Doc), + Title = couch_util:get_value(<<"title">>, Doc), + Body = couch_util:get_value(<<"body">>, Doc), Resp = <<Title/binary, " - ", Body/binary>>, {[{<<"body">>, Resp}]} end. 
@@ -208,8 +208,8 @@ functions = { JS "erlang" => <<-ERLANG fun({Doc}, Req) -> - Title = proplists:get_value(<<"title">>, Doc), - Body = proplists:get_value(<<"body">>, Doc), + Title = couch_util:get_value(<<"title">>, Doc), + Body = couch_util:get_value(<<"body">>, Doc), Resp = <<Title/binary, " - ", Body/binary>>, {[ {<<"code">>, 200}, @@ -256,9 +256,9 @@ functions = { "erlang" => <<-ERLANG, fun(Head, {Req}) -> Send(<<"first chunk">>), - Send(proplists:get_value(<<"q">>, Req)), + Send(couch_util:get_value(<<"q">>, Req)), Fun = fun({Row}, _) -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {ok, nil} end, {ok, _} = FoldRows(Fun, nil), @@ -283,7 +283,7 @@ functions = { fun(Head, Req) -> Send(<<"bacon">>), Fun = fun({Row}, _) -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), Send(<<"eggs">>), {ok, nil} end, @@ -307,9 +307,9 @@ functions = { "erlang" => <<-ERLANG, fun(Head, {Req}) -> Send(<<"first chunk">>), - Send(proplists:get_value(<<"q">>, Req)), + Send(couch_util:get_value(<<"q">>, Req)), Fun = fun({Row}, _) -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {ok, nil} end, FoldRows(Fun, nil), @@ -335,13 +335,13 @@ functions = { "erlang" => <<-ERLANG, fun(Head, {Req}) -> Send(<<"first chunk">>), - Send(proplists:get_value(<<"q">>, Req)), + Send(couch_util:get_value(<<"q">>, Req)), Fun = fun ({Row}, Count) when Count < 2 -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {ok, Count+1}; ({Row}, Count) when Count == 2 -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {stop, <<"early tail">>} end, {ok, Tail} = FoldRows(Fun, 0), @@ -380,10 +380,10 @@ functions = { Send(<<"bacon">>), Fun = fun ({Row}, Count) when Count < 2 -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {ok, Count+1}; ({Row}, Count) when Count 
== 2 -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {stop, <<"early">>} end, {ok, Tail} = FoldRows(Fun, 0), @@ -408,9 +408,9 @@ functions = { "erlang" => <<-ERLANG, fun(Head, {Req}) -> Send(<<"first chunk">>), - Send(proplists:get_value(<<"q">>, Req)), + Send(couch_util:get_value(<<"q">>, Req)), Fun = fun({Row}, _) -> - Send(proplists:get_value(<<"key">>, Row)), + Send(couch_util:get_value(<<"key">>, Row)), {ok, nil} end, FoldRows(Fun, nil), @@ -428,7 +428,7 @@ functions = { JS "erlang" => <<-ERLANG, fun({Doc}, Req) -> - proplists:get_value(<<"good">>, Doc) + couch_util:get_value(<<"good">>, Doc) end. ERLANG }, |