-rw-r--r--  share/www/script/test/list_views.js      |   4
-rw-r--r--  share/www/script/test/view_collation.js  |  10
-rw-r--r--  src/couchdb/couch_btree.erl              | 149
-rw-r--r--  src/couchdb/couch_db.erl                 |  30
-rw-r--r--  src/couchdb/couch_db_updater.erl         |  13
-rw-r--r--  src/couchdb/couch_httpd_auth.erl         |  13
-rw-r--r--  src/couchdb/couch_httpd_db.erl           |  61
-rw-r--r--  src/couchdb/couch_httpd_show.erl         |  14
-rw-r--r--  src/couchdb/couch_httpd_view.erl         | 142
-rw-r--r--  src/couchdb/couch_view.erl               |   8
-rw-r--r--  src/couchdb/couch_view_compactor.erl     |   4
-rw-r--r--  src/couchdb/couch_view_updater.erl       |  24
-rwxr-xr-x  test/etap/020-btree-basics.t             |  14
13 files changed, 233 insertions, 253 deletions
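The heart of this change is couch_btree: the positional foldl/foldr/fold variants give way to a single fold/4 that takes a proplist of options and returns the last reduction seen alongside the accumulator. A minimal before/after sketch of the call shape, assuming a btree handle Bt and a reverse traversal from StartKey; the helper names are illustrative, not part of the patch:

    %% Old API: direction and start key were separate arguments and the
    %% result was a two-tuple.
    old_collect(Bt, StartKey) ->
        {ok, Rows} = couch_btree:foldr(Bt, StartKey,
            fun(KV, Acc) -> {ok, [KV | Acc]} end, []),
        Rows.

    %% New API: everything optional goes into the options proplist, and the
    %% middle element of the result is the reduction covering the rows seen.
    new_collect(Bt, StartKey) ->
        {ok, _LastReduction, Rows} = couch_btree:fold(Bt,
            fun(KV, Acc) -> {ok, [KV | Acc]} end, [],
            [{dir, rev}, {start_key, StartKey}]),
        Rows.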
diff --git a/share/www/script/test/list_views.js b/share/www/script/test/list_views.js index e297f0a3..d845f926 100644 --- a/share/www/script/test/list_views.js +++ b/share/www/script/test/list_views.js @@ -314,10 +314,10 @@ couchTests.list_views = function(debug) { }); T(xhr.status == 200, "multi key"); T(/Total Rows/.test(xhr.responseText)); - T(!(/Key: 1/.test(xhr.responseText))); + T(!(/Key: 1 /.test(xhr.responseText))); T(/Key: 2/.test(xhr.responseText)); T(/FirstKey: 2/.test(xhr.responseText)); - T(/LastKey: 7/.test(xhr.responseText)); + T(/LastKey: 11/.test(xhr.responseText)); // no multi-key fetch allowed when group=false xhr = CouchDB.request("POST", "/test_suite_db/_design/lists/_list/simpleForm/withReduce?group=false", { diff --git a/share/www/script/test/view_collation.js b/share/www/script/test/view_collation.js index 9bf06f9b..f4ae4a15 100644 --- a/share/www/script/test/view_collation.js +++ b/share/www/script/test/view_collation.js @@ -103,10 +103,14 @@ couchTests.view_collation = function(debug) { var rows = db.query(queryFun, null, {endkey : "b", descending:true, inclusive_end:false}).rows; T(rows[rows.length-1].key == "B") - - // inclusive_end=false overrides endkey_docid + var rows = db.query(queryFun, null, { - endkey : "b", endkey_docid: "b", + endkey : "b", endkey_docid: "10", inclusive_end:false}).rows; T(rows[rows.length-1].key == "aa") + + var rows = db.query(queryFun, null, { + endkey : "b", endkey_docid: "11", + inclusive_end:false}).rows; + T(rows[rows.length-1].key == "b") }; diff --git a/src/couchdb/couch_btree.erl b/src/couchdb/couch_btree.erl index d540c35f..6176603c 100644 --- a/src/couchdb/couch_btree.erl +++ b/src/couchdb/couch_btree.erl @@ -12,8 +12,8 @@ -module(couch_btree). --export([open/2, open/3, query_modify/4, add/2, add_remove/3, foldl/3, foldl/4]). --export([foldr/3, foldr/4, fold/4, fold/5, full_reduce/1, final_reduce/2]). +-export([open/2, open/3, query_modify/4, add/2, add_remove/3]). +-export([fold/4, full_reduce/1, final_reduce/2,foldl/3,foldl/4]). -export([fold_reduce/6, fold_reduce/7, lookup/2, get_state/1, set_options/2]). -define(CHUNK_THRESHOLD, 16#4ff). @@ -99,31 +99,67 @@ full_reduce(#btree{root=nil,reduce=Reduce}) -> full_reduce(#btree{root={_P, Red}}) -> {ok, Red}. -foldl(Bt, Fun, Acc) -> - fold(Bt, fwd, Fun, Acc). - -foldl(Bt, Key, Fun, Acc) -> - fold(Bt, Key, fwd, Fun, Acc). - -foldr(Bt, Fun, Acc) -> - fold(Bt, rev, Fun, Acc). - -foldr(Bt, Key, Fun, Acc) -> - fold(Bt, Key, rev, Fun, Acc). - % wraps a 2 arity function with the proper 3 arity function convert_fun_arity(Fun) when is_function(Fun, 2) -> fun(KV, _Reds, AccIn) -> Fun(KV, AccIn) end; convert_fun_arity(Fun) when is_function(Fun, 3) -> Fun. % Already arity 3 -fold(Bt, Dir, Fun, Acc) -> - {_ContinueFlag, Acc2} = stream_node(Bt, [], Bt#btree.root, nil, Dir, convert_fun_arity(Fun), Acc), - {ok, Acc2}. -fold(Bt, Key, Dir, Fun, Acc) -> - {_ContinueFlag, Acc2} = stream_node(Bt, [], Bt#btree.root, Key, Dir, convert_fun_arity(Fun), Acc), - {ok, Acc2}. 
+make_key_in_end_range_function(#btree{less=Less}, fwd, Options) -> + case proplists:get_value(end_key, Options) of + undefined -> + case proplists:get_value(end_key_inclusive, Options) of + undefined -> + fun(_Key) -> true end; + LastKey -> + fun(Key) -> not Less(LastKey, Key) end + end; + EndKey -> + fun(Key) -> Less(Key, EndKey) end + end; +make_key_in_end_range_function(#btree{less=Less}, rev, Options) -> + case proplists:get_value(end_key, Options) of + undefined -> + case proplists:get_value(end_key_inclusive, Options) of + undefined -> + fun(_Key) -> true end; + LastKey -> + fun(Key) -> not Less(Key, LastKey) end + end; + EndKey -> + fun(Key) -> Less(EndKey, Key) end + end. + + +foldl(Bt, Fun, Acc) -> + fold(Bt, Fun, Acc, []). + +foldl(Bt, Fun, Acc, Options) -> + fold(Bt, Fun, Acc, Options). + + +fold(#btree{root=nil}, _Fun, Acc, _Options) -> + {ok, {[], []}, Acc}; +fold(#btree{root=Root}=Bt, Fun, Acc, Options) -> + Dir = proplists:get_value(dir, Options, fwd), + InRange = make_key_in_end_range_function(Bt, Dir, Options), + Result = + case proplists:get_value(start_key, Options) of + undefined -> + stream_node(Bt, [], Bt#btree.root, InRange, Dir, + convert_fun_arity(Fun), Acc); + StartKey -> + stream_node(Bt, [], Bt#btree.root, StartKey, InRange, Dir, + convert_fun_arity(Fun), Acc) + end, + case Result of + {ok, Acc2}-> + {_P, FullReduction} = Root, + {ok, {[], [FullReduction]}, Acc2}; + {stop, LastReduction, Acc2} -> + {ok, LastReduction, Acc2} + end. add(Bt, InsertKeyValues) -> add_remove(Bt, InsertKeyValues, []). @@ -556,40 +592,32 @@ adjust_dir(fwd, List) -> adjust_dir(rev, List) -> lists:reverse(List). -stream_node(Bt, Reds, PointerInfo, nil, Dir, Fun, Acc) -> - stream_node(Bt, Reds, PointerInfo, Dir, Fun, Acc); -stream_node(Bt, Reds, PointerInfo, {}, rev, Fun, Acc) -> - stream_node(Bt, Reds, PointerInfo, rev, Fun, Acc); -stream_node(_Bt, _Reds, nil, _StartKey, _Dir, _Fun, Acc) -> - {ok, Acc}; -stream_node(Bt, Reds, {Pointer, _Reds}, StartKey, Dir, Fun, Acc) -> +stream_node(Bt, Reds, {Pointer, _Reds}, StartKey, InRange, Dir, Fun, Acc) -> {NodeType, NodeList} = get_node(Bt, Pointer), case NodeType of kp_node -> - stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, Dir, Fun, Acc); + stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, InRange, Dir, Fun, Acc); kv_node -> - stream_kv_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, Dir, Fun, Acc) + stream_kv_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, InRange, Dir, Fun, Acc) end. -stream_node(_Bt, _Reds, nil, _Dir, _Fun, Acc) -> - {ok, Acc}; -stream_node(Bt, Reds, {Pointer, _Reds}, Dir, Fun, Acc) -> +stream_node(Bt, Reds, {Pointer, _Reds}, InRange, Dir, Fun, Acc) -> {NodeType, NodeList} = get_node(Bt, Pointer), case NodeType of kp_node -> - stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), Dir, Fun, Acc); + stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), InRange, Dir, Fun, Acc); kv_node -> - stream_kv_node2(Bt, Reds, [], adjust_dir(Dir, NodeList), Dir, Fun, Acc) + stream_kv_node2(Bt, Reds, [], adjust_dir(Dir, NodeList), InRange, Dir, Fun, Acc) end. 
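Note the asymmetry between the two end-bound options that make_key_in_end_range_function understands: end_key stops before the first key that is not strictly below the bound, while end_key_inclusive also emits the row equal to it (both mirrored for {dir, rev}). A small sketch of picking between them, assuming the btree's default assemble so each row arrives as {Key, Value} (as in the etap tests) and numeric values:

    %% Sum the values in [StartKey, EndKey), or [StartKey, EndKey] when
    %% Inclusive is true.
    sum_range(Bt, StartKey, EndKey, Inclusive) ->
        EndOpt = case Inclusive of
            true  -> {end_key_inclusive, EndKey};  % keep rows with Key =< EndKey
            false -> {end_key, EndKey}             % keep rows with Key <  EndKey
        end,
        {ok, _LastReduction, Sum} = couch_btree:fold(Bt,
            fun({_Key, Value}, Acc) -> {ok, Acc + Value} end,
            0,
            [{start_key, StartKey}, EndOpt]),
        Sum.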
-stream_kp_node(_Bt, _Reds, [], _Dir, _Fun, Acc) -> +stream_kp_node(_Bt, _Reds, [], _InRange, _Dir, _Fun, Acc) -> {ok, Acc}; -stream_kp_node(Bt, Reds, [{_Key, {Pointer, Red}} | Rest], Dir, Fun, Acc) -> - case stream_node(Bt, Reds, {Pointer, Red}, Dir, Fun, Acc) of +stream_kp_node(Bt, Reds, [{_Key, {Pointer, Red}} | Rest], InRange, Dir, Fun, Acc) -> + case stream_node(Bt, Reds, {Pointer, Red}, InRange, Dir, Fun, Acc) of {ok, Acc2} -> - stream_kp_node(Bt, [Red | Reds], Rest, Dir, Fun, Acc2); - {stop, Acc2} -> - {stop, Acc2} + stream_kp_node(Bt, [Red | Reds], Rest, InRange, Dir, Fun, Acc2); + {stop, LastReds, Acc2} -> + {stop, LastReds, Acc2} end. drop_nodes(_Bt, Reds, _StartKey, []) -> @@ -600,7 +628,7 @@ drop_nodes(Bt, Reds, StartKey, [{NodeKey, {Pointer, Red}} | RestKPs]) -> false -> {Reds, [{NodeKey, {Pointer, Red}} | RestKPs]} end. -stream_kp_node(Bt, Reds, KPs, StartKey, Dir, Fun, Acc) -> +stream_kp_node(Bt, Reds, KPs, StartKey, InRange, Dir, Fun, Acc) -> {NewReds, NodesToStream} = case Dir of fwd -> @@ -609,28 +637,28 @@ stream_kp_node(Bt, Reds, KPs, StartKey, Dir, Fun, Acc) -> rev -> % keep all nodes sorting before the key, AND the first node to sort after RevKPs = lists:reverse(KPs), - case lists:splitwith(fun({Key, _Pointer}) -> less(Bt, Key, StartKey) end, RevKPs) of - {_RevBefore, []} -> + case lists:splitwith(fun({Key, _Pointer}) -> less(Bt, Key, StartKey) end, RevKPs) of + {_RevsBefore, []} -> % everything sorts before it {Reds, KPs}; {RevBefore, [FirstAfter | Drop]} -> {[Red || {_K,{_P,Red}} <- Drop] ++ Reds, - [FirstAfter | lists:reverse(RevBefore)]} + [FirstAfter | lists:reverse(RevBefore)]} end end, case NodesToStream of [] -> {ok, Acc}; [{_Key, {Pointer, Red}} | Rest] -> - case stream_node(Bt, NewReds, {Pointer, Red}, StartKey, Dir, Fun, Acc) of + case stream_node(Bt, NewReds, {Pointer, Red}, StartKey, InRange, Dir, Fun, Acc) of {ok, Acc2} -> - stream_kp_node(Bt, [Red | NewReds], Rest, Dir, Fun, Acc2); - {stop, Acc2} -> - {stop, Acc2} + stream_kp_node(Bt, [Red | NewReds], Rest, InRange, Dir, Fun, Acc2); + {stop, LastReds, Acc2} -> + {stop, LastReds, Acc2} end end. -stream_kv_node(Bt, Reds, KVs, StartKey, Dir, Fun, Acc) -> +stream_kv_node(Bt, Reds, KVs, StartKey, InRange, Dir, Fun, Acc) -> DropFun = case Dir of fwd -> @@ -640,15 +668,20 @@ stream_kv_node(Bt, Reds, KVs, StartKey, Dir, Fun, Acc) -> end, {LTKVs, GTEKVs} = lists:splitwith(DropFun, KVs), AssembleLTKVs = [assemble(Bt,K,V) || {K,V} <- LTKVs], - stream_kv_node2(Bt, Reds, AssembleLTKVs, GTEKVs, Dir, Fun, Acc). + stream_kv_node2(Bt, Reds, AssembleLTKVs, GTEKVs, InRange, Dir, Fun, Acc). -stream_kv_node2(_Bt, _Reds, _PrevKVs, [], _Dir, _Fun, Acc) -> +stream_kv_node2(_Bt, _Reds, _PrevKVs, [], _InRange, _Dir, _Fun, Acc) -> {ok, Acc}; -stream_kv_node2(Bt, Reds, PrevKVs, [{K,V} | RestKVs], Dir, Fun, Acc) -> - AssembledKV = assemble(Bt, K, V), - case Fun(AssembledKV, {PrevKVs, Reds}, Acc) of - {ok, Acc2} -> - stream_kv_node2(Bt, Reds, [AssembledKV | PrevKVs], RestKVs, Dir, Fun, Acc2); - {stop, Acc2} -> - {stop, Acc2} +stream_kv_node2(Bt, Reds, PrevKVs, [{K,V} | RestKVs], InRange, Dir, Fun, Acc) -> + case InRange(K) of + false -> + {stop, {PrevKVs, Reds}, Acc}; + true -> + AssembledKV = assemble(Bt, K, V), + case Fun(AssembledKV, {PrevKVs, Reds}, Acc) of + {ok, Acc2} -> + stream_kv_node2(Bt, Reds, [AssembledKV | PrevKVs], RestKVs, InRange, Dir, Fun, Acc2); + {stop, Acc2} -> + {stop, {PrevKVs, Reds}, Acc2} + end end. 
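When stream_kv_node2 hits the first out-of-range key it now returns {stop, {PrevKVs, Reds}, Acc}, so fold/4 can hand back the reductions accumulated up to the stopping point; that is what the HTTP view code later turns into row offsets. A sketch under the assumption that the btree was built with a simple counting reduce fun (set via set_options, as the etap tests do):

    count_reduce(reduce, KVs) -> length(KVs);
    count_reduce(rereduce, Counts) -> lists:sum(Counts).

    %% Bt is assumed to have been built with count_reduce/2 as its reduce fun
    %% (couch_btree:set_options(Bt0, [{reduce, fun count_reduce/2}]) before
    %% any keys were added). Count the rows that sort below EndKey without
    %% accumulating them: the partial reduction returned by the bounded fold
    %% is collapsed to a single number by final_reduce/2.
    rows_before(Bt, EndKey) ->
        {ok, LastReduction, _Acc} = couch_btree:fold(Bt,
            fun(_KV, Acc) -> {ok, Acc} end, nil,
            [{end_key, EndKey}]),
        couch_btree:final_reduce(fun count_reduce/2, LastReduction).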
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl index 7118145d..60d16904 100644 --- a/src/couchdb/couch_db.erl +++ b/src/couchdb/couch_db.erl @@ -19,7 +19,7 @@ -export([get_doc_info/2,open_doc/2,open_doc/3,open_doc_revs/4]). -export([set_revs_limit/2,get_revs_limit/1,register_update_notifier/3]). -export([get_missing_revs/2,name/1,doc_to_tree/1,get_update_seq/1,get_committed_update_seq/1]). --export([enum_docs/4,enum_docs/5,enum_docs_since/4,enum_docs_since/5]). +-export([enum_docs/4,enum_docs_since/5]). -export([enum_docs_since_reduce_to_count/1,enum_docs_reduce_to_count/1]). -export([increment_update_seq/1,get_purge_seq/1,purge_docs/2,get_last_purged/1]). -export([start_link/3,open_doc_int/3,set_admins/2,get_admins/1,ensure_full_commit/1]). @@ -193,14 +193,15 @@ get_db_info(Db) -> {ok, InfoList}. get_design_docs(#db{fulldocinfo_by_id_btree=Btree}=Db) -> - couch_btree:foldl(Btree, <<"_design/">>, + {ok,_, Docs} = couch_btree:fold(Btree, fun(#full_doc_info{id= <<"_design/",_/binary>>}=FullDocInfo, _Reds, AccDocs) -> {ok, Doc} = couch_db:open_doc_int(Db, FullDocInfo, []), {ok, [Doc | AccDocs]}; (_, _Reds, AccDocs) -> {stop, AccDocs} end, - []). + [], [{start_key, <<"_design/">>}, {end_key, <<"_design0">>}]), + {ok, Docs}. check_is_admin(#db{admins=Admins, user_ctx=#user_ctx{name=Name,roles=Roles}}) -> DbAdmins = [<<"_admin">> | Admins], @@ -693,8 +694,7 @@ enum_docs_reduce_to_count(Reds) -> Count. changes_since(Db, Style, StartSeq, Fun, Acc) -> - enum_docs_since(Db, StartSeq, fwd, - fun(DocInfo, _Offset, Acc2) -> + Wrapper = fun(DocInfo, _Offset, Acc2) -> #doc_info{revs=Revs} = DocInfo, case Style of main_only -> @@ -705,7 +705,9 @@ changes_since(Db, Style, StartSeq, Fun, Acc) -> #rev_info{seq=RevSeq}=RevInfo <- Revs, StartSeq < RevSeq] end, Fun(Infos, Acc2) - end, Acc). + end, + {ok, _LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree, Wrapper, Acc, [{start_key, StartSeq + 1}]), + {ok, AccOut}. count_changes_since(Db, SinceSeq) -> {ok, Changes} = @@ -719,17 +721,13 @@ count_changes_since(Db, SinceSeq) -> 0), Changes. -enum_docs_since(Db, SinceSeq, Direction, InFun, Acc) -> - couch_btree:fold(Db#db.docinfo_by_seq_btree, SinceSeq + 1, Direction, InFun, Acc). - -enum_docs_since(Db, SinceSeq, InFun, Acc) -> - enum_docs_since(Db, SinceSeq, fwd, InFun, Acc). - -enum_docs(Db, StartId, Direction, InFun, InAcc) -> - couch_btree:fold(Db#db.fulldocinfo_by_id_btree, StartId, Direction, InFun, InAcc). +enum_docs_since(Db, SinceSeq, InFun, Acc, Options) -> + {ok, LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree, InFun, Acc, [{start_key, SinceSeq + 1} | Options]), + {ok, enum_docs_since_reduce_to_count(LastReduction), AccOut}. -enum_docs(Db, StartId, InFun, Ctx) -> - enum_docs(Db, StartId, fwd, InFun, Ctx). +enum_docs(Db, InFun, InAcc, Options) -> + {ok, LastReduce, OutAcc} = couch_btree:fold(Db#db.fulldocinfo_by_id_btree, InFun, InAcc, Options), + {ok, enum_docs_reduce_to_count(LastReduce), OutAcc}. 
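The rewritten get_design_docs shows the idiom for a prefix scan under the new API: the by-id btree compares raw binaries (less_docid falls through to A < B), so <<"_design0">> is a tight upper bound for every id starting with <<"_design/">>, $0 being the byte after $/. A sketch of the same pattern through the new enum_docs/4; the function name and accumulator are illustrative, and #full_doc_info{} comes from couch_db.hrl:

    -include("couch_db.hrl").

    %% Collect the ids of all design documents in the database.
    design_doc_ids(Db) ->
        {ok, _RowsBefore, Ids} = couch_db:enum_docs(Db,
            fun(#full_doc_info{id = Id}, _Reds, Acc) ->
                {ok, [Id | Acc]}
            end,
            [],
            [{start_key, <<"_design/">>}, {end_key, <<"_design0">>}]),
        lists:reverse(Ids).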
% server functions diff --git a/src/couchdb/couch_db_updater.erl b/src/couchdb/couch_db_updater.erl index fd1d340f..42a85894 100644 --- a/src/couchdb/couch_db_updater.erl +++ b/src/couchdb/couch_db_updater.erl @@ -168,7 +168,7 @@ handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) -> case Db#db.update_seq == NewSeq of true -> % suck up all the local docs into memory and write them to the new db - {ok, LocalDocs} = couch_btree:foldl(Db#db.local_docs_btree, + {ok, _, LocalDocs} = couch_btree:foldl(Db#db.local_docs_btree, fun(Value, _Offset, Acc) -> {ok, [Value | Acc]} end, []), {ok, NewLocalBtree} = couch_btree:add(NewDb#db.local_docs_btree, LocalDocs), @@ -279,10 +279,11 @@ simple_upgrade_record(Old, New) -> lists:sublist(tuple_to_list(New), size(Old) + 1, size(New)-size(Old)), list_to_tuple(tuple_to_list(Old) ++ NewValuesTail). -% used for doc insertion, also for the PassedEndFun on all_docs view -less_docid(A, B) when A==B -> false; +less_docid(A, B) when A == B -> false; less_docid(nil, _) -> true; % nil - special key sorts before all less_docid({}, _) -> false; % {} -> special key sorts after all +less_docid(_, nil) -> false; +less_docid(_, {}) -> true; less_docid(A, B) -> A < B. @@ -722,8 +723,10 @@ copy_compact(Db, NewDb0, Retry) -> couch_task_status:set_update_frequency(500), - {ok, {NewDb2, Uncopied, TotalChanges}} = - couch_btree:foldl(Db#db.docinfo_by_seq_btree, NewDb#db.update_seq + 1, EnumBySeqFun, {NewDb, [], 0}), + {ok, _, {NewDb2, Uncopied, TotalChanges}} = + couch_btree:foldl(Db#db.docinfo_by_seq_btree, EnumBySeqFun, + {NewDb, [], 0}, + [{start_key, NewDb#db.update_seq + 1}]), couch_task_status:update("Flushing"), diff --git a/src/couchdb/couch_httpd_auth.erl b/src/couchdb/couch_httpd_auth.erl index 1c1ad0a9..6df1a393 100644 --- a/src/couchdb/couch_httpd_auth.erl +++ b/src/couchdb/couch_httpd_auth.erl @@ -114,7 +114,6 @@ get_user(Db, UserName) -> % then fall back to querying the db. 
case couch_config:get("admins", ?b2l(UserName)) of "-hashed-" ++ HashedPwdAndSalt -> - io:format("hashed: '~p'~n", [hashed]), [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","), [{<<"roles">>, [<<"_admin">>]}, {<<"salt">>, ?l2b(Salt)}, @@ -127,14 +126,10 @@ get_user(Db, UserName) -> case (catch couch_view:get_map_view(Db, DesignId, ViewName, nil)) of {ok, View, _Group} -> - FoldlFun = fun - ({{Key, _DocId}, Value}, _, nil) when Key == UserName -> {ok, Value}; - (_, _, Acc) -> {stop, Acc} - end, - case couch_view:fold(View, {UserName, nil}, fwd, FoldlFun, nil) of - {ok, {Result}} -> Result; - _Else -> nil - end; + FoldFun = fun({_, Value}, _, {_}) -> {stop, Value} end, + {ok, _, {Result}} = couch_view:fold(View, FoldFun, {nil}, + [{start_key, {UserName, nil}},{end_key, {UserName, {}}}]), + Result; {not_found, _Reason} -> nil % case (catch couch_view:get_reduce_view(Db, DesignId, ViewName, nil)) of diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl index 0f3835ae..614e1d64 100644 --- a/src/couchdb/couch_httpd_db.erl +++ b/src/couchdb/couch_httpd_db.erl @@ -460,6 +460,7 @@ db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) -> db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> #view_query_args{ start_key = StartKey, + end_key = EndKey, limit = Limit, skip = SkipCount, direction = Dir @@ -479,7 +480,7 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> {} -> 100000000000; StartKey when is_integer(StartKey) -> StartKey end, - {ok, FoldResult} = couch_db:enum_docs_since(Db, StartKey2, Dir, + {ok, LastOffset, FoldResult} = couch_db:enum_docs_since(Db, StartKey2, fun(DocInfo, Offset, Acc) -> #doc_info{ id=Id, @@ -505,9 +506,13 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> false -> [] end }, - FoldlFun({{Seq, Id}, Json}, Offset, Acc) - end, {Limit, SkipCount, undefined, [], nil}), - couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult}) + if (Seq > EndKey) -> + {stop, Acc}; + true -> + FoldlFun({{Seq, Id}, Json}, Offset, Acc) + end + end, {Limit, SkipCount, undefined, []}, [{dir, Dir}]), + couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult) end); db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) -> @@ -591,9 +596,11 @@ all_docs_view(Req, Db, Keys) -> start_key = StartKey, start_docid = StartDocId, end_key = EndKey, + end_docid = EndDocId, limit = Limit, skip = SkipCount, - direction = Dir + direction = Dir, + inclusive_end = Inclusive } = QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, map), {ok, Info} = couch_db:get_db_info(Db), CurrentEtag = couch_httpd:make_etag(Info), @@ -603,26 +610,16 @@ all_docs_view(Req, Db, Keys) -> StartId = if is_binary(StartKey) -> StartKey; true -> StartDocId end, - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, + EndId = if is_binary(EndKey) -> EndKey; + true -> EndDocId + end, + FoldAccInit = {Limit, SkipCount, undefined, []}, case Keys of nil -> - PassedEndFun = - case Dir of - fwd -> - fun(ViewKey, _ViewId) -> - couch_db_updater:less_docid(EndKey, ViewKey) - end; - rev-> - fun(ViewKey, _ViewId) -> - couch_db_updater:less_docid(ViewKey, EndKey) - end - end, - FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, TotalRowCount, #view_fold_helper_funs{ - reduce_count = fun couch_db:enum_docs_reduce_to_count/1, - passed_end = PassedEndFun + reduce_count = fun couch_db:enum_docs_reduce_to_count/1 }), AdapterFun = 
fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) -> case couch_doc:to_doc_info(FullDocInfo) of @@ -632,9 +629,10 @@ all_docs_view(Req, Db, Keys) -> {ok, Acc} end end, - {ok, FoldResult} = couch_db:enum_docs(Db, StartId, Dir, - AdapterFun, FoldAccInit), - couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult}); + {ok, LastOffset, FoldResult} = couch_db:enum_docs(Db, + AdapterFun, FoldAccInit, [{start_key, StartId}, {dir, Dir}, + {if Inclusive -> end_key_inclusive; true -> end_key end, EndId}]), + couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult); _ -> FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, TotalRowCount, #view_fold_helper_funs{ @@ -646,8 +644,8 @@ all_docs_view(Req, Db, Keys) -> rev -> fun lists:foldr/3 end, - {ok, FoldResult} = KeyFoldFun( - fun(Key, {ok, FoldAcc}) -> + FoldResult = KeyFoldFun( + fun(Key, FoldAcc) -> DocInfo = (catch couch_db:get_doc_info(Db, Key)), Doc = case DocInfo of {ok, #doc_info{id=Id, revs=[#rev_info{deleted=false, rev=Rev}|_]}} -> @@ -660,15 +658,10 @@ all_docs_view(Req, Db, Keys) -> ?LOG_ERROR("Invalid DocInfo: ~p", [DocInfo]), throw({error, invalid_doc_info}) end, - Acc = (catch FoldlFun(Doc, 0, FoldAcc)), - case Acc of - {stop, Acc2} -> - {ok, Acc2}; - _ -> - Acc - end - end, {ok, FoldAccInit}, Keys), - couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult}) + {_, FoldAcc2} = FoldlFun(Doc, 0, FoldAcc), + FoldAcc2 + end, FoldAccInit, Keys), + couch_httpd_view:finish_view_fold(Req, TotalRowCount, 0, FoldResult) end end). diff --git a/src/couchdb/couch_httpd_show.erl b/src/couchdb/couch_httpd_show.erl index 86eba4c2..176a9b15 100644 --- a/src/couchdb/couch_httpd_show.erl +++ b/src/couchdb/couch_httpd_show.erl @@ -193,8 +193,8 @@ output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, Vie start_response = StartListRespFun, send_row = SendListRowFun }), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - {ok, FoldResult} = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit), + FoldAccInit = {Limit, SkipCount, undefined, []}, + {ok, _, FoldResult} = couch_view:fold(View, FoldlFun, FoldAccInit, [{start_key, Start},{dir, Dir}]), finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount) end); @@ -218,9 +218,9 @@ output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, Vie StartListRespFun = make_map_start_resp_fun(QueryServer, Db), SendListRowFun = make_map_send_row_fun(QueryServer), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - {ok, FoldResult} = lists:foldl( - fun(Key, {ok, FoldAcc}) -> + FoldAccInit = {Limit, SkipCount, undefined, []}, + {ok, _, FoldResult} = lists:foldl( + fun(Key, {ok, _, FoldAcc}) -> FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs#view_query_args{ start_key = Key, end_key = Key @@ -230,8 +230,8 @@ output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, Vie start_response = StartListRespFun, send_row = SendListRowFun }), - couch_view:fold(View, {Key, StartDocId}, Dir, FoldlFun, FoldAcc) - end, {ok, FoldAccInit}, Keys), + couch_view:fold(View, FoldlFun, FoldAcc, [{start_key, {Key, StartDocId}}, {dir, Dir}]) + end, {ok, nil, FoldAccInit}, Keys), finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount) end). 
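With multi-key POSTs, the per-key folds in couch_httpd_show now thread their accumulator through the new three-tuple return: one bounded couch_view:fold per requested key, each starting at {Key, StartDocId}. A condensed sketch of that threading; the row handling, limits and end checks live in FoldlFun exactly as in the diff, and the helper name is illustrative:

    %% Run one view fold per requested key, carrying the
    %% {Limit, Skip, Resp, RowAcc} accumulator across keys.
    fold_each_key(View, Keys, StartDocId, Dir, FoldlFun, Acc0) ->
        lists:foldl(
            fun(Key, {ok, _LastReduce, AccIn}) ->
                couch_view:fold(View, FoldlFun, AccIn,
                    [{dir, Dir}, {start_key, {Key, StartDocId}}])
            end,
            {ok, nil, Acc0}, Keys).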
diff --git a/src/couchdb/couch_httpd_view.erl b/src/couchdb/couch_httpd_view.erl index 884402da..35484823 100644 --- a/src/couchdb/couch_httpd_view.erl +++ b/src/couchdb/couch_httpd_view.erl @@ -16,7 +16,7 @@ -export([handle_view_req/2,handle_temp_view_req/2,handle_db_view_req/2]). -export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]). --export([make_view_fold_fun/6, finish_view_fold/3, view_row_obj/3]). +-export([make_view_fold_fun/6, finish_view_fold/4, view_row_obj/3]). -export([view_group_etag/2, view_group_etag/3, make_reduce_fold_funs/5]). -export([design_doc_view/5, parse_bool_param/1]). @@ -150,11 +150,11 @@ output_map_view(Req, View, Group, Db, QueryArgs, nil) -> CurrentEtag = view_group_etag(Group, Db), couch_httpd:etag_respond(Req, CurrentEtag, fun() -> {ok, RowCount} = couch_view:get_row_count(View), - Start = {StartKey, StartDocId}, FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, #view_fold_helper_funs{reduce_count=fun couch_view:reduce_to_count/1}), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit), - finish_view_fold(Req, RowCount, FoldResult) + FoldAccInit = {Limit, SkipCount, undefined, []}, + {ok, LastReduce, FoldResult} = couch_view:fold(View, FoldlFun, FoldAccInit, + [{dir, Dir}, {start_key, {StartKey, StartDocId}} | make_end_key_option(QueryArgs)]), + finish_view_fold(Req, RowCount, couch_view:reduce_to_count(LastReduce), FoldResult) end); output_map_view(Req, View, Group, Db, QueryArgs, Keys) -> @@ -167,21 +167,21 @@ output_map_view(Req, View, Group, Db, QueryArgs, Keys) -> CurrentEtag = view_group_etag(Group, Db, Keys), couch_httpd:etag_respond(Req, CurrentEtag, fun() -> {ok, RowCount} = couch_view:get_row_count(View), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - FoldResult = lists:foldl( - fun(Key, {ok, FoldAcc}) -> - Start = {Key, StartDocId}, + FoldAccInit = {Limit, SkipCount, undefined, []}, + {LastReduce, FoldResult} = lists:foldl( + fun(Key, {_, FoldAcc}) -> FoldlFun = make_view_fold_fun(Req, QueryArgs#view_query_args{ - start_key = Key, - end_key = Key }, CurrentEtag, Db, RowCount, #view_fold_helper_funs{ reduce_count = fun couch_view:reduce_to_count/1 }), - couch_view:fold(View, Start, Dir, FoldlFun, FoldAcc) - end, {ok, FoldAccInit}, Keys), - finish_view_fold(Req, RowCount, FoldResult) + {ok, LastReduce, FoldResult} = couch_view:fold(View, FoldlFun, FoldAcc, + [{dir, Dir},{start_key, {Key, StartDocId}} | make_end_key_option( + QueryArgs#view_query_args{end_key=Key})]), + {LastReduce, FoldResult} + end, {{[],[]}, FoldAccInit}, Keys), + finish_view_fold(Req, RowCount, couch_view:reduce_to_count(LastReduce), FoldResult) end). output_reduce_view(Req, Db, View, Group, QueryArgs, nil) -> @@ -401,58 +401,37 @@ validate_view_query(extra, _Value, Args) -> Args. 
make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) -> - #view_query_args{ - end_key = EndKey, - end_docid = EndDocId, - inclusive_end = InclusiveEnd, - direction = Dir - } = QueryArgs, - #view_fold_helper_funs{ - passed_end = PassedEndFun, start_response = StartRespFun, send_row = SendRowFun, reduce_count = ReduceCountFun - } = apply_default_helper_funs(HelperFuns, - {Dir, EndKey, EndDocId, InclusiveEnd}), + } = apply_default_helper_funs(HelperFuns), #view_query_args{ include_docs = IncludeDocs } = QueryArgs, - - fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc, - OffsetAcc}) -> - PassedEnd = PassedEndFun(Key, DocId), - case {PassedEnd, AccLimit, AccSkip, Resp} of - {true, _, _, _} -> - % The stop key has been passed, stop looping. - % We may need offset so calcluate it here. - % Checking Resp is an optimization that tells - % us its already been calculated (and sent). - NewOffset = case Resp of - undefined -> ReduceCountFun(OffsetReds); - _ -> nil - end, - {stop, {AccLimit, AccSkip, Resp, RowFunAcc, NewOffset}}; - {_, 0, _, _} -> + + fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc}) -> + case {AccLimit, AccSkip, Resp} of + {0, _, _} -> % we've done "limit" rows, stop foldling - {stop, {0, 0, Resp, RowFunAcc, OffsetAcc}}; - {_, _, AccSkip, _} when AccSkip > 0 -> + {stop, {0, 0, Resp, RowFunAcc}}; + {_, AccSkip, _} when AccSkip > 0 -> % just keep skipping - {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc, OffsetAcc}}; - {_, _, _, undefined} -> + {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc}}; + {_, _, undefined} -> % rendering the first row, first we start the response Offset = ReduceCountFun(OffsetReds), {ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag, TotalViewCount, Offset, RowFunAcc), {Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value}, IncludeDocs, RowFunAcc0), - {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2, Offset}}; - {_, AccLimit, _, Resp} when (AccLimit > 0) -> + {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2}}; + {AccLimit, _, Resp} when (AccLimit > 0) -> % rendering all other rows {Go, RowFunAcc2} = SendRowFun(Resp, Db, {{Key, DocId}, Value}, IncludeDocs, RowFunAcc), - {Go, {AccLimit - 1, 0, Resp, RowFunAcc2, OffsetAcc}} + {Go, {AccLimit - 1, 0, Resp, RowFunAcc2}} end end. @@ -515,14 +494,9 @@ make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) -> {ok, GroupRowsFun, RespFun}. apply_default_helper_funs(#view_fold_helper_funs{ - passed_end = PassedEnd, start_response = StartResp, send_row = SendRow -}=Helpers, {Dir, EndKey, EndDocId, InclusiveEnd}) -> - PassedEnd2 = case PassedEnd of - undefined -> make_passed_end_fun(Dir, EndKey, EndDocId, InclusiveEnd); - _ -> PassedEnd - end, +}=Helpers) -> StartResp2 = case StartResp of undefined -> fun json_view_start_resp/5; @@ -535,10 +509,10 @@ apply_default_helper_funs(#view_fold_helper_funs{ end, Helpers#view_fold_helper_funs{ - passed_end = PassedEnd2, start_response = StartResp2, send_row = SendRow2 - }. + }; + apply_default_helper_funs(#reduce_fold_helper_funs{ start_response = StartResp, @@ -559,35 +533,17 @@ apply_default_helper_funs(#reduce_fold_helper_funs{ send_row = SendRow2 }. 
-make_passed_end_fun(fwd, EndKey, EndDocId, InclusiveEnd) -> - case InclusiveEnd of - true -> - fun(ViewKey, ViewId) -> - couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) - end; - false -> - fun - (ViewKey, _ViewId) when ViewKey == EndKey -> - true; - (ViewKey, ViewId) -> - couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) - end - end; - -make_passed_end_fun(rev, EndKey, EndDocId, InclusiveEnd) -> - case InclusiveEnd of - true -> - fun(ViewKey, ViewId) -> - couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) - end; - false-> - fun - (ViewKey, _ViewId) when ViewKey == EndKey -> - true; - (ViewKey, ViewId) -> - couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) - end - end. +make_end_key_option( + #view_query_args{end_key = EndKey, + end_docid = EndDocId, + inclusive_end = true}) -> + [{end_key_inclusive, {EndKey, EndDocId}}]; +make_end_key_option( + #view_query_args{ + end_key = EndKey, + end_docid = EndDocId, + inclusive_end = false}) -> + [{end_key, {EndKey,reverse_key_default(EndDocId)}}]. json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc) -> {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]), @@ -651,26 +607,20 @@ view_row_with_doc(Db, {{Key, DocId}, Value}, Rev) -> {[{id, DocId}, {key, Key}, {value, Value}, {doc, JsonDoc}]} end. -finish_view_fold(Req, TotalRows, FoldResult) -> +finish_view_fold(Req, TotalRows, Offset, FoldResult) -> case FoldResult of - {ok, {_, _, undefined, _, Offset}} -> - % nothing found in the view, nothing has been returned + {_, _, undefined, _} -> + % nothing found in the view or keys, nothing has been returned % send empty view - NewOffset = case Offset of - nil -> TotalRows; - _ -> Offset - end, send_json(Req, 200, {[ {total_rows, TotalRows}, - {offset, NewOffset}, + {offset, Offset}, {rows, []} ]}); - {ok, {_, _, Resp, _, _}} -> + {_, _, Resp, _} -> % end the view send_chunk(Resp, "\r\n]}"), - end_json_response(Resp); - Error -> - throw(Error) + end_json_response(Resp) end. finish_reduce_fold(Req, Resp) -> diff --git a/src/couchdb/couch_view.erl b/src/couchdb/couch_view.erl index b5509b5d..41d34e89 100644 --- a/src/couchdb/couch_view.erl +++ b/src/couchdb/couch_view.erl @@ -13,7 +13,7 @@ -module(couch_view). -behaviour(gen_server). --export([start_link/0,fold/4,fold/5,less_json/2,less_json_keys/2,expand_dups/2, +-export([start_link/0,fold/4,less_json/2,less_json_keys/2,expand_dups/2, detuple_kvs/2,init/1,terminate/2,handle_call/3,handle_cast/2,handle_info/2, code_change/3,get_reduce_view/4,get_temp_reduce_view/5,get_temp_map_view/4, get_map_view/4,get_row_count/1,reduce_to_count/1,fold_reduce/7, @@ -239,15 +239,13 @@ fold_fun(Fun, [KV|Rest], {KVReds, Reds}, Acc) -> {stop, Acc2} end. -fold(#view{btree=Btree}, Dir, Fun, Acc) -> - fold(Btree, nil, Dir, Fun, Acc). -fold(#view{btree=Btree}, StartKey, Dir, Fun, Acc) -> +fold(#view{btree=Btree}, Fun, Acc, Options) -> WrapperFun = fun(KV, Reds, Acc2) -> fold_fun(Fun, expand_dups([KV],[]), Reds, Acc2) end, - {ok, _AccResult} = couch_btree:fold(Btree, StartKey, Dir, WrapperFun, Acc). + {ok, _LastReduce, _AccResult} = couch_btree:fold(Btree, WrapperFun, Acc, Options). 
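make_end_key_option is the piece that changes user-visible behaviour for inclusive_end=false: the old make_passed_end_fun ignored endkey_docid as soon as the key matched endkey, whereas the new exclusive end_key is the full {EndKey, EndDocId} pair (with reverse_key_default supplying the extreme sentinel when no docid was given). That is why view_collation.js now expects different rows for endkey_docid "10" versus "11". A sketch of the selection, assuming the doc whose key is "b" has id "10":

    %% Mirrors make_end_key_option: which option, and therefore which bound,
    %% the view fold receives.
    end_key_option(EndKey, EndDocId, true)  -> {end_key_inclusive, {EndKey, EndDocId}};
    end_key_option(EndKey, EndDocId, false) -> {end_key, {EndKey, EndDocId}}.

    %% end_key_option(<<"b">>, <<"10">>, false) excludes the row keyed
    %% {<<"b">>, <<"10">>}, so the fold ends on key <<"aa">>;
    %% end_key_option(<<"b">>, <<"11">>, false) still admits that row,
    %% so the fold ends on key <<"b">> -- the two new assertions in
    %% view_collation.js.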
init([]) -> diff --git a/src/couchdb/couch_view_compactor.erl b/src/couchdb/couch_view_compactor.erl index 762668c0..ad217d97 100644 --- a/src/couchdb/couch_view_compactor.erl +++ b/src/couchdb/couch_view_compactor.erl @@ -58,7 +58,7 @@ compact_group(Group, EmptyGroup) -> {ok, {Bt, [KV|Acc], TotalCopied+1}} end end, - {ok, {Bt3, Uncopied, _Total}} = couch_btree:foldl(IdBtree, Fun, + {ok, _, {Bt3, Uncopied, _Total}} = couch_btree:foldl(IdBtree, Fun, {EmptyIdBtree, [], 0}), {ok, NewIdBtree} = couch_btree:add(Bt3, lists:reverse(Uncopied)), @@ -91,7 +91,7 @@ compact_view(View, EmptyView) -> end end, - {ok, {Bt3, Uncopied, _Total}} = couch_btree:foldl(View#view.btree, Fun, + {ok, _, {Bt3, Uncopied, _Total}} = couch_btree:foldl(View#view.btree, Fun, {EmptyView#view.btree, [], 0}), {ok, NewBt} = couch_btree:add(Bt3, lists:reverse(Uncopied)), EmptyView#view{btree = NewBt}. diff --git a/src/couchdb/couch_view_updater.erl b/src/couchdb/couch_view_updater.erl index a8027ce1..bb1dc975 100644 --- a/src/couchdb/couch_view_updater.erl +++ b/src/couchdb/couch_view_updater.erl @@ -41,7 +41,7 @@ update(Owner, Group) -> Self = self(), ViewEmptyKVs = [{View, []} || View <- Group2#group.views], spawn_link(fun() -> do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) end), - spawn_link(fun() -> do_writes(Self, Owner, Group2, WriteQueue) end), + spawn_link(fun() -> do_writes(Self, Owner, Group2, WriteQueue, Seq == 0) end), % compute on all docs modified since we last computed. TotalChanges = couch_db:count_changes_since(Db, Seq), % update status every half second @@ -55,7 +55,7 @@ update(Owner, Group) -> true -> [conflicts, deleted_conflicts, local_seq]; _ -> [conflicts, deleted_conflicts] end, - {ok, _} + {ok, _, _} = couch_db:enum_docs_since( Db, Seq, @@ -65,7 +65,7 @@ update(Owner, Group) -> load_doc(Db, DocInfo, MapQueue, DocOpts, IncludeDesign), {ok, ChangesProcessed+1} end, - 0), + 0, []), couch_task_status:set_update_frequency(0), couch_task_status:update("Finishing."), couch_work_queue:close(MapQueue), @@ -137,7 +137,7 @@ do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) -> do_maps(Group1, MapQueue, WriteQueue, ViewEmptyKVs) end. -do_writes(Parent, Owner, Group, WriteQueue) -> +do_writes(Parent, Owner, Group, WriteQueue, IntitalBuild) -> case couch_work_queue:dequeue(WriteQueue) of closed -> Parent ! {new_group, Group}; @@ -154,12 +154,13 @@ do_writes(Parent, Owner, Group, WriteQueue) -> {lists:max([Seq, Seq2]), AccViewKVs2, DocIdViewIdKeys ++ AccDocIdViewIdKeys} end, nil, Queue), - Group2 = write_changes(Group, ViewKeyValues, DocIdViewIdKeys, NewSeq), + Group2 = write_changes(Group, ViewKeyValues, DocIdViewIdKeys, NewSeq, + IntitalBuild), case Owner of nil -> ok; _ -> ok = gen_server:cast(Owner, {partial_update, self(), Group2}) end, - do_writes(Parent, nil, Group2, WriteQueue) + do_writes(Parent, nil, Group2, WriteQueue, IntitalBuild) end. 
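do_writes now learns whether this is a from-scratch build (Seq == 0) and, as the write_changes hunk further down shows, skips the id-btree lookups and removals in that case: no document can already have rows in a brand-new index. A sketch of the effect on the query_modify call, with an illustrative helper name:

    %% Nothing is indexed yet on an initial build, so there is nothing to
    %% look up or remove before adding the new doc-id -> view-key entries.
    update_id_btree(IdBtree, AddDocIdViewIdKeys, _DocIdViewIdKeys, true) ->
        couch_btree:query_modify(IdBtree, [], AddDocIdViewIdKeys, []);
    update_id_btree(IdBtree, AddDocIdViewIdKeys, DocIdViewIdKeys, false) ->
        LookupDocIds = [DocId || {DocId, _ViewIdKeys} <- DocIdViewIdKeys],
        RemoveDocIds = [DocId || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys == []],
        couch_btree:query_modify(IdBtree, LookupDocIds, AddDocIdViewIdKeys, RemoveDocIds).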
view_insert_query_results([], [], ViewKVs, DocIdViewIdKeysAcc) -> @@ -212,12 +213,17 @@ view_compute(#group{def_lang=DefLang, query_server=QueryServerIn}=Group, Docs) - -write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq) -> +write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq, InitialBuild) -> #group{id_btree=IdBtree} = Group, AddDocIdViewIdKeys = [{DocId, ViewIdKeys} || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys /= []], - RemoveDocIds = [DocId || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys == []], - LookupDocIds = [DocId || {DocId, _ViewIdKeys} <- DocIdViewIdKeys], + if InitialBuild -> + RemoveDocIds = [], + LookupDocIds = []; + true -> + RemoveDocIds = [DocId || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys == []], + LookupDocIds = [DocId || {DocId, _ViewIdKeys} <- DocIdViewIdKeys] + end, {ok, LookupResults, IdBtree2} = couch_btree:query_modify(IdBtree, LookupDocIds, AddDocIdViewIdKeys, RemoveDocIds), KeysToRemoveByView = lists:foldl( diff --git a/test/etap/020-btree-basics.t b/test/etap/020-btree-basics.t index 9187b606..90c04075 100755 --- a/test/etap/020-btree-basics.t +++ b/test/etap/020-btree-basics.t @@ -59,8 +59,8 @@ test_kvs(KeyValues) -> Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]), etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"), - EmptyRes = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0), - etap:is(EmptyRes, {ok, 0}, "Folding over an empty btree"), + {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0), + etap:is(EmptyRes, 0, "Folding over an empty btree"), {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []), etap:ok(test_btree(Btree2, KeyValues), @@ -151,15 +151,15 @@ test_key_access(Btree, List) -> end, Length = length(List), Sorted = lists:sort(List), - {ok, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}), - {ok, {[], Length}} = couch_btree:foldr(Btree, FoldFun, {Sorted, 0}), + {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}), + {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]), ok. test_lookup_access(Btree, KeyValues) -> FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end, lists:foreach(fun({Key, Value}) -> [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]), - {ok, true} = couch_btree:foldl(Btree, Key, FoldFun, {Key, Value}) + {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}]) end, KeyValues). test_final_reductions(Btree, KeyValues) -> @@ -182,8 +182,8 @@ test_final_reductions(Btree, KeyValues) -> 0 -> {nil, nil}; _ -> lists:nth(KVLen div 3, lists:sort(KeyValues)) end, - {ok, FoldLRed} = couch_btree:foldl(Btree, LStartKey, FoldLFun, 0), - {ok, FoldRRed} = couch_btree:foldr(Btree, RStartKey, FoldRFun, 0), + {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]), + {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]), KVLen = FoldLRed + FoldRRed, ok. |