 share/www/script/couch.js        |  29
 share/www/script/couch_tests.js  | 181
 src/couchdb/couch_httpd_db.erl   | 101
 src/couchdb/couch_httpd_view.erl | 233
 4 files changed, 456 insertions(+), 88 deletions(-)
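The patch below adds multi-key fetches: a client can POST a JSON body of the form {"keys": [...]} to _all_docs, to a design-document view, or to a temp view, and gets back one result row per requested key (or a not_found error row for a missing id). As a rough usage sketch against the patched couch.js client (the database and view names are illustrative, taken from the test suite below):

    var db = new CouchDB("test_suite_db");
    // Both calls POST {"keys": [...]} instead of issuing a plain GET.
    var byId = db.allDocs({}, ["10", "15", "30"]).rows;          // one row per requested id
    var byKey = db.view("test/all_docs", {count: 2}, [10, 15]);  // query options still apply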
diff --git a/share/www/script/couch.js b/share/www/script/couch.js
index cf26bb2e..87bba88a 100644
--- a/share/www/script/couch.js
+++ b/share/www/script/couch.js
@@ -112,8 +112,11 @@ function CouchDB(name) {
   }
 
   // Applies the map function to the contents of database and returns the results.
-  this.query = function(mapFun, reduceFun, options) {
+  this.query = function(mapFun, reduceFun, options, keys) {
     var body = {language: "javascript"};
+    if (keys) {
+      body.keys = keys;
+    }
     if (typeof(mapFun) != "string")
       mapFun = mapFun.toSource ? mapFun.toSource() : "(" + mapFun.toString() + ")";
     body.map = mapFun;
@@ -132,8 +135,16 @@ function CouchDB(name) {
     return result;
   }
 
-  this.view = function(viewname, options) {
-    var req = request("GET", this.uri + "_view/" + viewname + encodeOptions(options));
+  this.view = function(viewname, options, keys) {
+    var req = null;
+    if (!keys) {
+      req = request("GET", this.uri + "_view/" + viewname + encodeOptions(options));
+    } else {
+      req = request("POST", this.uri + "_view/" + viewname + encodeOptions(options), {
+        headers: {"Content-Type": "application/json"},
+        body: JSON.stringify({keys: keys})
+      });
+    }
     if (req.status == 404)
       return null;
     var result = JSON.parse(req.responseText);
@@ -151,8 +162,16 @@ function CouchDB(name) {
     return result;
   }
 
-  this.allDocs = function(options) {
-    var req = request("GET", this.uri + "_all_docs" + encodeOptions(options));
+  this.allDocs = function(options, keys) {
+    var req = null;
+    if (!keys) {
+      req = request("GET", this.uri + "_all_docs" + encodeOptions(options));
+    } else {
+      req = request("POST", this.uri + "_all_docs" + encodeOptions(options), {
+        headers: {"Content-Type": "application/json"},
+        body: JSON.stringify({keys: keys})
+      });
+    }
     var result = JSON.parse(req.responseText);
     if (req.status != 200)
       throw result;
diff --git a/share/www/script/couch_tests.js b/share/www/script/couch_tests.js
index 3abd570e..b2b0bbb1 100644
--- a/share/www/script/couch_tests.js
+++ b/share/www/script/couch_tests.js
@@ -1089,6 +1089,187 @@ var tests = {
     T(results.total_rows == 0);
   },
 
+  view_multi_key_all_docs: function(debug) {
+    var db = new CouchDB("test_suite_db");
+    db.deleteDb();
+    db.createDb();
+    if (debug) debugger;
+
+    var docs = makeDocs(0, 100);
+    T(db.bulkSave(docs).ok);
+
+    var keys = ["10","15","30","37","50"];
+    var rows = db.allDocs({}, keys).rows;
+    T(rows.length == keys.length);
+    for(var i=0; i<rows.length; i++)
+      T(rows[i].id == keys[i]);
+
+    rows = db.allDocs({count: 1}, keys).rows;
+    T(rows.length == 1);
+    T(rows[0].id == keys[0]);
+
+    rows = db.allDocs({skip: 2}, keys).rows;
+    T(rows.length == 3);
+    for(var i=0; i<rows.length; i++)
+      T(rows[i].id == keys[i+2]);
+
+    rows = db.allDocs({descending: "true"}, keys).rows;
+    T(rows.length == keys.length);
+    for(var i=0; i<rows.length; i++)
+      T(rows[i].id == keys[keys.length-i-1]);
+
+    rows = db.allDocs({descending: "true", skip: 3, count: 1}, keys).rows;
+    T(rows.length == 1);
+    T(rows[0].id == keys[1]);
+
+    // Check we get invalid rows when the key doesn't exist
+    rows = db.allDocs({}, [1, "i_dont_exist", "0"]).rows;
+    T(rows.length == 3);
+    T(rows[0].error == "not_found");
+    T(rows[1].error == "not_found");
+    T(rows[2].id == rows[2].key && rows[2].key == "0");
+  },
+
+  view_multi_key_design: function(debug) {
+    var db = new CouchDB("test_suite_db");
+    db.deleteDb();
+    db.createDb();
+    if (debug) debugger;
+
+    var docs = makeDocs(0, 100);
+    T(db.bulkSave(docs).ok);
+
+    var designDoc = {
+      _id: "_design/test",
+      language: "javascript",
+      views: {
+        all_docs: {
"function(doc) { emit(doc.integer, doc.string) }" + }, + multi_emit: { + map: "function(doc) {for(var i = 0 ; i < 3 ; i++) { emit(i, doc.integer) ; } }" + }, + summate: { + map:"function (doc) {emit(doc.integer, doc.integer)};", + reduce:"function (keys, values) { return sum(values); };" + } + } + } + T(db.save(designDoc).ok); + + // First, the goods: + var keys = [10,15,30,37,50]; + var rows = db.view("test/all_docs",{},keys).rows; + for(var i=0; i<rows.length; i++) { + T(keys.indexOf(rows[i].key) != -1); + T(rows[i].key == rows[i].value); + } + + var reduce = db.view("test/summate",{group:true},keys).rows; + T(reduce.length == keys.length); + for(var i=0; i<reduce.length; i++) { + T(keys.indexOf(reduce[i].key) != -1); + T(rows[i].key == rows[i].value); + } + + // Test that invalid parameter combinations get rejected + var badargs = [{startkey:0}, {endkey:0}, {key: 0}, {group_level: 2}]; + for(var i in badargs) + { + try { + db.view("test/all_docs",badargs[i],keys); + T(0==1); + } catch (e) { + T(e.error == "query_parse_error"); + } + } + + try { + db.view("test/summate",{},keys); + T(0==1); + } catch (e) { + T(e.error == "query_parse_error"); + } + + // Check that limiting by startkey_docid and endkey_docid get applied + // as expected. + var curr = db.view("test/multi_emit", {startkey_docid: 21, endkey_docid: 23}, [0, 2]).rows; + var exp_key = [ 0, 0, 0, 2, 2, 2] ; + var exp_val = [21, 22, 23, 21, 22, 23] ; + T(curr.length == 6); + for( var i = 0 ; i < 6 ; i++) + { + T(curr[i].key == exp_key[i]); + T(curr[i].value == exp_val[i]); + } + + // Check count works + curr = db.view("test/all_docs", {count: 1}, keys).rows; + T(curr.length == 1); + T(curr[0].key == 10); + + // Check offset works + curr = db.view("test/multi_emit", {skip: 1}, [0]).rows; + T(curr.length == 99); + T(curr[0].value == 1); + + // Check that dir works + curr = db.view("test/multi_emit", {descending: "true"}, [1]).rows; + T(curr.length == 100); + T(curr[0].value == 99); + T(curr[99].value == 0); + + // Check a couple combinations + curr = db.view("test/multi_emit", {descending: "true", skip: 3, count: 2}, [2]).rows; + T(curr.length, 2); + T(curr[0].value == 96); + T(curr[1].value == 95); + + curr = db.view("test/multi_emit", {skip: 2, count: 3, startkey_docid: "13"}, [0]).rows; + T(curr.length == 3); + T(curr[0].value == 15); + T(curr[1].value == 16); + T(curr[2].value == 17); + + curr = db.view("test/multi_emit", + {skip: 1, count: 5, startkey_docid: "25", endkey_docid: "27"}, [1]).rows; + T(curr.length == 2); + T(curr[0].value == 26); + T(curr[1].value == 27); + + curr = db.view("test/multi_emit", + {skip: 1, count: 5, startkey_docid: "28", endkey_docid: "26", descending: "true"}, [1]).rows; + T(curr.length == 2); + T(curr[0].value == 27); + T(curr[1].value == 26); + }, + + view_multi_key_temp: function(debug) { + var db = new CouchDB("test_suite_db"); + db.deleteDb(); + db.createDb(); + if (debug) debugger; + + var docs = makeDocs(0, 100); + T(db.bulkSave(docs).ok); + + var queryFun = function(doc) { emit(doc.integer, doc.integer) }; + var reduceFun = function (keys, values) { return sum(values); }; + + var keys = [10,15,30,37,50]; + var rows = db.query(queryFun, null, {}, keys).rows; + for(var i=0; i<rows.length; i++) { + T(keys.indexOf(rows[i].key) != -1); + T(rows[i].key == rows[i].value); + } + + var reduce = db.query(queryFun, reduceFun, {group:true}, keys).rows; + for(var i=0; i<reduce.length; i++) { + T(keys.indexOf(reduce[i].key) != -1); + T(reduce[i].key == reduce[i].value); + } + }, + view_pagination: 
     var db = new CouchDB("test_suite_db");
     db.deleteDb();
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index ff3d8125..20805c85 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -156,38 +156,17 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
 
 db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
     send_method_not_allowed(Req, "POST");
-
+
 db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
-    #view_query_args{
-        start_key = StartKey,
-        start_docid = StartDocId,
-        count = Count,
-        skip = SkipCount,
-        direction = Dir
-    } = QueryArgs = couch_httpd_view:parse_view_query(Req),
-    {ok, Info} = couch_db:get_db_info(Db),
-    TotalRowCount = proplists:get_value(doc_count, Info),
+    all_docs_view(Req, Db, nil);
 
-    StartId = if is_binary(StartKey) -> StartKey;
-    true -> StartDocId
-    end,
-
-    FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, TotalRowCount,
-        fun couch_db:enum_docs_reduce_to_count/1),
-    AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
-        case couch_doc:to_doc_info(FullDocInfo) of
-        #doc_info{deleted=false, rev=Rev} ->
-            FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
-        #doc_info{deleted=true} ->
-            {ok, Acc}
-        end
-    end,
-    {ok, FoldResult} = couch_db:enum_docs(Db, StartId, Dir, AdapterFun,
-        {Count, SkipCount, undefined, []}),
-    couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
+db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
+    {Props} = couch_httpd:json_body(Req),
+    Keys = proplists:get_value(<<"keys">>, Props, nil),
+    all_docs_view(Req, Db, Keys);
 
 db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) ->
-    send_method_not_allowed(Req, "GET,HEAD");
+    send_method_not_allowed(Req, "GET,HEAD,POST");
 
 db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) ->
     #view_query_args{
@@ -261,6 +240,72 @@ db_req(#httpd{path_parts=[_, DocId]}=Req, Db) ->
 db_req(#httpd{path_parts=[_, DocId, FileName]}=Req, Db) ->
     db_attachment_req(Req, Db, DocId, FileName).
 
+all_docs_view(Req, Db, Keys) ->
+    #view_query_args{
+        start_key = StartKey,
+        start_docid = StartDocId,
+        count = Count,
+        skip = SkipCount,
+        direction = Dir
+    } = QueryArgs = couch_httpd_view:parse_view_query(Req, Keys),
+    {ok, Info} = couch_db:get_db_info(Db),
+    TotalRowCount = proplists:get_value(doc_count, Info),
+    StartId = if is_binary(StartKey) -> StartKey;
+    true -> StartDocId
+    end,
+    FoldAccInit = {Count, SkipCount, undefined, []},
+
+    case Keys of
+    nil ->
+        FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs,
+            TotalRowCount, fun couch_db:enum_docs_reduce_to_count/1),
+        AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
+            case couch_doc:to_doc_info(FullDocInfo) of
+            #doc_info{deleted=false, rev=Rev} ->
+                FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
+            #doc_info{deleted=true} ->
+                {ok, Acc}
+            end
+        end,
+        {ok, FoldResult} = couch_db:enum_docs(Db, StartId, Dir,
+            AdapterFun, FoldAccInit),
+        couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
+    _ ->
+        FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs,
+            TotalRowCount, fun(Offset) -> Offset end),
+        KeyFoldFun = case Dir of
+        fwd ->
+            fun lists:foldl/3;
+        rev ->
+            fun lists:foldr/3
+        end,
+        {ok, FoldResult} = KeyFoldFun(
+            fun(Key, {ok, FoldAcc}) ->
+                DocInfo = (catch couch_db:get_doc_info(Db, Key)),
+                Doc = case DocInfo of
+                {ok, #doc_info{id=Id, rev=Rev, deleted=false}} = DocInfo ->
+                    {{Id, Id}, {[{rev, Rev}]}};
+                {ok, #doc_info{id=Id, rev=Rev, deleted=true}} = DocInfo ->
+                    {{Id, Id}, {[{rev, Rev}, {deleted, true}]}};
+                not_found ->
+                    {{Key, error}, not_found};
+                _ ->
+                    ?LOG_ERROR("Invalid DocInfo: ~p", [DocInfo]),
+                    throw({error, invalid_doc_info})
+                end,
+                Acc = (catch FoldlFun(Doc, 0, FoldAcc)),
+                case Acc of
+                {stop, Acc2} ->
+                    {ok, Acc2};
+                _ ->
+                    Acc
+                end
+            end, {ok, FoldAccInit}, Keys),
+        couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult})
+    end.
+
+
 db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
diff --git a/src/couchdb/couch_httpd_view.erl b/src/couchdb/couch_httpd_view.erl
index c513e750..de6a6af7 100644
--- a/src/couchdb/couch_httpd_view.erl
+++ b/src/couchdb/couch_httpd_view.erl
@@ -15,40 +15,47 @@
 
 -export([handle_view_req/2,handle_temp_view_req/2]).
 
--export([parse_view_query/1,make_view_fold_fun/4,finish_view_fold/3]).
+-export([parse_view_query/1,parse_view_query/2,make_view_fold_fun/4,finish_view_fold/3]).
 
 -import(couch_httpd,
     [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
     start_json_response/2,send_chunk/2,end_json_response/1]).
-
-handle_view_req(#httpd{method='GET',path_parts=[_,_, Id, ViewName]}=Req, Db) ->
-    #view_query_args{
-        reduce = Reduce
-    } = QueryArgs = parse_view_query(Req),
-
+design_doc_view(Req, Db, Id, ViewName, Keys) ->
     case couch_view:get_map_view({couch_db:name(Db),
             <<"_design/", Id/binary>>, ViewName}) of
     {ok, View} ->
-        output_map_view(Req, View, QueryArgs);
+        QueryArgs = parse_view_query(Req, Keys),
+        output_map_view(Req, View, QueryArgs, Keys);
     {not_found, Reason} ->
         case couch_view:get_reduce_view({couch_db:name(Db),
                 <<"_design/", Id/binary>>, ViewName}) of
        {ok, View} ->
+            #view_query_args{
+                reduce = Reduce
+            } = QueryArgs = parse_view_query(Req, Keys, true),
            case Reduce of
            false ->
                {reduce, _N, _Lang, MapView} = View,
-                output_map_view(Req, MapView, QueryArgs);
+                output_map_view(Req, MapView, QueryArgs, Keys);
            _ ->
-                output_reduce_view(Req, View)
+                output_reduce_view(Req, View, QueryArgs, Keys)
            end;
        _ ->
            throw({not_found, Reason})
        end
-    end;
+    end.
+
+handle_view_req(#httpd{method='GET',path_parts=[_,_, Id, ViewName]}=Req, Db) ->
+    design_doc_view(Req, Db, Id, ViewName, nil);
+
+handle_view_req(#httpd{method='POST',path_parts=[_,_, Id, ViewName]}=Req, Db) ->
+    {Props} = couch_httpd:json_body(Req),
+    Keys = proplists:get_value(<<"keys">>, Props, nil),
+    design_doc_view(Req, Db, Id, ViewName, Keys);
 
 handle_view_req(Req, _Db) ->
-    send_method_not_allowed(Req, "GET,HEAD").
+    send_method_not_allowed(Req, "GET,POST,HEAD").
 
 handle_temp_view_req(#httpd{method='POST'}=Req, Db) ->
     QueryArgs = parse_view_query(Req),
@@ -61,20 +68,21 @@
     {Props} = couch_httpd:json_body(Req),
     Language = proplists:get_value(<<"language">>, Props, <<"javascript">>),
     MapSrc = proplists:get_value(<<"map">>, Props),
+    Keys = proplists:get_value(<<"keys">>, Props, nil),
     case proplists:get_value(<<"reduce">>, Props, null) of
     null ->
         {ok, View} = couch_view:get_map_view({temp, couch_db:name(Db),
             Language, MapSrc}),
-        output_map_view(Req, View, QueryArgs);
+        output_map_view(Req, View, QueryArgs, Keys);
     RedSrc ->
         {ok, View} = couch_view:get_reduce_view(
             {temp, couch_db:name(Db), Language, MapSrc, RedSrc}),
-        output_reduce_view(Req, View)
+        output_reduce_view(Req, View, QueryArgs, Keys)
     end;
 
 handle_temp_view_req(Req, _Db) ->
     send_method_not_allowed(Req, "POST").
 
-output_map_view(Req, View, QueryArgs) ->
+output_map_view(Req, View, QueryArgs, nil) ->
     #view_query_args{
         count = Count,
         direction = Dir,
@@ -88,9 +96,30 @@
         fun couch_view:reduce_to_count/1),
     FoldAccInit = {Count, SkipCount, undefined, []},
     FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
+    finish_view_fold(Req, RowCount, FoldResult);
+
+output_map_view(Req, View, QueryArgs, Keys) ->
+    #view_query_args{
+        count = Count,
+        direction = Dir,
+        skip = SkipCount,
+        start_docid = StartDocId
+    } = QueryArgs,
+    {ok, RowCount} = couch_view:get_row_count(View),
+    FoldAccInit = {Count, SkipCount, undefined, []},
+    FoldResult = lists:foldl(
+        fun(Key, {ok, FoldAcc}) ->
+            Start = {Key, StartDocId},
+            FoldlFun = make_view_fold_fun(Req,
+                QueryArgs#view_query_args{
+                    start_key = Key,
+                    end_key = Key
+                }, RowCount, fun couch_view:reduce_to_count/1),
+            couch_view:fold(View, Start, Dir, FoldlFun, FoldAcc)
+        end, {ok, FoldAccInit}, Keys),
     finish_view_fold(Req, RowCount, FoldResult).
 
-output_reduce_view(Req, View) ->
+output_reduce_view(Req, View, QueryArgs, nil) ->
     #view_query_args{
         start_key = StartKey,
         end_key = EndKey,
@@ -100,7 +129,39 @@
         count = Count,
         skip = Skip,
         direction = Dir,
         start_docid = StartDocId,
         end_docid = EndDocId,
         group_level = GroupLevel
-    } = parse_view_query(Req),
+    } = QueryArgs,
+    {ok, Resp} = start_json_response(Req, 200),
+    {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Resp, GroupLevel),
+    send_chunk(Resp, "{\"rows\":["),
+    {ok, _} = couch_view:fold_reduce(View, Dir, {StartKey, StartDocId},
+        {EndKey, EndDocId}, GroupRowsFun, RespFun, {"", Skip, Count}),
+    send_chunk(Resp, "]}"),
+    end_json_response(Resp);
+
+output_reduce_view(Req, View, QueryArgs, Keys) ->
+    #view_query_args{
+        count = Count,
+        skip = Skip,
+        direction = Dir,
+        start_docid = StartDocId,
+        end_docid = EndDocId,
+        group_level = GroupLevel
+    } = QueryArgs,
+    {ok, Resp} = start_json_response(Req, 200),
+    {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Resp, GroupLevel),
+    send_chunk(Resp, "{\"rows\":["),
+    lists:foldl(
+        fun(Key, AccSeparator) ->
+            {ok, _} = couch_view:fold_reduce(View, Dir, {Key, StartDocId},
+                {Key, EndDocId}, GroupRowsFun, RespFun,
+                {AccSeparator, Skip, Count}),
+            "," % Switch to comma
+        end,
+        "", Keys), % Start with no comma
+    send_chunk(Resp, "]}"),
+    end_json_response(Resp).
+
+make_reduce_fold_funs(Resp, GroupLevel) ->
     GroupRowsFun =
         fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
             true;
@@ -110,55 +171,76 @@
         ({Key1,_}, {Key2,_}) ->
             Key1 == Key2
         end,
-    {ok, Resp} = start_json_response(Req, 200),
-    send_chunk(Resp, "{\"rows\":["),
-    {ok, _} = couch_view:fold_reduce(View, Dir, {StartKey, StartDocId}, {EndKey, EndDocId},
-        GroupRowsFun,
-        fun(_Key, _Red, {AccSeparator,AccSkip,AccCount}) when AccSkip > 0 ->
-            {ok, {AccSeparator,AccSkip-1,AccCount}};
-        (_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
-            {stop, {AccSeparator,0,AccCount}};
-        (_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
-            Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
-            send_chunk(Resp, AccSeparator ++ Json),
-            {ok, {",",0,AccCount-1}};
-        (Key, Red, {AccSeparator,0,AccCount})
-                when is_integer(GroupLevel)
-                andalso is_list(Key) ->
-            Json = ?JSON_ENCODE(
-                {[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
-            send_chunk(Resp, AccSeparator ++ Json),
-            {ok, {",",0,AccCount-1}};
-        (Key, Red, {AccSeparator,0,AccCount}) ->
-            Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
-            send_chunk(Resp, AccSeparator ++ Json),
-            {ok, {",",0,AccCount-1}}
-        end, {"", Skip, Count}),
-    send_chunk(Resp, "]}"),
-    end_json_response(Resp).
+    RespFun = fun(_Key, _Red, {AccSeparator,AccSkip,AccCount}) when AccSkip > 0 ->
+        {ok, {AccSeparator,AccSkip-1,AccCount}};
+    (_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
+        {stop, {AccSeparator,0,AccCount}};
+    (_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
+        Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
+        send_chunk(Resp, AccSeparator ++ Json),
+        {ok, {",",0,AccCount-1}};
+    (Key, Red, {AccSeparator,0,AccCount})
+            when is_integer(GroupLevel)
+            andalso is_list(Key) ->
+        Json = ?JSON_ENCODE(
+            {[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
+        send_chunk(Resp, AccSeparator ++ Json),
+        {ok, {",",0,AccCount-1}};
+    (Key, Red, {AccSeparator,0,AccCount}) ->
+        Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
+        send_chunk(Resp, AccSeparator ++ Json),
+        {ok, {",",0,AccCount-1}}
+    end,
+    {ok, GroupRowsFun, RespFun}.
+
+
 reverse_key_default(nil) -> {};
 reverse_key_default({}) -> nil;
 reverse_key_default(Key) -> Key.
 
 parse_view_query(Req) ->
+    parse_view_query(Req, nil, nil).
+parse_view_query(Req, Keys) ->
+    parse_view_query(Req, Keys, nil).
+parse_view_query(Req, Keys, IsReduce) ->
     QueryList = couch_httpd:qs(Req),
-    lists:foldl(fun({Key,Value}, Args) ->
+    #view_query_args{
+        group_level = GroupLevel
+    } = QueryArgs = lists:foldl(fun({Key,Value}, Args) ->
         case {Key, Value} of
         {"", _} ->
             Args;
         {"key", Value} ->
-            JsonKey = ?JSON_DECODE(Value),
-            Args#view_query_args{start_key=JsonKey,end_key=JsonKey};
+            case Keys of
+            nil ->
+                JsonKey = ?JSON_DECODE(Value),
+                Args#view_query_args{start_key=JsonKey,end_key=JsonKey};
+            _ ->
+                Msg = io_lib:format("Query parameter \"~s\" not compatible with multi key mode.", [Key]),
+                throw({query_parse_error, Msg})
+            end;
         {"startkey_docid", DocId} ->
             Args#view_query_args{start_docid=list_to_binary(DocId)};
         {"endkey_docid", DocId} ->
             Args#view_query_args{end_docid=list_to_binary(DocId)};
         {"startkey", Value} ->
-            Args#view_query_args{start_key=?JSON_DECODE(Value)};
+            case Keys of
+            nil ->
+                Args#view_query_args{start_key=?JSON_DECODE(Value)};
+            _ ->
+                Msg = io_lib:format("Query parameter \"~s\" not compatible with multi key mode.", [Key]),
+                throw({query_parse_error, Msg})
+            end;
         {"endkey", Value} ->
-            Args#view_query_args{end_key=?JSON_DECODE(Value)};
+            case Keys of
+            nil ->
+                Args#view_query_args{end_key=?JSON_DECODE(Value)};
+            _ ->
+                Msg = io_lib:format("Query parameter \"~s\" not compatible with multi key mode.", [Key]),
+                throw({query_parse_error, Msg})
+            end;
         {"count", Value} ->
             case (catch list_to_integer(Value)) of
             Count when is_integer(Count) ->
@@ -201,10 +283,24 @@
                     "Bad URL query value, number expected: skip=~s", [Value])),
                 throw({query_parse_error, Msg})
             end;
-        {"group", "true"} ->
-            Args#view_query_args{group_level=exact};
+        {"group", Value} ->
+            case Value of
+            "true" ->
+                Args#view_query_args{group_level=exact};
+            "false" ->
+                Args#view_query_args{group_level=0};
+            _ ->
+                Msg = "Bad URL query value for 'group': expected \"true\" or \"false\".",
+                throw({query_parse_error, Msg})
+            end;
         {"group_level", LevelStr} ->
-            Args#view_query_args{group_level=list_to_integer(LevelStr)};
+            case Keys of
+            nil ->
+                Args#view_query_args{group_level=list_to_integer(LevelStr)};
+            _ ->
+                Msg = lists:flatten(io_lib:format(
+                    "Multi-key fetches for a reduce view must include group=true", [])),
+                throw({query_parse_error, Msg})
+            end;
         {"reduce", "true"} ->
             Args#view_query_args{reduce=true};
         {"reduce", "false"} ->
@@ -214,7 +310,25 @@
                 "Bad URL query key:~s", [Key])),
             throw({query_parse_error, Msg})
         end
-    end, #view_query_args{}, QueryList).
+    end, #view_query_args{}, QueryList),
+    case Keys of
+    nil ->
+        QueryArgs;
+    _ ->
+        case IsReduce of
+        nil ->
+            QueryArgs;
+        _ ->
+            case GroupLevel of
+            exact ->
+                QueryArgs;
+            _ ->
+                Msg = lists:flatten(io_lib:format(
+                    "Multi-key fetches for a reduce view must include group=true", [])),
+                throw({query_parse_error, Msg})
+            end
+        end
+    end.
 
 
 make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
@@ -253,12 +367,21 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
             {ok, Resp2} = start_json_response(Req, 200),
             JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
                     [TotalViewCount, Offset]),
-            JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
-
+            JsonObj = case DocId of
+            error ->
+                {[{key, Key}, {error, Value}]};
+            _ ->
+                {[{id, DocId}, {key, Key}, {value, Value}]}
+            end,
             send_chunk(Resp2, JsonBegin ++ ?JSON_ENCODE(JsonObj)),
             {ok, {AccCount - 1, 0, Resp2, AccRevRows}};
         {_, AccCount, _, Resp} when (AccCount > 0) ->
-            JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+            JsonObj = case DocId of
+            error ->
+                {[{key, Key}, {error, Value}]};
+            _ ->
+                {[{id, DocId}, {key, Key}, {value, Value}]}
+            end,
             send_chunk(Resp, ",\r\n" ++ ?JSON_ENCODE(JsonObj)),
             {ok, {AccCount - 1, 0, Resp, AccRevRows}}
         end
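Two behaviors of the patched server are worth noting, sketched here with the couch.js test helpers (the summate view is the one defined in the test suite above): a reduce view queried with keys must also pass group=true, and single-key parameters (key, startkey, endkey) are rejected in multi-key mode with a query_parse_error. A missing document id comes back as an error row rather than failing the whole request.

    var sums = db.view("test/summate", {group: true}, [10, 15]).rows; // one reduced row per key
    try {
      db.view("test/summate", {}, [10, 15]); // no group=true: rejected
    } catch (e) {
      T(e.error == "query_parse_error");
    }
    var rows = db.allDocs({}, ["i_dont_exist"]).rows;
    T(rows[0].key == "i_dont_exist" && rows[0].error == "not_found");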