author    John Christopher Anderson <jchris@apache.org>  2009-01-04 02:28:08 +0000
committer John Christopher Anderson <jchris@apache.org>  2009-01-04 02:28:08 +0000
commit    14d8a23c9b5bd69099b4bd2c3ca6c3eb0441a0b3 (patch)
tree      42e4446fecdcaf379119c4d71d6156b9167ddf65
parent    e27fb8eaa628128f0ec8f1797805e92ec39ec6cb (diff)
change count to limit in view query params
git-svn-id: https://svn.apache.org/repos/asf/couchdb/trunk@731159 13f79535-47bb-0310-9956-ffa450edef68
-rw-r--r--  share/www/script/browse.js        20
-rw-r--r--  share/www/script/couch_tests.js   34
-rw-r--r--  src/couchdb/couch_db.hrl           4
-rw-r--r--  src/couchdb/couch_httpd_db.erl     8
-rw-r--r--  src/couchdb/couch_httpd_view.erl  68
-rw-r--r--  src/couchdb/couch_rep.erl          2
6 files changed, 68 insertions(+), 68 deletions(-)
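The rename is purely a parameter-name change: every view and _all_docs query that previously capped its result set with "count" now takes "limit" with the same meaning. A minimal before/after sketch from the client side, in the db-handle style used by couch_tests.js (the view name is taken from the test suite below):

    // Before this commit: cap the number of returned rows with "count"
    var resp = db.view("test/all_docs", {count: 10, descending: true});

    // After this commit: the same query spells the cap "limit"
    resp = db.view("test/all_docs", {limit: 10, descending: true});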
diff --git a/share/www/script/browse.js b/share/www/script/browse.js
index b91520bb..39483c1a 100644
--- a/share/www/script/browse.js
+++ b/share/www/script/browse.js
@@ -276,7 +276,7 @@ function CouchDatabasePage() {
load: function(elem) {
$("#input_docid", elem).val(designDocId).suggest(function(text, callback) {
db.allDocs({
- count: 10, startkey: "_design/" + text,
+ limit: 10, startkey: "_design/" + text,
endkey: "_design/" + text + "ZZZZ",
success: function(docs) {
var matches = [];
@@ -379,8 +379,8 @@ function CouchDatabasePage() {
this.updateDocumentListing = function(options) {
$(document.body).addClass("loading");
if (options === undefined) options = {};
- if (options.count === undefined) {
- options.count = parseInt($("#perpage").val(), 10);
+ if (options.limit === undefined) {
+ options.limit = parseInt($("#perpage").val(), 10);
}
if (options.group === undefined) {
options.group = true;
@@ -406,16 +406,16 @@ function CouchDatabasePage() {
resp.rows = resp.rows.reverse();
}
if (resp.rows !== null && (decending_reverse ?
- (resp.total_rows - resp.offset > options.count) :
+ (resp.total_rows - resp.offset > options.limit) :
(resp.offset > 0))) {
- $("#paging a.prev").attr("href", "#" + (resp.offset - options.count)).click(function() {
+ $("#paging a.prev").attr("href", "#" + (resp.offset - options.limit)).click(function() {
var firstDoc = resp.rows[0];
page.updateDocumentListing({
startkey: firstDoc.key !== undefined ? firstDoc.key : null,
startkey_docid: firstDoc.id,
skip: 1,
descending: !descend,
- count: options.count
+ limit: options.limit
});
return false;
});
@@ -423,16 +423,16 @@ function CouchDatabasePage() {
$("#paging a.prev").removeAttr("href");
}
if (resp.rows !== null && (decending_reverse ?
- (resp.offset - resp.total_rows < options.count) :
- (resp.total_rows - resp.offset > options.count))) {
- $("#paging a.next").attr("href", "#" + (resp.offset + options.count)).click(function() {
+ (resp.offset - resp.total_rows < options.limit) :
+ (resp.total_rows - resp.offset > options.limit))) {
+ $("#paging a.next").attr("href", "#" + (resp.offset + options.limit)).click(function() {
var lastDoc = resp.rows[resp.rows.length - 1];
page.updateDocumentListing({
startkey: lastDoc.key !== undefined ? lastDoc.key : null,
startkey_docid: lastDoc.id,
skip: 1,
descending: descend,
- count: options.count
+ limit: options.limit
});
return false;
});
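The browse.js changes sit inside Futon's keyset-style pager: rather than tracking a numeric offset, each page is fetched by restarting the view at the boundary row's key and doc id, skipping that row, and capping the page with limit. A hedged restatement of that pattern (the function name is hypothetical; resp is shaped like a CouchDB view response):

    // Build the options for the next page from the last row of the current one.
    function nextPageOptions(resp, pageSize) {
      var lastDoc = resp.rows[resp.rows.length - 1];
      return {
        startkey: lastDoc.key !== undefined ? lastDoc.key : null,
        startkey_docid: lastDoc.id, // disambiguates rows with equal keys
        skip: 1,                    // don't repeat the boundary row itself
        limit: pageSize             // formerly "count"
      };
    }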
diff --git a/share/www/script/couch_tests.js b/share/www/script/couch_tests.js
index c0f909ad..16fa51e7 100644
--- a/share/www/script/couch_tests.js
+++ b/share/www/script/couch_tests.js
@@ -492,7 +492,7 @@ var tests = {
result = db.query(map, reduce, {startkey: 4, endkey: 6});
T(result.rows[0].value == 30);
- result = db.query(map, reduce, {group:true, count:3});
+ result = db.query(map, reduce, {group:true, limit:3});
T(result.rows[0].value == 2);
T(result.rows[1].value == 4);
T(result.rows[2].value == 6);
@@ -536,8 +536,8 @@ var tests = {
T(equals(results.rows[2], {key:["a", "b", "c"],value:10*i}));
T(equals(results.rows[3], {key:["a", "b", "d"],value:10*i}));
- // test to make sure group reduce and count params provide valid json
- var results = db.query(map, reduce, {group: true, count: 2});
+ // test to make sure group reduce and limit params provide valid json
+ var results = db.query(map, reduce, {group: true, limit: 2});
T(equals(results.rows[0], {key: ["a"], value: 20*i}));
T(equals(results.rows.length, 2));
@@ -1394,7 +1394,7 @@ var tests = {
}
T(db.save(designDoc).ok);
- var resp = db.view('test/all_docs', {include_docs: true, count: 2});
+ var resp = db.view('test/all_docs', {include_docs: true, limit: 2});
T(resp.rows.length == 2);
T(resp.rows[0].id == "0");
T(resp.rows[0].doc._id == "0");
@@ -1406,7 +1406,7 @@ var tests = {
T(resp.rows[0].doc._id == "29");
T(resp.rows[1].doc.integer == 74);
- resp = db.allDocs({count: 2, skip: 1, include_docs: true});
+ resp = db.allDocs({limit: 2, skip: 1, include_docs: true});
T(resp.rows.length == 2);
T(resp.rows[0].doc.integer == 1);
T(resp.rows[1].doc.integer == 10);
@@ -1420,7 +1420,7 @@ var tests = {
T(resp.rows[0].doc.integer == 1);
T(!resp.rows[1].doc);
- resp = db.allDocs({include_docs: true, count: 0});
+ resp = db.allDocs({include_docs: true, limit: 0});
T(resp.rows.length == 0);
// No reduce support
@@ -1482,7 +1482,7 @@ var tests = {
for(var i=0; i<rows.length; i++)
T(rows[i].id == keys[i]);
- rows = db.allDocs({count: 1}, keys).rows;
+ rows = db.allDocs({limit: 1}, keys).rows;
T(rows.length == 1);
T(rows[0].id == keys[0]);
@@ -1496,7 +1496,7 @@ var tests = {
for(var i=0; i<rows.length; i++)
T(rows[i].id == keys[keys.length-i-1]);
- rows = db.allDocs({descending: "true", skip: 3, count:1}, keys).rows;
+ rows = db.allDocs({descending: "true", skip: 3, limit:1}, keys).rows;
T(rows.length == 1);
T(rows[0].id == keys[1]);
@@ -1596,8 +1596,8 @@ var tests = {
T(curr[i].value == exp_val[i]);
}
- // Check count works
- curr = db.view("test/all_docs", {count: 1}, keys).rows;
+ // Check limit works
+ curr = db.view("test/all_docs", {limit: 1}, keys).rows;
T(curr.length == 1);
T(curr[0].key == 10);
@@ -1613,25 +1613,25 @@ var tests = {
T(curr[99].value == 0);
// Check a couple combinations
- curr = db.view("test/multi_emit", {descending: "true", skip: 3, count: 2}, [2]).rows;
+ curr = db.view("test/multi_emit", {descending: "true", skip: 3, limit: 2}, [2]).rows;
T(curr.length, 2);
T(curr[0].value == 96);
T(curr[1].value == 95);
- curr = db.view("test/multi_emit", {skip: 2, count: 3, startkey_docid: "13"}, [0]).rows;
+ curr = db.view("test/multi_emit", {skip: 2, limit: 3, startkey_docid: "13"}, [0]).rows;
T(curr.length == 3);
T(curr[0].value == 15);
T(curr[1].value == 16);
T(curr[2].value == 17);
curr = db.view("test/multi_emit",
- {skip: 1, count: 5, startkey_docid: "25", endkey_docid: "27"}, [1]).rows;
+ {skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27"}, [1]).rows;
T(curr.length == 2);
T(curr[0].value == 26);
T(curr[1].value == 27);
curr = db.view("test/multi_emit",
- {skip: 1, count: 5, startkey_docid: "28", endkey_docid: "26", descending: "true"}, [1]).rows;
+ {skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: "true"}, [1]).rows;
T(curr.length == 2);
T(curr[0].value == 27);
T(curr[1].value == 26);
@@ -1680,7 +1680,7 @@ var tests = {
var queryResults = db.query(queryFun, null, {
startkey: i,
startkey_docid: i,
- count: 10
+ limit: 10
});
T(queryResults.rows.length == 10)
T(queryResults.total_rows == docs.length)
@@ -1697,7 +1697,7 @@ var tests = {
startkey: i,
startkey_docid: i,
descending: true,
- count: 10
+ limit: 10
});
T(queryResults.rows.length == 10)
T(queryResults.total_rows == docs.length)
@@ -1714,7 +1714,7 @@ var tests = {
startkey: i,
startkey_docid: i,
descending: false,
- count: 10
+ limit: 10
});
T(queryResults.rows.length == 10)
T(queryResults.total_rows == docs.length)
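The updated tests pin down limit's semantics alongside skip, descending, and startkey_docid; the same option names are simply serialized onto the HTTP query string. One edge worth noting, taken directly from the suite above: limit=0 is valid and yields an empty row set rather than an error.

    // limit: 0 is legal -- the fold stops before emitting any rows,
    // but the response is still well-formed JSON.
    var resp = db.allDocs({include_docs: true, limit: 0});
    T(resp.rows.length == 0);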
diff --git a/src/couchdb/couch_db.hrl b/src/couchdb/couch_db.hrl
index eb7bd9a1..22fad2ef 100644
--- a/src/couchdb/couch_db.hrl
+++ b/src/couchdb/couch_db.hrl
@@ -139,8 +139,8 @@
-record(view_query_args, {
start_key = nil,
end_key = {},
- count = 10000000000, % a huge huge default number. Picked so we don't have
- % to do different logic for when there is no count
+ limit = 10000000000, % a huge huge default number. Picked so we don't have
+ % to do different logic for when there is no limit
% limit
update = true,
direction = fwd,
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index eb41801f..84a0a812 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -173,7 +173,7 @@ db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) ->
db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) ->
#view_query_args{
start_key = StartKey,
- count = Count,
+ limit = Limit,
skip = SkipCount,
direction = Dir
} = QueryArgs = couch_httpd_view:parse_view_query(Req),
@@ -215,7 +215,7 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) ->
end
},
FoldlFun({{UpdateSeq, Id}, Json}, Offset, Acc)
- end, {Count, SkipCount, undefined, []}),
+ end, {Limit, SkipCount, undefined, []}),
couch_httpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) ->
@@ -259,7 +259,7 @@ all_docs_view(Req, Db, Keys) ->
#view_query_args{
start_key = StartKey,
start_docid = StartDocId,
- count = Count,
+ limit = Limit,
skip = SkipCount,
direction = Dir
} = QueryArgs = couch_httpd_view:parse_view_query(Req, Keys),
@@ -268,7 +268,7 @@ all_docs_view(Req, Db, Keys) ->
StartId = if is_binary(StartKey) -> StartKey;
true -> StartDocId
end,
- FoldAccInit = {Count, SkipCount, undefined, []},
+ FoldAccInit = {Limit, SkipCount, undefined, []},
case Keys of
nil ->
diff --git a/src/couchdb/couch_httpd_view.erl b/src/couchdb/couch_httpd_view.erl
index 3a25bd42..80e06908 100644
--- a/src/couchdb/couch_httpd_view.erl
+++ b/src/couchdb/couch_httpd_view.erl
@@ -84,7 +84,7 @@ handle_slow_view_req(Req, _Db) ->
output_map_view(Req, View, Db, QueryArgs, nil) ->
#view_query_args{
- count = Count,
+ limit = Limit,
direction = Dir,
skip = SkipCount,
start_key = StartKey,
@@ -94,19 +94,19 @@ output_map_view(Req, View, Db, QueryArgs, nil) ->
Start = {StartKey, StartDocId},
FoldlFun = make_view_fold_fun(Req, QueryArgs, Db, RowCount,
fun couch_view:reduce_to_count/1),
- FoldAccInit = {Count, SkipCount, undefined, []},
+ FoldAccInit = {Limit, SkipCount, undefined, []},
FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
finish_view_fold(Req, RowCount, FoldResult);
output_map_view(Req, View, Db, QueryArgs, Keys) ->
#view_query_args{
- count = Count,
+ limit = Limit,
direction = Dir,
skip = SkipCount,
start_docid = StartDocId
} = QueryArgs,
{ok, RowCount} = couch_view:get_row_count(View),
- FoldAccInit = {Count, SkipCount, undefined, []},
+ FoldAccInit = {Limit, SkipCount, undefined, []},
FoldResult = lists:foldl(
fun(Key, {ok, FoldAcc}) ->
Start = {Key, StartDocId},
@@ -123,7 +123,7 @@ output_reduce_view(Req, View, QueryArgs, nil) ->
#view_query_args{
start_key = StartKey,
end_key = EndKey,
- count = Count,
+ limit = Limit,
skip = Skip,
direction = Dir,
start_docid = StartDocId,
@@ -134,13 +134,13 @@ output_reduce_view(Req, View, QueryArgs, nil) ->
{ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Resp, GroupLevel),
send_chunk(Resp, "{\"rows\":["),
{ok, _} = couch_view:fold_reduce(View, Dir, {StartKey, StartDocId},
- {EndKey, EndDocId}, GroupRowsFun, RespFun, {"", Skip, Count}),
+ {EndKey, EndDocId}, GroupRowsFun, RespFun, {"", Skip, Limit}),
send_chunk(Resp, "]}"),
end_json_response(Resp);
output_reduce_view(Req, View, QueryArgs, Keys) ->
#view_query_args{
- count = Count,
+ limit = Limit,
skip = Skip,
direction = Dir,
start_docid = StartDocId,
@@ -154,7 +154,7 @@ output_reduce_view(Req, View, QueryArgs, Keys) ->
fun(Key, AccSeparator) ->
{ok, {NewAcc, _, _}} = couch_view:fold_reduce(View, Dir, {Key, StartDocId},
{Key, EndDocId}, GroupRowsFun, RespFun,
- {AccSeparator, Skip, Count}),
+ {AccSeparator, Skip, Limit}),
NewAcc % Switch to comma
end,
"", Keys), % Start with no comma
@@ -171,25 +171,25 @@ make_reduce_fold_funs(Resp, GroupLevel) ->
({Key1,_}, {Key2,_}) ->
Key1 == Key2
end,
- RespFun = fun(_Key, _Red, {AccSeparator,AccSkip,AccCount}) when AccSkip > 0 ->
- {ok, {AccSeparator,AccSkip-1,AccCount}};
- (_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
- {stop, {AccSeparator,0,AccCount}};
- (_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
+ RespFun = fun(_Key, _Red, {AccSeparator,AccSkip,AccLimit}) when AccSkip > 0 ->
+ {ok, {AccSeparator,AccSkip-1,AccLimit}};
+ (_Key, _Red, {AccSeparator,0,AccLimit}) when AccLimit == 0 ->
+ {stop, {AccSeparator,0,AccLimit}};
+ (_Key, Red, {AccSeparator,0,AccLimit}) when GroupLevel == 0 ->
Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
send_chunk(Resp, AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}};
- (Key, Red, {AccSeparator,0,AccCount})
+ {ok, {",",0,AccLimit-1}};
+ (Key, Red, {AccSeparator,0,AccLimit})
when is_integer(GroupLevel)
andalso is_list(Key) ->
Json = ?JSON_ENCODE(
{[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
send_chunk(Resp, AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}};
- (Key, Red, {AccSeparator,0,AccCount}) ->
+ {ok, {",",0,AccLimit-1}};
+ (Key, Red, {AccSeparator,0,AccLimit}) ->
Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
send_chunk(Resp, AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}}
+ {ok, {",",0,AccLimit-1}}
end,
{ok, GroupRowsFun, RespFun}.
@@ -241,17 +241,17 @@ parse_view_query(Req, Keys, IsReduce) ->
Msg = io_lib:format("Query parameter \"~s\" not compatible with multi key mode.", [Key]),
throw({query_parse_error, Msg})
end;
- {"count", Value} ->
+ {"limit", Value} ->
case (catch list_to_integer(Value)) of
- Count when is_integer(Count) ->
- if Count < 0 ->
- Msg = io_lib:format("Count must be a positive integer: count=~s", [Value]),
+ Limit when is_integer(Limit) ->
+ if Limit < 0 ->
+ Msg = io_lib:format("Limit must be a positive integer: limit=~s", [Value]),
throw({query_parse_error, Msg});
true ->
- Args#view_query_args{count=Count}
+ Args#view_query_args{limit=Limit}
end;
_Error ->
- Msg = io_lib:format("Bad URL query value, number expected: count=~s", [Value]),
+ Msg = io_lib:format("Bad URL query value, number expected: limit=~s", [Value]),
throw({query_parse_error, Msg})
end;
{"update", "false"} ->
@@ -276,8 +276,8 @@ parse_view_query(Req, Keys, IsReduce) ->
Args;
{"skip", Value} ->
case (catch list_to_integer(Value)) of
- Count when is_integer(Count) ->
- Args#view_query_args{skip=Count};
+ Limit when is_integer(Limit) ->
+ Args#view_query_args{skip=Limit};
_Error ->
Msg = lists:flatten(io_lib:format(
"Bad URL query value, number expected: skip=~s", [Value])),
@@ -381,17 +381,17 @@ make_view_fold_fun(Req, QueryArgs, Db, TotalViewCount, ReduceCountFun) ->
end,
fun({{Key, DocId}, Value}, OffsetReds,
- {AccCount, AccSkip, Resp, AccRevRows}) ->
+ {AccLimit, AccSkip, Resp, AccRevRows}) ->
PassedEnd = PassedEndFun(Key, DocId),
- case {PassedEnd, AccCount, AccSkip, Resp} of
+ case {PassedEnd, AccLimit, AccSkip, Resp} of
{true, _, _, _} ->
% The stop key has been passed, stop looping.
- {stop, {AccCount, AccSkip, Resp, AccRevRows}};
+ {stop, {AccLimit, AccSkip, Resp, AccRevRows}};
{_, 0, _, _} ->
- % we've done "count" rows, stop foldling
+ % we've done "limit" rows, stop folding
{stop, {0, 0, Resp, AccRevRows}};
{_, _, AccSkip, _} when AccSkip > 0 ->
- {ok, {AccCount, AccSkip - 1, Resp, AccRevRows}};
+ {ok, {AccLimit, AccSkip - 1, Resp, AccRevRows}};
{_, _, _, undefined} ->
{ok, Resp2} = start_json_response(Req, 200),
Offset = ReduceCountFun(OffsetReds),
@@ -399,11 +399,11 @@ make_view_fold_fun(Req, QueryArgs, Db, TotalViewCount, ReduceCountFun) ->
[TotalViewCount, Offset]),
JsonObj = view_row_obj(Db, {{Key, DocId}, Value}, IncludeDocs),
send_chunk(Resp2, JsonBegin ++ ?JSON_ENCODE(JsonObj)),
- {ok, {AccCount - 1, 0, Resp2, AccRevRows}};
- {_, AccCount, _, Resp} when (AccCount > 0) ->
+ {ok, {AccLimit - 1, 0, Resp2, AccRevRows}};
+ {_, AccLimit, _, Resp} when (AccLimit > 0) ->
JsonObj = view_row_obj(Db, {{Key, DocId}, Value}, IncludeDocs),
send_chunk(Resp, ",\r\n" ++ ?JSON_ENCODE(JsonObj)),
- {ok, {AccCount - 1, 0, Resp, AccRevRows}}
+ {ok, {AccLimit - 1, 0, Resp, AccRevRows}}
end
end.
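Most of the couch_httpd_view.erl churn is in the fold accumulator {AccLimit, AccSkip, Resp, AccRevRows}: rows are dropped while AccSkip > 0, streamed out while AccLimit > 0 (decrementing it each time), and the fold stops the moment AccLimit reaches 0. A hedged JS restatement of that state machine, with the streaming and end-key handling elided:

    // Mirrors the clauses of make_view_fold_fun's inner fun, simplified.
    function viewFold(rows, skip, limit) {
      var out = [];
      for (var i = 0; i < rows.length; i++) {
        if (skip > 0) { skip--; continue; } // {_, _, AccSkip, _} when AccSkip > 0
        if (limit === 0) break;             // {_, 0, _, _} -> stop folding
        out.push(rows[i]);                  // send_chunk(Resp, ...)
        limit--;                            // {ok, {AccLimit - 1, ...}}
      }
      return out;
    }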
diff --git a/src/couchdb/couch_rep.erl b/src/couchdb/couch_rep.erl
index 99cca65d..881525f0 100644
--- a/src/couchdb/couch_rep.erl
+++ b/src/couchdb/couch_rep.erl
@@ -277,7 +277,7 @@ close_db(Db)->
couch_db:close(Db).
get_doc_info_list(#http_db{uri=DbUrl, headers=Headers}, StartSeq) ->
- Url = DbUrl ++ "_all_docs_by_seq?count=100&startkey="
+ Url = DbUrl ++ "_all_docs_by_seq?limit=100&startkey="
++ integer_to_list(StartSeq),
{Results} = do_http_request(Url, get, Headers),
lists:map(fun({RowInfoList}) ->
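Finally, the replicator consumes the renamed parameter directly: get_doc_info_list fetches update-sequence batches of at most 100 rows from _all_docs_by_seq, starting at the sequence it has already processed. A hedged sketch of one such request (the fetchJson helper is hypothetical):

    // One replication batch: at most 100 doc infos starting at startSeq.
    function getDocInfoList(dbUrl, startSeq, fetchJson) {
      var url = dbUrl + "_all_docs_by_seq?limit=100&startkey=" + startSeq;
      return fetchJson(url).rows;
    }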