author    Christopher Lenz <cmlenz@apache.org>    2008-08-31 09:43:41 +0000
committer Christopher Lenz <cmlenz@apache.org>    2008-08-31 09:43:41 +0000
commit    15a175144d83d6177e9bbb923a7f7157e5ea8917 (patch)
tree      92b7becc9610c46f87ddf7ab4c313642b007c4aa
parent    ac4075a7987dc43aadeb18a94e07f090d1b77546 (diff)
Merged json_term_changes branch back into trunk.
git-svn-id: https://svn.apache.org/repos/asf/incubator/couchdb/trunk@690668 13f79535-47bb-0310-9956-ffa450edef68
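
In short, this merge removes the cjson module in favor of a patched mochijson2, which changes the in-memory JSON term format throughout the code base. A minimal sketch of the two representations (illustrative only; the variable names are not from this commit):

    %% Old cjson terms (deleted below):
    %%   object -> {obj, [{"key", Value}]}   strings are lists, arrays are tuples
    %% New mochijson2-style terms:
    %%   object -> {[{<<"key">>, Value}]}    strings are binaries, arrays are lists
    OldJson = cjson:encode({obj, [{"ok", true}, {"ids", {1, 2, 3}}]}),
    NewJson = mochijson2:encode({[{ok, true}, {<<"ids">>, [1, 2, 3]}]}),
    %% both render {"ok":true,"ids":[1,2,3]}; the {Props} object form relies on
    %% the patched mochijson2.erl included in this merge.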
-rw-r--r--  share/www/script/couch_tests.js              40
-rw-r--r--  src/couchdb/Makefile.am                       3
-rw-r--r--  src/couchdb/cjson.erl                       567
-rw-r--r--  src/couchdb/couch.app.tpl.in                  1
-rw-r--r--  src/couchdb/couch_db.erl                     12
-rw-r--r--  src/couchdb/couch_db.hrl                     15
-rw-r--r--  src/couchdb/couch_db_update_notifier.erl      6
-rw-r--r--  src/couchdb/couch_db_updater.erl              6
-rw-r--r--  src/couchdb/couch_doc.erl                    93
-rw-r--r--  src/couchdb/couch_erl_driver.c                4
-rw-r--r--  src/couchdb/couch_httpd.erl                 416
-rw-r--r--  src/couchdb/couch_query_servers.erl          97
-rw-r--r--  src/couchdb/couch_rep.erl                   125
-rw-r--r--  src/couchdb/couch_server.erl                 17
-rw-r--r--  src/couchdb/couch_util.erl                   76
-rw-r--r--  src/couchdb/couch_view.erl                   91
-rw-r--r--  src/mochiweb/mochijson2.erl                  14

17 files changed, 532 insertions, 1051 deletions
diff --git a/share/www/script/couch_tests.js b/share/www/script/couch_tests.js
index fd6eaed2..91ee5469 100644
--- a/share/www/script/couch_tests.js
+++ b/share/www/script/couch_tests.js
@@ -14,6 +14,9 @@ var tests = {
// Do some basic tests.
basics: function(debug) {
+ var result = JSON.parse(CouchDB.request("GET", "/").responseText);
+ T(result.couchdb == "Welcome");
+
var db = new CouchDB("test_suite_db");
db.deleteDb();
@@ -144,7 +147,7 @@ var tests = {
// COPY with existing target
T(db.save({_id:"doc_to_be_copied",v:1}).ok);
- var doc = db.save({_id:"doc_to_be_overwritten",v:1});
+ var doc = db.save({_id:"doc_to_be_overwritten",v:2});
T(doc.ok);
// error condition
@@ -159,9 +162,9 @@ var tests = {
});
T(xhr.status == 201);
- var newRev = db.open("doc_to_be_overwritten")._rev;
- T(rev != newRev);
-
+ var over = db.open("doc_to_be_overwritten");
+ T(rev != over._rev);
+ T(over.v == 1);
},
// Do some edit conflict detection tests
@@ -311,6 +314,15 @@ var tests = {
for (i = 0; i < 5; i++) {
T(db.open(docs[i]._id) == null);
}
+
+ // verify creating a document with no id returns a new id
+ var req = CouchDB.request("POST", "/test_suite_db/_bulk_docs", {
+ body: JSON.stringify({"docs": [{"foo":"bar"}]})
+ });
+ result = JSON.parse(req.responseText);
+
+ T(result.new_revs[0].id != "");
+ T(result.new_revs[0].rev != "");
},
// test saving a semi-large quantity of documents and do some view queries.
@@ -815,8 +827,11 @@ var tests = {
T(db.bulkSave(makeDocs(1, numDocs + 1)).ok);
+ // test that the _all_docs view returns correctly with keys
+ var results = db.allDocs({startkey:"_design%2F", endkey:"_design%2FZZZ"});
+ T(results.rows.length == 1);
+
for (var loop = 0; loop < 2; loop++) {
- if (db.view("test/all_docs") == null) throw "fuck";
var rows = db.view("test/all_docs").rows;
for (var i = 0; i < numDocs; i++) {
T(rows[2*i].key == i+1);
@@ -825,8 +840,19 @@ var tests = {
T(db.view("test/no_docs").total_rows == 0)
T(db.view("test/single_doc").total_rows == 1)
restartServer();
- }
-
+ };
+
+ // test when language not specified, Javascript is implied
+ var designDoc2 = {
+ _id:"_design/test2",
+ // language: "javascript",
+ views: {
+ single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
+ }
+ };
+
+ T(db.save(designDoc2).ok);
+ T(db.view("test2/single_doc").total_rows == 1);
var summate = function(N) {return (N+1)*N/2;};
var result = db.view("test/summate");
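
The new _bulk_docs assertions above exercise server-side id generation; the matching handler change appears in the couch_httpd.erl diff further down. A condensed sketch of the response it builds (names follow that hunk):

    %% Docs POSTed without an _id get a fresh uuid, and the generated
    %% id/rev pairs are reported back under "new_revs":
    Id = case Doc#doc.id of
        <<>> -> couch_util:new_uuid();
        Id0 -> Id0
    end,
    {ok, [NewRev]} = couch_db:update_docs(Db, [Doc#doc{id=Id, revs=[]}], []),
    send_json(Req, 201, {[
        {ok, true},
        {new_revs, [{[{"id", Id}, {"rev", NewRev}]}]}
    ]}).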
diff --git a/src/couchdb/Makefile.am b/src/couchdb/Makefile.am
index 710ff1f0..e8d3662e 100644
--- a/src/couchdb/Makefile.am
+++ b/src/couchdb/Makefile.am
@@ -37,7 +37,6 @@ EXTRA_DIST = $(couch_files)
CLEANFILES = $(compiled_files) $(doc_base) $(doc_modules) edoc-info
source_files = \
- cjson.erl \
couch_btree.erl \
couch_config.erl \
couch_config_writer.erl \
@@ -61,7 +60,6 @@ source_files = \
couch_db_updater.erl
compiled_files = \
- cjson.beam \
couch.app \
couch_btree.beam \
couch_config.beam \
@@ -94,7 +92,6 @@ doc_base = \
stylesheet.css
doc_modules = \
- cjson.html \
couch_btree.html \
couch_config.html \
couch_config_writer.html \
diff --git a/src/couchdb/cjson.erl b/src/couchdb/cjson.erl
deleted file mode 100644
index 6e144c9e..00000000
--- a/src/couchdb/cjson.erl
+++ /dev/null
@@ -1,567 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2006 Mochi Media, Inc.
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without restriction,
-%% including without limitation the rights to use, copy, modify, merge,
-%% publish, distribute, sublicense, and/or sell copies of the Software,
-%% and to permit persons to whom the Software is furnished to do
-%% so, subject to the following conditions:
-%%
-%% The above copyright notice and this permission notice shall be included
-%% in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-%% @doc Yet another JSON (RFC 4627) library for Erlang.
-
--module(cjson).
--author('bob@mochimedia.com').
--export([encoder/1, encode/1]).
--export([decoder/1, decode/1]).
--export([test/0]).
-
-%
-% NOTE: This file was originally mochijson.erl and has been adapted for
-% use with CouchDB.
-%
-% The changes are:
-% {array, [...]}
-% is now
-% {...}
-% and:
-% {struct, [...]}
-% is now
-% {obj, [...]}
-%
-
-% This is a macro to placate syntax highlighters..
--define(Q, $\").
--define(ADV_COL(S, N), S#decoder{column=N+S#decoder.column}).
--define(INC_COL(S), S#decoder{column=1+S#decoder.column}).
--define(INC_LINE(S), S#decoder{column=1, line=1+S#decoder.line}).
-
-%% @type iolist() = [char() | binary() | iolist()]
-%% @type iodata() = iolist() | binary()
-%% @type json_string() = atom | string() | binary()
-%% @type json_number() = integer() | float()
-%% @type json_array() = {json_term()}
-%% @type json_object() = {struct, [{json_string(), json_term()}]}
-%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object()
-%% @type encoding() = utf8 | unicode
-%% @type encoder_option() = {input_encoding, encoding()} |
-%% {handler, function()}
-%% @type decoder_option() = {input_encoding, encoding()} |
-%% {object_hook, function()}
-
--record(encoder, {input_encoding=utf8,
- handler=null}).
-
--record(decoder, {input_encoding=utf8,
- object_hook=null,
- line=1,
- column=1,
- state=null}).
-
-%% @spec encoder([encoder_option()]) -> function()
-%% @doc Create an encoder/1 with the given options.
-encoder(Options) ->
- State = parse_encoder_options(Options, #encoder{}),
- fun (O) -> json_encode(O, State) end.
-
-%% @spec encode(json_term()) -> iolist()
-%% @doc Encode the given as JSON to an iolist.
-encode(Any) ->
- json_encode(Any, #encoder{}).
-
-%% @spec decoder([decoder_option()]) -> function()
-%% @doc Create a decoder/1 with the given options.
-decoder(Options) ->
- State = parse_decoder_options(Options, #decoder{}),
- fun (O) -> json_decode(O, State) end.
-
-%% @spec decode(iolist()) -> json_term()
-%% @doc Decode the given iolist to Erlang terms.
-decode(S) ->
- json_decode(S, #decoder{}).
-
-test() ->
- test_all().
-
-%% Internal API
-
-parse_encoder_options([], State) ->
- State;
-parse_encoder_options([{input_encoding, Encoding} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{input_encoding=Encoding});
-parse_encoder_options([{handler, Handler} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{handler=Handler}).
-
-parse_decoder_options([], State) ->
- State;
-parse_decoder_options([{input_encoding, Encoding} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{input_encoding=Encoding});
-parse_decoder_options([{object_hook, Hook} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
-
-
-format_float(F) ->
- format_float1(lists:reverse(float_to_list(F)), []).
-
-format_float1([$0, $0, _, $e | Rest], []) ->
- strip_zeros(Rest, []);
-format_float1([Sign, $e | Rest], Acc) ->
- strip_zeros(Rest, [$e, Sign | Acc]);
-format_float1([C | Rest], Acc) ->
- format_float1(Rest, [C | Acc]).
-
-strip_zeros(L=[$0, $. | _], Acc) ->
- lists:reverse(L, Acc);
-strip_zeros([$0 | Rest], Acc) ->
- strip_zeros(Rest, Acc);
-strip_zeros(L, Acc) ->
- lists:reverse(L, Acc).
-
-json_encode(true, _State) ->
- "true";
-json_encode(false, _State) ->
- "false";
-json_encode(null, _State) ->
- "null";
-json_encode(I, _State) when is_integer(I) ->
- integer_to_list(I);
-json_encode(F, _State) when is_float(F) ->
- format_float(F);
-json_encode(L, State) when is_list(L); is_binary(L); is_atom(L) ->
- json_encode_string(L, State);
-json_encode({obj, Props}, State) when is_list(Props) ->
- json_encode_proplist(Props, State);
-json_encode(Array, State) when is_tuple(Array) ->
- json_encode_array(Array, State);
-json_encode(Bad, #encoder{handler=null}) ->
- exit({json_encode, {bad_term, Bad}});
-json_encode(Bad, State=#encoder{handler=Handler}) ->
- json_encode(Handler(Bad), State).
-
-json_encode_array({}, _State) ->
- "[]";
-json_encode_array(Tuple, State) ->
- F = fun (O, Acc) ->
- [$,, json_encode(O, State) | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "[", tuple_to_list(Tuple)),
- lists:reverse([$\] | Acc1]).
-
-json_encode_proplist([], _State) ->
- "{}";
-json_encode_proplist(Props, State) ->
- F = fun ({K, V}, Acc) ->
- KS = case K of
- K when is_atom(K) ->
- json_encode_string_utf8(atom_to_list(K), [?Q]);
- K when is_integer(K) ->
- json_encode_string(integer_to_list(K), State);
- K when is_list(K); is_binary(K) ->
- json_encode_string(K, State)
- end,
- VS = json_encode(V, State),
- [$,, VS, $:, KS | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "{", Props),
- lists:reverse([$\} | Acc1]).
-
-json_encode_string(A, _State) when is_atom(A) ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(atom_to_list(A)), [?Q]);
-json_encode_string(B, _State) when is_binary(B) ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(B), [?Q]);
-json_encode_string(S, #encoder{input_encoding=utf8}) ->
- json_encode_string_utf8(S, [?Q]);
-json_encode_string(S, #encoder{input_encoding=unicode}) ->
- json_encode_string_unicode(S, [?Q]).
-
-json_encode_string_utf8([], Acc) ->
- lists:reverse([$\" | Acc]);
-json_encode_string_utf8(All=[C | Cs], Acc) ->
- case C of
- C when C >= 16#7f ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(All), Acc);
- _ ->
- Acc1 = case C of
- ?Q ->
- [?Q, $\\ | Acc];
- $/ ->
- [$/, $\\ | Acc];
- $\\ ->
- [$\\, $\\ | Acc];
- $\b ->
- [$b, $\\ | Acc];
- $\f ->
- [$f, $\\ | Acc];
- $\n ->
- [$n, $\\ | Acc];
- $\r ->
- [$r, $\\ | Acc];
- $\t ->
- [$t, $\\ | Acc];
- C when C >= 0, C < $\s ->
- [unihex(C) | Acc];
- C when C >= $\s ->
- [C | Acc];
- _ ->
- exit({json_encode, {bad_char, C}})
- end,
- json_encode_string_utf8(Cs, Acc1)
- end.
-
-json_encode_string_unicode([], Acc) ->
- lists:reverse([$\" | Acc]);
-json_encode_string_unicode([C | Cs], Acc) ->
- Acc1 = case C of
- ?Q ->
- [?Q, $\\ | Acc];
- $/ ->
- [$/, $\\ | Acc];
- $\\ ->
- [$\\, $\\ | Acc];
- $\b ->
- [$b, $\\ | Acc];
- $\f ->
- [$f, $\\ | Acc];
- $\n ->
- [$n, $\\ | Acc];
- $\r ->
- [$r, $\\ | Acc];
- $\t ->
- [$t, $\\ | Acc];
- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
- [unihex(C) | Acc];
- C when C < 16#7f ->
- [C | Acc];
- _ ->
- exit({json_encode, {bad_char, C}})
- end,
- json_encode_string_unicode(Cs, Acc1).
-
-dehex(C) when C >= $0, C =< $9 ->
- C - $0;
-dehex(C) when C >= $a, C =< $f ->
- C - $a + 10;
-dehex(C) when C >= $A, C =< $F ->
- C - $A + 10.
-
-hexdigit(C) when C >= 0, C =< 9 ->
- C + $0;
-hexdigit(C) when C =< 15 ->
- C + $a - 10.
-
-unihex(C) when C < 16#10000 ->
- <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
- Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
- [$\\, $u | Digits];
-unihex(C) when C =< 16#10FFFF ->
- N = C - 16#10000,
- S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
- S2 = 16#dc00 bor (N band 16#3ff),
- [unihex(S1), unihex(S2)].
-
-json_decode(B, S) when is_binary(B) ->
- json_decode([B], S);
-json_decode(L, S) ->
- {Res, L1, S1} = decode1(L, S),
- {eof, [], _} = tokenize(L1, S1#decoder{state=trim}),
- Res.
-
-decode1(L, S=#decoder{state=null}) ->
- case tokenize(L, S#decoder{state=any}) of
- {{const, C}, L1, S1} ->
- {C, L1, S1};
- {start_array, L1, S1} ->
- decode_array(L1, S1#decoder{state=any}, []);
- {start_object, L1, S1} ->
- decode_object(L1, S1#decoder{state=key}, [])
- end.
-
-make_object(V, #decoder{object_hook=null}) ->
- V;
-make_object(V, #decoder{object_hook=Hook}) ->
- Hook(V).
-
-decode_object(L, S=#decoder{state=key}, Acc) ->
- case tokenize(L, S) of
- {end_object, Rest, S1} ->
- V = make_object({obj, lists:reverse(Acc)}, S1),
- {V, Rest, S1#decoder{state=null}};
- {{const, K}, Rest, S1} when is_list(K) ->
- {colon, L2, S2} = tokenize(Rest, S1),
- {V, L3, S3} = decode1(L2, S2#decoder{state=null}),
- decode_object(L3, S3#decoder{state=comma}, [{K, V} | Acc])
- end;
-decode_object(L, S=#decoder{state=comma}, Acc) ->
- case tokenize(L, S) of
- {end_object, Rest, S1} ->
- V = make_object({obj, lists:reverse(Acc)}, S1),
- {V, Rest, S1#decoder{state=null}};
- {comma, Rest, S1} ->
- decode_object(Rest, S1#decoder{state=key}, Acc)
- end.
-
-decode_array(L, S=#decoder{state=any}, Acc) ->
- case tokenize(L, S) of
- {end_array, Rest, S1} ->
- {list_to_tuple(lists:reverse(Acc)), Rest, S1#decoder{state=null}};
- {start_array, Rest, S1} ->
- {Array, Rest1, S2} = decode_array(Rest, S1#decoder{state=any}, []),
- decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
- {start_object, Rest, S1} ->
- {Array, Rest1, S2} = decode_object(Rest, S1#decoder{state=key}, []),
- decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
- {{const, Const}, Rest, S1} ->
- decode_array(Rest, S1#decoder{state=comma}, [Const | Acc])
- end;
-decode_array(L, S=#decoder{state=comma}, Acc) ->
- case tokenize(L, S) of
- {end_array, Rest, S1} ->
- {list_to_tuple(lists:reverse(Acc)), Rest, S1#decoder{state=null}};
- {comma, Rest, S1} ->
- decode_array(Rest, S1#decoder{state=any}, Acc)
- end.
-
-tokenize_string(IoList=[C | _], S=#decoder{input_encoding=utf8}, Acc)
- when is_list(C); is_binary(C); C >= 16#7f ->
- List = xmerl_ucs:from_utf8(list_to_binary(lists:flatten(IoList))),
- tokenize_string(List, S#decoder{input_encoding=unicode}, Acc);
-tokenize_string("\"" ++ Rest, S, Acc) ->
- {lists:reverse(Acc), Rest, ?INC_COL(S)};
-tokenize_string("\\\"" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\" | Acc]);
-tokenize_string("\\\\" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\\ | Acc]);
-tokenize_string("\\/" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$/ | Acc]);
-tokenize_string("\\b" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\b | Acc]);
-tokenize_string("\\f" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\\ | Acc]);
-tokenize_string("\\n" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\n | Acc]);
-tokenize_string("\\r" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\r | Acc]);
-tokenize_string("\\t" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\t | Acc]);
-tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
- % coalesce UTF-16 surrogate pair?
- C = dehex(C0) bor
- (dehex(C1) bsl 4) bor
- (dehex(C2) bsl 8) bor
- (dehex(C3) bsl 12),
- tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
-tokenize_string([C | Rest], S, Acc) when C >= $\s; C < 16#10FFFF ->
- tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
-
-tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
- when is_list(C); is_binary(C); C >= 16#7f ->
- List = xmerl_ucs:from_utf8(list_to_binary(lists:flatten(IoList))),
- tokenize_number(List, Mode, S#decoder{input_encoding=unicode}, Acc);
-tokenize_number([$- | Rest], sign, S, []) ->
- tokenize_number(Rest, int, ?INC_COL(S), [$-]);
-tokenize_number(Rest, sign, S, []) ->
- tokenize_number(Rest, int, S, []);
-tokenize_number([$0 | Rest], int, S, Acc) ->
- tokenize_number(Rest, frac, ?INC_COL(S), [$0 | Acc]);
-tokenize_number([C | Rest], int, S, Acc) when C >= $1, C =< $9 ->
- tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
-tokenize_number([C | Rest], int1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, int1, S, Acc) ->
- tokenize_number(Rest, frac, S, Acc);
-tokenize_number([$., C | Rest], frac, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
-tokenize_number([E | Rest], frac, S, Acc) when E == $e; E == $E ->
- tokenize_number(Rest, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
-tokenize_number(Rest, frac, S, Acc) ->
- {{int, lists:reverse(Acc)}, Rest, S};
-tokenize_number([C | Rest], frac1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, frac1, ?INC_COL(S), [C | Acc]);
-tokenize_number([E | Rest], frac1, S, Acc) when E == $e; E == $E ->
- tokenize_number(Rest, esign, ?INC_COL(S), [$e | Acc]);
-tokenize_number(Rest, frac1, S, Acc) ->
- {{float, lists:reverse(Acc)}, Rest, S};
-tokenize_number([C | Rest], esign, S, Acc) when C == $-; C == $+ ->
- tokenize_number(Rest, eint, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, esign, S, Acc) ->
- tokenize_number(Rest, eint, S, Acc);
-tokenize_number([C | Rest], eint, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
-tokenize_number([C | Rest], eint1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, eint1, S, Acc) ->
- {{float, lists:reverse(Acc)}, Rest, S}.
-
-tokenize([], S=#decoder{state=trim}) ->
- {eof, [], S};
-tokenize([L | Rest], S) when is_list(L) ->
- tokenize(L ++ Rest, S);
-tokenize([B | Rest], S) when is_binary(B) ->
- tokenize(xmerl_ucs:from_utf8(B) ++ Rest, S#decoder{input_encoding=unicode});
-tokenize("\r\n" ++ Rest, S) ->
- tokenize(Rest, ?INC_LINE(S));
-tokenize("\n" ++ Rest, S) ->
- tokenize(Rest, ?INC_LINE(S));
-tokenize([C | Rest], S) when C == $\s; C == $\t ->
- tokenize(Rest, ?INC_COL(S));
-tokenize("{" ++ Rest, S) ->
- {start_object, Rest, ?INC_COL(S)};
-tokenize("}" ++ Rest, S) ->
- {end_object, Rest, ?INC_COL(S)};
-tokenize("[" ++ Rest, S) ->
- {start_array, Rest, ?INC_COL(S)};
-tokenize("]" ++ Rest, S) ->
- {end_array, Rest, ?INC_COL(S)};
-tokenize("," ++ Rest, S) ->
- {comma, Rest, ?INC_COL(S)};
-tokenize(":" ++ Rest, S) ->
- {colon, Rest, ?INC_COL(S)};
-tokenize("null" ++ Rest, S) ->
- {{const, null}, Rest, ?ADV_COL(S, 4)};
-tokenize("true" ++ Rest, S) ->
- {{const, true}, Rest, ?ADV_COL(S, 4)};
-tokenize("false" ++ Rest, S) ->
- {{const, false}, Rest, ?ADV_COL(S, 5)};
-tokenize("\"" ++ Rest, S) ->
- {String, Rest1, S1} = tokenize_string(Rest, ?INC_COL(S), []),
- {{const, xmerl_ucs:to_utf8(String)}, Rest1, S1};
-tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
- case tokenize_number(L, sign, S, []) of
- {{int, Int}, Rest, S1} ->
- {{const, list_to_integer(Int)}, Rest, S1};
- {{float, Float}, Rest, S1} ->
- {{const, list_to_float(Float)}, Rest, S1}
- end.
-
-%% testing constructs borrowed from the Yaws JSON implementation.
-
-%% Create an object from a list of Key/Value pairs.
-
-obj_new() ->
- {obj, []}.
-
-is_obj({obj, Props}) ->
- F = fun ({K, _}) when is_list(K) ->
- true;
- (_) ->
- false
- end,
- lists:all(F, Props).
-
-obj_from_list(Props) ->
- Obj = {obj, Props},
- case is_obj(Obj) of
- true -> Obj;
- false -> exit(json_bad_object)
- end.
-
-%% Test for equivalence of Erlang terms.
-%% Due to arbitrary order of construction, equivalent objects might
-%% compare unequal as erlang terms, so we need to carefully recurse
-%% through aggregates (tuples and objects).
-
-equiv({obj, Props1}, {obj, Props2}) ->
- equiv_object(Props1, Props2);
-equiv(T1, T2) when is_tuple(T1), is_tuple(T2) ->
- equiv_list(tuple_to_list(T1), tuple_to_list(T2));
-equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
-equiv(S1, S2) when is_list(S1), is_list(S2) -> S1 == S2;
-equiv(true, true) -> true;
-equiv(false, false) -> true;
-equiv(null, null) -> true.
-
-%% Object representation and traversal order is unknown.
-%% Use the sledgehammer and sort property lists.
-
-equiv_object(Props1, Props2) ->
- L1 = lists:keysort(1, Props1),
- L2 = lists:keysort(1, Props2),
- Pairs = lists:zip(L1, L2),
- true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
- equiv(K1, K2) and equiv(V1, V2)
- end, Pairs).
-
-%% Recursively compare tuple elements for equivalence.
-
-equiv_list([], []) ->
- true;
-equiv_list([V1 | L1], [V2 | L2]) ->
- case equiv(V1, V2) of
- true ->
- equiv_list(L1, L2);
- false ->
- false
- end.
-
-test_all() ->
- test_one(e2j_test_vec(utf8), 1).
-
-test_one([], N) ->
- io:format("~p tests passed~n", [N-1]),
- ok;
-test_one([{E, J} | Rest], N) ->
- io:format("[~p] ~p ~p~n", [N, E, J]),
- true = equiv(E, decode(J)),
- true = equiv(E, decode(encode(E))),
- test_one(Rest, 1+N).
-
-e2j_test_vec(unicode) ->
- [
- {"foo" ++ [500] ++ "bar", [$", $f, $o, $o, 500, $b, $a, $r, $"]}
- ];
-e2j_test_vec(utf8) ->
- [
- {1, "1"},
- {3.1416, "3.14160"}, % text representation may truncate, trail zeroes
- {-1, "-1"},
- {-3.1416, "-3.14160"},
- {12.0e10, "1.20000e+11"},
- {1.234E+10, "1.23400e+10"},
- {-1.234E-10, "-1.23400e-10"},
- {10.0, "1.0e+01"},
- {123.456, "1.23456E+2"},
- {10.0, "1e1"},
- {"foo", "\"foo\""},
- {"foo" ++ [5] ++ "bar", "\"foo\\u0005bar\""},
- {"", "\"\""},
- {[], "\"\""},
- {"\n\n\n", "\"\\n\\n\\n\""},
- {obj_new(), "{}"},
- {obj_from_list([{"foo", "bar"}]), "{\"foo\":\"bar\"}"},
- {obj_from_list([{"foo", "bar"}, {"baz", 123}]),
- "{\"foo\":\"bar\",\"baz\":123}"},
- {{}, "[]"},
- {{{}}, "[[]]"},
- {{1, "foo"}, "[1,\"foo\"]"},
-
- % json array in a json object
- {obj_from_list([{"foo", {123}}]),
- "{\"foo\":[123]}"},
-
- % json object in a json object
- {obj_from_list([{"foo", obj_from_list([{"bar", true}])}]),
- "{\"foo\":{\"bar\":true}}"},
-
- % fold evaluation order
- {obj_from_list([{"foo", {}},
- {"bar", obj_from_list([{"baz", true}])},
- {"alice", "bob"}]),
- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
-
- % json object in a json array
- {{-123, "foo", obj_from_list([{"bar", {}}]), null},
- "[-123,\"foo\",{\"bar\":[]},null]"}
- ].
diff --git a/src/couchdb/couch.app.tpl.in b/src/couchdb/couch.app.tpl.in
index 9766d0f9..8611df4b 100644
--- a/src/couchdb/couch.app.tpl.in
+++ b/src/couchdb/couch.app.tpl.in
@@ -2,7 +2,6 @@
[{description,"@package_name@"},
{vsn,"@version@"},
{modules,[couch_btree,
- cjson,
couch_db,
couch_db_updater,
couch_doc,
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl
index e8c7eb93..4bcefdcd 100644
--- a/src/couchdb/couch_db.erl
+++ b/src/couchdb/couch_db.erl
@@ -228,11 +228,11 @@ update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options) ->
Docs2 = lists:map(
fun(#doc{id=Id,revs=Revs}=Doc) ->
case Id of
- ?LOCAL_DOC_PREFIX ++ _ ->
- Rev = case Revs of [] -> 0; [Rev0|_] -> list_to_integer(Rev0) end,
- Doc#doc{revs=[integer_to_list(Rev + 1)]};
+ <<?LOCAL_DOC_PREFIX, _/binary>> ->
+ Rev = case Revs of [] -> 0; [Rev0|_] -> list_to_integer(binary_to_list(Rev0)) end,
+ Doc#doc{revs=[list_to_binary(integer_to_list(Rev + 1))]};
_ ->
- Doc#doc{revs=[integer_to_list(couch_util:rand32()) | Revs]}
+ Doc#doc{revs=[list_to_binary(integer_to_list(couch_util:rand32())) | Revs]}
end
end, Docs),
NewRevs = [NewRev || #doc{revs=[NewRev|_]} <- Docs2],
@@ -429,10 +429,10 @@ open_doc_revs_int(Db, IdRevs, Options) ->
end,
IdRevs, LookupResults).
-open_doc_int(Db, ?LOCAL_DOC_PREFIX ++ _ = Id, _Options) ->
+open_doc_int(Db, <<?LOCAL_DOC_PREFIX, _/binary>> = Id, _Options) ->
case couch_btree:lookup(Db#db.local_docs_btree, [Id]) of
[{ok, {_, {Rev, BodyData}}}] ->
- {ok, #doc{id=Id, revs=[integer_to_list(Rev)], body=BodyData}};
+ {ok, #doc{id=Id, revs=[list_to_binary(integer_to_list(Rev))], body=BodyData}};
[not_found] ->
{not_found, missing}
end;
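
With ids and revs now binaries, the local-doc rev arithmetic above round-trips through lists. A small helper equivalent to the inline code (the function name is an assumption, not part of the commit):

    %% _local docs use integer revisions stored as binaries after this change.
    next_local_rev([]) ->
        <<"1">>;
    next_local_rev([Rev0 | _]) ->
        Rev = list_to_integer(binary_to_list(Rev0)),
        list_to_binary(integer_to_list(Rev + 1)).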
diff --git a/src/couchdb/couch_db.hrl b/src/couchdb/couch_db.hrl
index 43860a1a..fa604108 100644
--- a/src/couchdb/couch_db.hrl
+++ b/src/couchdb/couch_db.hrl
@@ -14,7 +14,10 @@
-define(DESIGN_DOC_PREFIX0, "_design").
-define(DESIGN_DOC_PREFIX, "_design/").
--define(DEFAULT_ATTACHMENT_CONTENT_TYPE, "application/octet-stream").
+-define(JSON_ENCODE(V), mochijson2:encode(V)).
+-define(JSON_DECODE(V), mochijson2:decode(V)).
+
+-define(DEFAULT_ATTACHMENT_CONTENT_TYPE, <<"application/octet-stream">>).
-define(LOG_DEBUG(Format, Args),
case couch_log:debug_on() of
@@ -33,8 +36,8 @@
-record(doc_info,
{
- id = "",
- rev = "",
+ id = <<"">>,
+ rev = <<"">>,
update_seq = 0,
summary_pointer = nil,
conflict_revs = [],
@@ -43,7 +46,7 @@
}).
-record(full_doc_info,
- {id = "",
+ {id = <<"">>,
update_seq = 0,
deleted = false,
rev_tree = []
@@ -51,11 +54,11 @@
-record(doc,
{
- id = "",
+ id = <<"">>,
revs = [],
% the json body object.
- body = {obj, []},
+ body = {[]},
% each attachment contains:
% {data, Type, <<binary>>}
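
couch_db.hrl now centralizes JSON conversion behind ?JSON_ENCODE/?JSON_DECODE and defaults #doc.body to the new {[]} empty-object term. A minimal round trip, assuming the patched mochijson2 in this tree:

    -include("couch_db.hrl").

    round_trip_example() ->
        Body = {[{<<"language">>, <<"javascript">>}]},  % one-tuple wrapping a proplist
        Json = ?JSON_ENCODE(Body),                      % mochijson2:encode(Body)
        Body = ?JSON_DECODE(iolist_to_binary(Json)).    % decodes back to the same term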
diff --git a/src/couchdb/couch_db_update_notifier.erl b/src/couchdb/couch_db_update_notifier.erl
index f944264c..449da356 100644
--- a/src/couchdb/couch_db_update_notifier.erl
+++ b/src/couchdb/couch_db_update_notifier.erl
@@ -25,6 +25,8 @@
-export([start_link/1, notify/1]).
-export([init/1, terminate/2, handle_event/2, handle_call/2, handle_info/2, code_change/3,stop/1]).
+-include("couch_db.hrl").
+
start_link(Exec) ->
couch_event_sup:start_link(couch_db_update, {couch_db_update_notifier, make_ref()}, Exec).
@@ -50,8 +52,8 @@ handle_event(Event, {Fun, FunAcc}) ->
FunAcc2 = Fun(Event, FunAcc),
{ok, {Fun, FunAcc2}};
handle_event({EventAtom, DbName}, Port) ->
- Obj = {obj, [{type, atom_to_list(EventAtom)}, {db, DbName}]},
- true = port_command(Port, cjson:encode(Obj) ++ "\n"),
+ Obj = {[{type, atom_to_list(EventAtom)}, {db, DbName}]},
+ true = port_command(Port, ?JSON_ENCODE(Obj) ++ "\n"),
{ok, Port}.
handle_call(_Request, State) ->
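
The notifier change swaps cjson for the shared macro while keeping the newline-delimited protocol on the port. For a database event the line written would look like this (sketch; the event atom is an example, and the rendered output assumes the patched encoder treats Erlang strings as JSON strings):

    Obj = {[{type, atom_to_list(created)}, {db, <<"test_suite_db">>}]},
    true = port_command(Port, ?JSON_ENCODE(Obj) ++ "\n"),
    %% one JSON object per line, e.g. {"type":"created","db":"test_suite_db"}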
diff --git a/src/couchdb/couch_db_updater.erl b/src/couchdb/couch_db_updater.erl
index f0673af9..befbfed2 100644
--- a/src/couchdb/couch_db_updater.erl
+++ b/src/couchdb/couch_db_updater.erl
@@ -96,7 +96,7 @@ handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) ->
file:delete(Filepath ++ ".old"),
ok = gen_server:call(Db#db.main_pid, {db_updated, NewDb2}),
- ?LOG_INFO("Compaction for db ~p completed.", [Db#db.name]),
+ ?LOG_INFO("Compaction for db \"~s\" completed.", [Db#db.name]),
{noreply, NewDb2#db{compactor_pid=nil}};
false ->
?LOG_INFO("Compaction file still behind main file "
@@ -287,7 +287,7 @@ update_docs_int(Db, DocsList, Options) ->
{DocsList2, NonRepDocs} = lists:foldl(
fun([#doc{id=Id}=Doc | Rest]=Docs, {DocsListAcc, NonRepDocsAcc}) ->
case Id of
- ?LOCAL_DOC_PREFIX ++ _ when Rest==[] ->
+ <<?LOCAL_DOC_PREFIX, _/binary>> when Rest==[] ->
% when saving NR (non rep) documents, you can only save a single rev
{DocsListAcc, [Doc | NonRepDocsAcc]};
Id->
@@ -363,7 +363,7 @@ update_local_docs(#db{local_docs_btree=Btree}=Db, Docs) ->
NewRev =
case Revs of
[] -> 0;
- [RevStr|_] -> list_to_integer(RevStr)
+ [RevStr|_] -> list_to_integer(binary_to_list(RevStr))
end,
OldRev =
case OldDocLookup of
diff --git a/src/couchdb/couch_doc.erl b/src/couchdb/couch_doc.erl
index 61685cfa..12b48dc2 100644
--- a/src/couchdb/couch_doc.erl
+++ b/src/couchdb/couch_doc.erl
@@ -19,109 +19,116 @@
-include("couch_db.hrl").
to_json_obj(#doc{id=Id,deleted=Del,body=Body,revs=Revs,meta=Meta}=Doc,Options)->
- {obj, [{"_id", Id}] ++
+ {[{<<"_id">>, Id}] ++
case Revs of
[] -> [];
- _ -> [{"_rev", lists:nth(1, Revs)}]
+ _ -> [{<<"_rev">>, lists:nth(1, Revs)}]
end ++
case Del of
false ->
- {obj, BodyProps} = Body,
+ {BodyProps} = Body,
BodyProps;
true ->
- [{"_deleted", true}]
+ [{<<"_deleted">>, true}]
end ++
case lists:member(revs, Options) of
false -> [];
true ->
- [{"_revs", list_to_tuple(Revs)}]
+ [{<<"_revs">>, Revs}]
end ++
lists:map(
fun({revs_info, RevsInfo}) ->
JsonRevsInfo =
- [{obj, [{rev, Rev}, {status, atom_to_list(Status)}]} ||
+ [{[{rev, Rev}, {status, atom_to_list(Status)}]} ||
{Rev, Status} <- RevsInfo],
- {"_revs_info", list_to_tuple(JsonRevsInfo)};
+ {<<"_revs_info">>, JsonRevsInfo};
({conflicts, Conflicts}) ->
- {"_conflicts", list_to_tuple(Conflicts)};
+ {<<"_conflicts">>, Conflicts};
({deleted_conflicts, Conflicts}) ->
- {"_deleted_conflicts", list_to_tuple(Conflicts)}
+ {<<"_deleted_conflicts">>, Conflicts}
end, Meta) ++
case lists:member(attachments, Options) of
true -> % return the full rev list and the binaries as strings.
BinProps = lists:map(
fun({Name, {Type, BinValue}}) ->
- {Name, {obj, [
- {"content_type", Type},
- {"data", couch_util:encodeBase64(bin_to_binary(BinValue))}
+ {Name, {[
+ {<<"content_type">>, Type},
+ {<<"data">>, couch_util:encodeBase64(bin_to_binary(BinValue))}
]}}
end,
Doc#doc.attachments),
case BinProps of
[] -> [];
- _ -> [{"_attachments", {obj, BinProps}}]
+ _ -> [{<<"_attachments">>, {BinProps}}]
end;
false ->
BinProps = lists:map(
fun({Name, {Type, BinValue}}) ->
- {Name, {obj, [
- {"stub", true},
- {"content_type", Type},
- {"length", bin_size(BinValue)}
+ {Name, {[
+ {<<"stub">>, true},
+ {<<"content_type">>, Type},
+ {<<"length">>, bin_size(BinValue)}
]}}
end,
Doc#doc.attachments),
case BinProps of
[] -> [];
- _ -> [{"_attachments", {obj, BinProps}}]
+ _ -> [{<<"_attachments">>, {BinProps}}]
end
end
}.
-from_json_obj({obj, Props}) ->
- {obj,JsonBins} = proplists:get_value("_attachments", Props, {obj, []}),
- Bins = lists:flatmap(fun({Name, {obj, BinProps}}) ->
- case proplists:get_value("stub", BinProps) of
+from_json_obj({Props}) ->
+ {JsonBins} = proplists:get_value(<<"_attachments">>, Props, {[]}),
+ Bins = lists:flatmap(fun({Name, {BinProps}}) ->
+ case proplists:get_value(<<"stub">>, BinProps) of
true ->
[{Name, stub}];
_ ->
- Value = proplists:get_value("data", BinProps),
- Type = proplists:get_value("content_type", BinProps,
+ Value = proplists:get_value(<<"data">>, BinProps),
+ Type = proplists:get_value(<<"content_type">>, BinProps,
?DEFAULT_ATTACHMENT_CONTENT_TYPE),
[{Name, {Type, couch_util:decodeBase64(Value)}}]
end
end, JsonBins),
- AllowedSpecialMembers = ["id", "revs", "rev", "attachments", "revs_info",
- "conflicts", "deleted_conflicts", "deleted"],
+ AllowedSpecialMembers = [<<"id">>, <<"revs">>, <<"rev">>, <<"attachments">>, <<"revs_info">>,
+ <<"conflicts">>, <<"deleted_conflicts">>, <<"deleted">>],
+ % collect all the doc-members that start with "_"
+ % if any aren't in the AllowedSpecialMembers list
+ % then throw a doc_validation error
[case lists:member(Name, AllowedSpecialMembers) of
true ->
ok;
false ->
throw({doc_validation, io_lib:format("Bad special document member: _~s", [Name])})
end
- || {[$_|Name], _Value} <- Props],
+ || {<<$_,Name/binary>>, _Value} <- Props],
Revs =
- case tuple_to_list(proplists:get_value("_revs", Props, {})) of
+ case proplists:get_value(<<"_revs">>, Props, []) of
[] ->
- case proplists:get_value("_rev", Props) of
+ case proplists:get_value(<<"_rev">>, Props) of
undefined -> [];
Rev -> [Rev]
end;
Revs0 ->
Revs0
end,
- case proplists:get_value("_id", Props, "") of
- Id when is_list(Id) ->
- #doc{
- id = Id,
- revs = Revs,
- deleted = proplists:get_value("_deleted", Props, false),
- body = {obj, [{Key, Value} || {[FirstChar|_]=Key, Value} <- Props, FirstChar /= $_]},
- attachments = Bins
- };
- _ ->
+ case proplists:get_value(<<"_id">>, Props, <<>>) of
+ Id when is_binary(Id) -> ok;
+ Id ->
+ ?LOG_DEBUG("Document id is not a string: ~p", [Id]),
throw({invalid_document_id, "Document id is not a string"})
- end.
+ end,
+
+ % strip out the all props beginning with _
+ NewBody = {[{K, V} || {<<First,_/binary>>=K, V} <- Props, First /= $_]},
+ #doc{
+ id = Id,
+ revs = Revs,
+ deleted = proplists:get_value(<<"_deleted">>, Props, false),
+ body = NewBody,
+ attachments = Bins
+ }.
to_doc_info(#full_doc_info{id=Id,update_seq=Seq,rev_tree=Tree}) ->
@@ -181,9 +188,9 @@ bin_to_binary({Fd, Sp, Len}) ->
{ok, Bin, _Sp2} = couch_stream:read(Fd, Sp, Len),
Bin.
-get_view_functions(#doc{body={obj, Fields}}) ->
- Lang = proplists:get_value("language", Fields, "javascript"),
- {obj, Views} = proplists:get_value("views", Fields, {obj, []}),
+get_view_functions(#doc{body={Fields}}) ->
+ Lang = proplists:get_value(<<"language">>, Fields, <<"javascript">>),
+ {Views} = proplists:get_value(<<"views">>, Fields, {[]}),
{Lang, [{ViewName, Value} || {ViewName, Value} <- Views, is_list(Value)]};
get_view_functions(_Doc) ->
none.
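
Taken together, the couch_doc.erl changes mean documents parse from and serialize to the {Props} form with binary keys throughout. An illustrative round trip (the field values are made up):

    Doc = couch_doc:from_json_obj({[
        {<<"_id">>,  <<"some_doc">>},
        {<<"_rev">>, <<"946B7D1C">>},
        {<<"name">>, <<"value">>}
    ]}),
    <<"some_doc">> = Doc#doc.id,                   % ids are binaries now
    {[{<<"name">>, <<"value">>}]} = Doc#doc.body,  % _-prefixed members are stripped
    {Props} = couch_doc:to_json_obj(Doc, []).
    %% An unknown _-prefixed member would throw {doc_validation, ...} as above.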
diff --git a/src/couchdb/couch_erl_driver.c b/src/couchdb/couch_erl_driver.c
index 0dbedc8d..900ada4b 100644
--- a/src/couchdb/couch_erl_driver.c
+++ b/src/couchdb/couch_erl_driver.c
@@ -131,9 +131,9 @@ static int couch_drv_control(ErlDrvData drv_data, unsigned int command, const ch
if (collResult < 0)
response = 0; //lt
else if (collResult > 0)
- response = 1; //gt
+ response = 2; //gt
else
- response = 2; //eq
+ response = 1; //eq
return return_control_result(&response, sizeof(response), rbuf, rlen);
}
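
The driver fix above corrects swapped result codes: collation now reports 0 for less-than, 1 for equal, 2 for greater-than. A hypothetical Erlang-side mapping (the receiving code is not part of this diff):

    %% hypothetical decoder for the single response byte; not from this commit
    collate_result(0) -> lt;
    collate_result(1) -> eq;
    collate_result(2) -> gt.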
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index cd5c6a70..048445a9 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -26,14 +26,14 @@
-record(view_query_args, {
start_key = nil,
- end_key = <<>>,
+ end_key = {},
count = 10000000000, % a huge huge default number. Picked so we don't have
% to do different logic for when there is no count
% limit
update = true,
direction = fwd,
start_docid = nil,
- end_docid = <<>>,
+ end_docid = {},
skip = 0,
group_level = 0
}).
@@ -91,7 +91,7 @@ handle_request(Req, DocumentRoot) ->
{Path, _, _} = mochiweb_util:urlsplit_path(Req:get(raw_path)),
?LOG_DEBUG("~p ~s ~p~nHeaders: ~p", [
- Method,
+ Req:get(method),
Path,
Req:get(version),
mochiweb_headers:to_list(Req:get(headers))
@@ -148,9 +148,9 @@ handle_request0(Req, DocumentRoot, Method, Path) ->
% Global request handlers
handle_welcome_request(Req, 'GET') ->
- send_json(Req, {obj, [
- {"couchdb", "Welcome"},
- {"version", couch_server:get_version()}
+ send_json(Req, {[
+ {couchdb, <<"Welcome">>},
+ {version, list_to_binary(couch_server:get_version())}
]});
handle_welcome_request(_Req, _Method) ->
@@ -158,24 +158,24 @@ handle_welcome_request(_Req, _Method) ->
handle_all_dbs_request(Req, 'GET') ->
{ok, DbNames} = couch_server:all_databases(),
- send_json(Req, list_to_tuple(DbNames));
+ send_json(Req, DbNames);
handle_all_dbs_request(_Req, _Method) ->
throw({method_not_allowed, "GET,HEAD"}).
handle_replicate_request(Req, 'POST') ->
- {obj, Props} = cjson:decode(Req:recv_body()),
- Source = proplists:get_value("source", Props),
- Target = proplists:get_value("target", Props),
- {obj, Options} = proplists:get_value("options", Props, {obj, []}),
- {ok, {obj, JsonResults}} = couch_rep:replicate(Source, Target, Options),
- send_json(Req, {obj, [{ok, true} | JsonResults]});
+ {Props} = ?JSON_DECODE(Req:recv_body()),
+ Source = proplists:get_value(<<"source">>, Props),
+ Target = proplists:get_value(<<"target">>, Props),
+ {Options} = proplists:get_value(<<"options">>, Props, {[]}),
+ {ok, {JsonResults}} = couch_rep:replicate(Source, Target, Options),
+ send_json(Req, {[{ok, true} | JsonResults]});
handle_replicate_request(_Req, _Method) ->
throw({method_not_allowed, "POST"}).
handle_restart_request(Req, 'POST') ->
- Response = send_json(Req, {obj, [{ok, true}]}),
+ Response = send_json(Req, {[{ok, true}]}),
spawn(fun() -> couch_server:remote_restart() end),
Response;
@@ -187,7 +187,7 @@ handle_uuids_request(Req, 'POST') ->
% generate the uuids
UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)],
% send a JSON response
- send_json(Req, {obj, [{"uuids", list_to_tuple(UUIDs)}]});
+ send_json(Req, {[{"uuids", UUIDs}]});
handle_uuids_request(_Req, _Method) ->
throw({method_not_allowed, "POST"}).
@@ -197,14 +197,15 @@ handle_uuids_request(_Req, _Method) ->
handle_db_request(Req, Method, {Path}) ->
UriParts = string:tokens(Path, "/"),
- [DbName|Rest] = UriParts,
- handle_db_request(Req, Method, {mochiweb_util:unquote(DbName), Rest});
+ [DbName|Rest] =
+ [list_to_binary(mochiweb_util:unquote(Part)) || Part <- UriParts],
+ handle_db_request(Req, Method, {DbName, Rest});
handle_db_request(Req, 'PUT', {DbName, []}) ->
case couch_server:create(DbName, []) of
{ok, Db} ->
couch_db:close(Db),
- send_json(Req, 201, {obj, [{ok, true}]});
+ send_json(Req, 201, {[{ok, true}]});
{error, database_already_exists} ->
Msg = io_lib:format("Database ~p already exists.", [DbName]),
throw({database_already_exists, Msg});
@@ -216,7 +217,7 @@ handle_db_request(Req, 'PUT', {DbName, []}) ->
handle_db_request(Req, 'DELETE', {DbName, []}) ->
case couch_server:delete(DbName) of
ok ->
- send_json(Req, 200, {obj, [
+ send_json(Req, 200, {[
{ok, true}
]});
Error ->
@@ -237,15 +238,15 @@ handle_db_request(Req, Method, {DbName, Rest}) ->
handle_db_request(Req, 'GET', {DbName, Db, []}) ->
{ok, DbInfo} = couch_db:get_db_info(Db),
- send_json(Req, {obj, [{db_name, DbName} | DbInfo]});
+ send_json(Req, {[{db_name, DbName} | DbInfo]});
handle_db_request(Req, 'POST', {_DbName, Db, []}) ->
% TODO: Etag handling
- Json = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
+ Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
Doc = couch_doc:from_json_obj(Json),
DocId = couch_util:new_uuid(),
{ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=[]}, []),
- send_json(Req, 201, {obj, [
+ send_json(Req, 201, {[
{ok, true},
{id, DocId},
{rev, NewRev}
@@ -254,63 +255,63 @@ handle_db_request(Req, 'POST', {_DbName, Db, []}) ->
handle_db_request(_Req, _Method, {_DbName, _Db, []}) ->
throw({method_not_allowed, "DELETE,GET,HEAD,POST"});
-handle_db_request(Req, 'POST', {_DbName, Db, ["_bulk_docs"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_bulk_docs">>]}) ->
Options = [], % put options here.
- {obj, JsonProps} = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
- DocsArray = proplists:get_value("docs", JsonProps),
+ {JsonProps} = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
+ DocsArray = proplists:get_value(<<"docs">>, JsonProps),
% convert all the doc elements to native docs
- case proplists:get_value("new_edits", JsonProps, true) of
+ case proplists:get_value(<<"new_edits">>, JsonProps, true) of
true ->
Docs = lists:map(
- fun({obj, ObjProps} = JsonObj) ->
+ fun({ObjProps} = JsonObj) ->
Doc = couch_doc:from_json_obj(JsonObj),
Id = case Doc#doc.id of
- "" -> couch_util:new_uuid();
+ <<>> -> couch_util:new_uuid();
Id0 -> Id0
end,
- Revs = case proplists:get_value("_rev", ObjProps) of
+ Revs = case proplists:get_value(<<"_rev">>, ObjProps) of
undefined -> [];
Rev -> [Rev]
end,
Doc#doc{id=Id,revs=Revs}
end,
- tuple_to_list(DocsArray)),
+ DocsArray),
{ok, ResultRevs} = couch_db:update_docs(Db, Docs, Options),
% output the results
DocResults = lists:zipwith(
fun(Doc, NewRev) ->
- {obj, [{"id", Doc#doc.id}, {"rev", NewRev}]}
+ {[{"id", Doc#doc.id}, {"rev", NewRev}]}
end,
Docs, ResultRevs),
- send_json(Req, 201, {obj, [
+ send_json(Req, 201, {[
{ok, true},
- {new_revs, list_to_tuple(DocResults)}
+ {new_revs, DocResults}
]});
false ->
- Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- tuple_to_list(DocsArray)],
+ Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
ok = couch_db:save_docs(Db, Docs, Options),
- send_json(Req, 201, {obj, [
+ send_json(Req, 201, {[
{ok, true}
]})
end;
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_bulk_docs"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_bulk_docs">>]}) ->
throw({method_not_allowed, "POST"});
-handle_db_request(Req, 'POST', {_DbName, Db, ["_compact"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_compact">>]}) ->
ok = couch_db:start_compact(Db),
- send_json(Req, 202, {obj, [
+ send_json(Req, 202, {[
{ok, true}
]});
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_compact"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_compact">>]}) ->
throw({method_not_allowed, "POST"});
% View request handlers
-handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
+handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs">>]}) ->
#view_query_args{
start_key = StartKey,
start_docid = StartDocId,
@@ -321,7 +322,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
{ok, Info} = couch_db:get_db_info(Db),
TotalRowCount = proplists:get_value(doc_count, Info),
- StartId = if is_list(StartKey) -> StartKey;
+ StartId = if is_binary(StartKey) -> StartKey;
true -> StartDocId
end,
@@ -330,7 +331,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
case couch_doc:to_doc_info(FullDocInfo) of
#doc_info{deleted=false, rev=Rev} ->
- FoldlFun({{Id, Id}, {obj, [{rev, Rev}]}}, Offset, Acc);
+ FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
#doc_info{deleted=true} ->
{ok, Acc}
end
@@ -339,10 +340,10 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
{Count, SkipCount, undefined, []}),
finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_all_docs"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_all_docs">>]}) ->
throw({method_not_allowed, "GET,HEAD"});
-handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
+handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs_by_seq">>]}) ->
#view_query_args{
start_key = StartKey,
count = Count,
@@ -370,15 +371,15 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
conflict_revs=ConflictRevs,
deleted_conflict_revs=DelConflictRevs
} = DocInfo,
- Json = {obj,
+ Json = {
[{"rev", Rev}] ++
case ConflictRevs of
[] -> [];
- _ -> [{"conflicts", list_to_tuple(ConflictRevs)}]
+ _ -> [{"conflicts", ConflictRevs}]
end ++
case DelConflictRevs of
[] -> [];
- _ -> [{"deleted_conflicts", list_to_tuple(DelConflictRevs)}]
+ _ -> [{"deleted_conflicts", DelConflictRevs}]
end ++
case Deleted of
true -> [{"deleted", true}];
@@ -392,7 +393,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
handle_db_request(_Req, _Method, {_DbName, _Db, ["_all_docs_by_seq"]}) ->
throw({method_not_allowed, "GET,HEAD"});
-handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
+handle_db_request(Req, 'GET', {DbName, _Db, [<<"_view">>, DocId, ViewName]}) ->
#view_query_args{
start_key = StartKey,
count = Count,
@@ -400,8 +401,9 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
direction = Dir,
start_docid = StartDocId
} = QueryArgs = parse_view_query(Req),
- case couch_view:get_map_view({DbName, "_design/" ++ DocId, ViewName}) of
- {ok, View} ->
+
+ case couch_view:get_map_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
+ {ok, View} ->
{ok, RowCount} = couch_view:get_row_count(View),
Start = {StartKey, StartDocId},
FoldlFun = make_view_fold_fun(Req, QueryArgs, RowCount,
@@ -410,7 +412,7 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
finish_view_fold(Req, RowCount, FoldResult);
{not_found, Reason} ->
- case couch_view:get_reduce_view({DbName, "_design/" ++ DocId, ViewName}) of
+ case couch_view:get_reduce_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
{ok, View} ->
output_reduce_view(Req, View);
_ ->
@@ -418,27 +420,25 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
end
end;
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_view", _DocId, _ViewName]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_view">>, _DocId, _ViewName]}) ->
throw({method_not_allowed, "GET,HEAD"});
-handle_db_request(Req, 'POST', {_DbName, Db, ["_missing_revs"]}) ->
- {obj, JsonDocIdRevs} = cjson:decode(Req:recv_body()),
- DocIdRevs = [{Id, tuple_to_list(Revs)} || {Id, Revs} <- JsonDocIdRevs],
- {ok, Results} = couch_db:get_missing_revs(Db, DocIdRevs),
- JsonResults = [{Id, list_to_tuple(Revs)} || {Id, Revs} <- Results],
- send_json(Req, {obj, [
- {missing_revs, {obj, JsonResults}}
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_missing_revs">>]}) ->
+ {JsonDocIdRevs} = ?JSON_DECODE(Req:recv_body()),
+ {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs),
+ send_json(Req, {[
+ {missing_revs, {Results}}
]});
-handle_db_request(Req, 'POST', {_DbName, Db, ["_increment_update_seq"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_increment_update_seq">>]}) ->
% NOTE, use at own risk. This functionality is experimental
% and might go away entirely.
{ok, NewSeq} = couch_db:increment_update_seq(Db),
- send_json(Req, {obj, [{ok, true},
+ send_json(Req, {[{ok, true},
{update_seq, NewSeq}
]});
-handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
+handle_db_request(Req, 'POST', {DbName, _Db, [<<"_temp_view">>]}) ->
#view_query_args{
start_key = StartKey,
count = Count,
@@ -452,18 +452,22 @@ handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
"application/json" -> ok;
Else -> throw({incorrect_mime_type, Else})
end,
- {obj, Props} = cjson:decode(Req:recv_body()),
- Language = proplists:get_value("language", Props, "javascript"),
- MapSrc = proplists:get_value("map", Props),
- case proplists:get_value("reduce", Props, null) of
+ {Props} = ?JSON_DECODE(Req:recv_body()),
+ Language = proplists:get_value(<<"language">>, Props, <<"javascript">>),
+ MapSrc = proplists:get_value(<<"map">>, Props),
+ case proplists:get_value(<<"reduce">>, Props, null) of
null ->
{ok, View} = couch_view:get_map_view({temp, DbName, Language, MapSrc}),
Start = {StartKey, StartDocId},
+
{ok, TotalRows} = couch_view:get_row_count(View),
+
FoldlFun = make_view_fold_fun(Req, QueryArgs, TotalRows,
fun couch_view:reduce_to_count/1),
FoldAccInit = {Count, SkipCount, undefined, []},
- FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
+ FoldResult = couch_view:fold(View, Start, Dir, fun(A, B, C) ->
+ FoldlFun(A, B, C)
+ end, FoldAccInit),
finish_view_fold(Req, TotalRows, FoldResult);
RedSrc ->
@@ -472,26 +476,22 @@ handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
output_reduce_view(Req, View)
end;
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_temp_view"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_temp_view">>]}) ->
throw({method_not_allowed, "POST"});
% Document request handlers
-handle_db_request(Req, Method, {DbName, Db, ["_design", DesignName]}) ->
+handle_db_request(Req, Method, {DbName, Db, [<<"_design">>, Name]}) ->
% Special case to enable using an unencoded slash in the URL of design docs, as
% slashes in document IDs must otherwise be URL encoded
- DocId = mochiweb_util:join(["_design", DesignName], "/"),
- handle_db_request(Req, Method, {DbName, Db, [DocId]});
+ handle_db_request(Req, Method, {DbName, Db, [<<"_design/", Name/binary>>]});
handle_db_request(Req, Method, {DbName, Db, [DocId]}) ->
- UnquotedDocId = mochiweb_util:unquote(DocId),
- handle_doc_request(Req, Method, DbName, Db, UnquotedDocId);
+ handle_doc_request(Req, Method, DbName, Db, DocId);
handle_db_request(Req, Method, {DbName, Db, [DocId, FileName]}) ->
- UnquotedDocId = mochiweb_util:unquote(DocId),
- UnquotedFileName = mochiweb_util:unquote(FileName),
- handle_attachment_request(Req, Method, DbName, Db, UnquotedDocId,
- UnquotedFileName).
+ handle_attachment_request(Req, Method, DbName, Db, DocId,
+ FileName).
output_reduce_view(Req, View) ->
#view_query_args{
@@ -508,8 +508,8 @@ output_reduce_view(Req, View) ->
fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
true;
({Key1,_}, {Key2,_})
- when is_integer(GroupLevel) and is_tuple(Key1) and is_tuple(Key2) ->
- lists:sublist(tuple_to_list(Key1), GroupLevel) == lists:sublist(tuple_to_list(Key2), GroupLevel);
+ when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
+ lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
({Key1,_}, {Key2,_}) ->
Key1 == Key2
end,
@@ -522,52 +522,37 @@ output_reduce_view(Req, View) ->
(_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
{stop, {AccSeparator,0,AccCount}};
(_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
- Json = lists:flatten(cjson:encode({obj, [{key, null}, {value, Red}]})),
+ Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
Resp:write_chunk(AccSeparator ++ Json),
{ok, {",",0,AccCount-1}};
(Key, Red, {AccSeparator,0,AccCount})
- when is_integer(GroupLevel)
- andalso is_tuple(Key)
- andalso element(1, Key) /= obj ->
- Json = lists:flatten(cjson:encode(
- {obj, [{key, list_to_tuple(lists:sublist(tuple_to_list(Key), GroupLevel))},
- {value, Red}]})),
+ when is_integer(GroupLevel)
+ andalso is_list(Key) ->
+ Json = ?JSON_ENCODE(
+ {[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
Resp:write_chunk(AccSeparator ++ Json),
{ok, {",",0,AccCount-1}};
(Key, Red, {AccSeparator,0,AccCount}) ->
- Json = lists:flatten(cjson:encode({obj, [{key, Key}, {value, Red}]})),
+ Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
Resp:write_chunk(AccSeparator ++ Json),
{ok, {",",0,AccCount-1}}
end, {"", Skip, Count}),
Resp:write_chunk("]}"),
end_json_response(Resp).
+
handle_doc_request(Req, 'DELETE', _DbName, Db, DocId) ->
- QueryRev = proplists:get_value("rev", Req:parse_qs()),
- Etag = case Req:get_header_value("If-Match") of
- undefined ->
- undefined;
- Tag ->
- string:strip(Tag, both, $")
- end,
- RevToDelete = case {QueryRev, Etag} of
- {undefined, undefined} ->
- throw({missing_rev, "Document rev/etag must be specified to delete"});
- {_, undefined} ->
- QueryRev;
- {undefined, _} ->
- Etag;
- _ when QueryRev == Etag ->
- Etag;
- _ ->
- throw({bad_request, "Document rev and etag have different values"})
- end,
- {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
- send_json(Req, 200, {obj, [
- {ok, true},
- {id, DocId},
- {rev, NewRev}
- ]});
+ case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+ missing_rev ->
+ {missing_rev, "Document rev/etag must be specified to delete"};
+ RevToDelete ->
+ {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
+ send_json(Req, 200, {[
+ {ok, true},
+ {id, DocId},
+ {rev, NewRev}
+ ]})
+ end;
handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
#doc_query_args{
@@ -596,10 +581,10 @@ handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
case Result of
{ok, Doc} ->
JsonDoc = couch_doc:to_json_obj(Doc, Options),
- Json = lists:flatten(cjson:encode({obj, [{ok, JsonDoc}]})),
+ Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
Resp:write_chunk(AccSeparator ++ Json);
{{not_found, missing}, RevId} ->
- Json = lists:flatten(cjson:encode({obj, [{"missing", RevId}]})),
+ Json = ?JSON_ENCODE({[{"missing", RevId}]}),
Resp:write_chunk(AccSeparator ++ Json)
end,
"," % AccSeparator now has a comma
@@ -611,16 +596,17 @@ handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
handle_doc_request(Req, 'POST', _DbName, Db, DocId) ->
Form = mochiweb_multipart:parse_form(Req),
- Rev = proplists:get_value("_rev", Form),
- NewAttachments = [{Name, {ContentType, Content}} ||
- {Name, {ContentType, _}, Content} <-
- proplists:get_all_values("_attachments", Form)],
-
+ Rev = list_to_binary(proplists:get_value("_rev", Form)),
Doc = case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
{ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
{ok, [Error]} -> throw(Error)
end,
+ NewAttachments = [
+ {list_to_binary(Name), {list_to_binary(ContentType), Content}} ||
+ {Name, {ContentType, _}, Content} <-
+ proplists:get_all_values("_attachments", Form)
+ ],
#doc{attachments=Attachments} = Doc,
NewDoc = Doc#doc{
attachments = Attachments ++ NewAttachments
@@ -634,87 +620,78 @@ handle_doc_request(Req, 'POST', _DbName, Db, DocId) ->
]});
handle_doc_request(Req, 'PUT', _DbName, Db, DocId) ->
- Json = {obj, DocProps} = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
- DocRev = proplists:get_value("_rev", DocProps),
- Etag = case Req:get_header_value("If-Match") of
- undefined ->
- undefined;
- Tag ->
- string:strip(Tag, both, $")
+ Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
+ Doc = couch_doc:from_json_obj(Json),
+ ExplicitRev =
+ case Doc#doc.revs of
+ [Rev0|_] -> Rev0;
+ [] -> undefined
end,
- Revs = case {DocRev, Etag} of
- {undefined, undefined} ->
- [];
- {_, undefined} ->
- [DocRev];
- {undefined, _} ->
- [Etag];
- _ when DocRev == Etag ->
- [Etag];
- _ ->
- throw({bad_request, "Document rev and etag have different values"})
+ case extract_header_rev(Req, ExplicitRev) of
+ missing_rev ->
+ Revs = [];
+ Rev ->
+ Revs = [Rev]
end,
-
- Doc = couch_doc:from_json_obj(Json),
-
{ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, []),
- send_json(Req, 201, [{"Etag", "\"" ++ NewRev ++ "\""}], {obj, [
+ send_json(Req, 201, [{"Etag", <<"\"", NewRev/binary, "\"">>}], {[
{ok, true},
{id, DocId},
{rev, NewRev}
]});
handle_doc_request(Req, 'COPY', _DbName, Db, SourceDocId) ->
- SourceRev = case extract_header_rev(Req) of
- missing_rev -> [];
- Rev -> Rev
+ SourceRev =
+ case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+ missing_rev -> [];
+ Rev -> Rev
end,
{TargetDocId, TargetRev} = parse_copy_destination_header(Req),
- % open revision Rev or Current
+ % open revision Rev or Current
{Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),
% save new doc
{ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRev}, []),
- send_json(Req, 201, [{"Etag", "\"" ++ NewTargetRev ++ "\""}], {obj, [
+ send_json(Req, 201, [{"Etag", "\"" ++ binary_to_list(NewTargetRev) ++ "\""}], {[
{ok, true},
{id, TargetDocId},
{rev, NewTargetRev}
]});
handle_doc_request(Req, 'MOVE', _DbName, Db, SourceDocId) ->
- SourceRev = case extract_header_rev(Req) of
- missing_rev ->
+ SourceRev =
+ case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+ missing_rev ->
throw({
- bad_request,
- "MOVE requires a specified rev parameter for the origin resource."}
- );
- Rev -> Rev
+ bad_request,
+ "MOVE requires a specified rev parameter for the origin resource."}
+ );
+ Rev -> Rev
end,
{TargetDocId, TargetRev} = parse_copy_destination_header(Req),
-
% open revision Rev or Current
{Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),
% save new doc & delete old doc in one operation
Docs = [
- Doc#doc{id=TargetDocId, revs=TargetRev},
- #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
- ],
+ Doc#doc{id=TargetDocId, revs=TargetRev},
+ #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
+ ],
{ok, ResultRevs} = couch_db:update_docs(Db, Docs, []),
DocResults = lists:zipwith(
fun(FDoc, NewRev) ->
- {obj, [{"id", FDoc#doc.id}, {"rev", NewRev}]}
+ {[{id, FDoc#doc.id}, {rev, NewRev}]}
end,
Docs, ResultRevs),
- send_json(Req, 201, {obj, [
+ send_json(Req, 201, {[
{ok, true},
- {new_revs, list_to_tuple(DocResults)}
+ {new_revs, DocResults}
]});
handle_doc_request(_Req, _Method, _DbName, _Db, _DocId) ->
@@ -728,19 +705,19 @@ couch_doc_open(Db, DocId, Rev, Options) ->
case Rev of
"" -> % open most recent rev
case couch_db:open_doc(Db, DocId, Options) of
- {ok, #doc{revs=[DocRev|_]}=Doc} ->
- {Doc, DocRev};
- Error ->
- throw(Error)
- end;
- _ -> % open a specific rev (deletions come back as stubs)
- case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
- {ok, [{ok, Doc}]} ->
- {Doc, Rev};
- {ok, [Else]} ->
- throw(Else)
- end
- end.
+ {ok, #doc{revs=[DocRev|_]}=Doc} ->
+ {Doc, DocRev};
+ Error ->
+ throw(Error)
+ end;
+ _ -> % open a specific rev (deletions come back as stubs)
+ case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
+ {ok, [{ok, Doc}]} ->
+ {Doc, Rev};
+ {ok, [Else]} ->
+ throw(Else)
+ end
+ end.
% Attachment request handlers
@@ -773,8 +750,6 @@ handle_attachment_request(Req, 'GET', _DbName, Db, DocId, FileName) ->
handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
when (Method == 'PUT') or (Method == 'DELETE') ->
- Rev = extract_header_rev(Req),
-
NewAttachment = case Method of
'DELETE' ->
[];
@@ -785,12 +760,12 @@ handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
}}]
end,
- Doc = case Rev of
+ Doc = case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
missing_rev -> % make the new doc
#doc{id=DocId};
- _ ->
+ Rev ->
case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
- {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
+ {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
{ok, [Error]} -> throw(Error)
end
end,
@@ -800,7 +775,7 @@ handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
attachments = NewAttachment ++ proplists:delete(FileName, Attachments)
},
{ok, UpdatedRev} = couch_db:update_doc(Db, DocEdited, []),
- send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {obj, [
+ send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {[
{ok, true},
{id, DocId},
{rev, UpdatedRev}
@@ -871,8 +846,8 @@ handle_config_request(Req, 'DELETE', {[Section, Option]}) ->
% View request handling internals
-reverse_key_default(nil) -> <<>>;
-reverse_key_default(<<>>) -> nil;
+reverse_key_default(nil) -> {};
+reverse_key_default({}) -> nil;
reverse_key_default(Key) -> Key.
parse_view_query(Req) ->
@@ -882,16 +857,16 @@ parse_view_query(Req) ->
{"", _} ->
Args;
{"key", Value} ->
- JsonKey = cjson:decode(Value),
+ JsonKey = ?JSON_DECODE(Value),
Args#view_query_args{start_key=JsonKey,end_key=JsonKey};
{"startkey_docid", DocId} ->
- Args#view_query_args{start_docid=DocId};
+ Args#view_query_args{start_docid=list_to_binary(DocId)};
{"endkey_docid", DocId} ->
- Args#view_query_args{end_docid=DocId};
+ Args#view_query_args{end_docid=list_to_binary(DocId)};
{"startkey", Value} ->
- Args#view_query_args{start_key=cjson:decode(Value)};
+ Args#view_query_args{start_key=?JSON_DECODE(Value)};
{"endkey", Value} ->
- Args#view_query_args{end_key=cjson:decode(Value)};
+ Args#view_query_args{end_key=?JSON_DECODE(Value)};
{"count", Value} ->
case (catch list_to_integer(Value)) of
Count when is_integer(Count) ->
@@ -966,11 +941,11 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
case Dir of
fwd ->
fun(ViewKey, ViewId) ->
- couch_view:less_json({EndKey, EndDocId}, {ViewKey, ViewId})
+ couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId])
end;
rev->
fun(ViewKey, ViewId) ->
- couch_view:less_json({ViewKey, ViewId}, {EndKey, EndDocId})
+ couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId])
end
end,
@@ -991,18 +966,19 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
lists:min([TotalViewCount - Offset, - AccCount]),
JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
[TotalViewCount, Offset2]),
- Resp2:write_chunk(lists:flatten(JsonBegin)),
- JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
- {ok, {AccCount + 1, 0, Resp2, [cjson:encode(JsonObj) | AccRevRows]}};
+ Resp2:write_chunk(JsonBegin),
+ JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+ {ok, {AccCount + 1, 0, Resp2, [?JSON_ENCODE(JsonObj) | AccRevRows]}};
{_, AccCount, _, Resp} ->
- JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
- {ok, {AccCount + 1, 0, Resp, [cjson:encode(JsonObj), ",\r\n" | AccRevRows]}}
+
+ JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+ {ok, {AccCount + 1, 0, Resp, [?JSON_ENCODE(JsonObj), ",\r\n" | AccRevRows]}}
end
end,
PosCountFun = fun({{Key, DocId}, Value}, OffsetReds,
{AccCount, AccSkip, Resp, AccRevRows}) ->
- Offset = ReduceCountFun(OffsetReds),
+ Offset = ReduceCountFun(OffsetReds), % I think we only need this call once per view
PassedEnd = PassedEndFun(Key, DocId),
case {PassedEnd, AccCount, AccSkip, Resp} of
{true, _, _, _} ->
@@ -1017,12 +993,13 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
Resp2 = start_json_response(Req, 200),
JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
[TotalViewCount, Offset]),
- JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
- Resp2:write_chunk(lists:flatten(JsonBegin ++ cjson:encode(JsonObj))),
+ JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+
+ Resp2:write_chunk(JsonBegin ++ ?JSON_ENCODE(JsonObj)),
{ok, {AccCount - 1, 0, Resp2, AccRevRows}};
{_, AccCount, _, Resp} when (AccCount > 0) ->
- JsonObj = {obj, [{"id", DocId}, {"key", Key}, {"value", Value}]},
- Resp:write_chunk(",\r\n" ++ lists:flatten(cjson:encode(JsonObj))),
+ JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+ Resp:write_chunk(",\r\n" ++ ?JSON_ENCODE(JsonObj)),
{ok, {AccCount - 1, 0, Resp, AccRevRows}}
end
end,
@@ -1036,13 +1013,13 @@ finish_view_fold(Req, TotalRows, FoldResult) ->
{ok, {_, _, undefined, _}} ->
% nothing found in the view, nothing has been returned
% send empty view
- send_json(Req, 200, {obj, [
+ send_json(Req, 200, {[
{total_rows, TotalRows},
- {rows, {}}
+ {rows, []}
]});
{ok, {_, _, Resp, AccRevRows}} ->
% end the view
- Resp:write_chunk(lists:flatten(AccRevRows) ++ "\r\n]}"),
+ Resp:write_chunk(AccRevRows ++ "\r\n]}"),
end_json_response(Resp);
Error ->
throw(Error)
@@ -1076,8 +1053,8 @@ parse_doc_query(Req) ->
{"open_revs", "all"} ->
Args#doc_query_args{open_revs=all};
{"open_revs", RevsJsonStr} ->
- JsonArray = cjson:decode(RevsJsonStr),
- Args#doc_query_args{open_revs=tuple_to_list(JsonArray)};
+ JsonArray = ?JSON_DECODE(RevsJsonStr),
+ Args#doc_query_args{open_revs=JsonArray};
_Else -> % unknown key value pair, ignore.
Args
end
@@ -1086,7 +1063,7 @@ parse_doc_query(Req) ->
% Utilities
none_match(Req, Tag) ->
- Etag = "\"" ++ Tag ++ "\"",
+ Etag = "\"" ++ binary_to_list(Tag) ++ "\"",
Etags = case Req:get_header_value("If-None-Match") of
undefined ->
[];
@@ -1109,9 +1086,9 @@ error_to_json(Error) ->
_ ->
lists:flatten(io_lib:format("~p", [Reason])) % else term to text
end,
- Json = {obj, [
- {error, atom_to_list(Atom)},
- {reason, FormattedReason}
+ Json = {[
+ {error, Atom},
+ {reason, list_to_binary(FormattedReason)}
]},
{HttpCode, Json}.
@@ -1136,17 +1113,18 @@ error_to_json0({Id, Reason}) when is_atom(Id) ->
error_to_json0(Error) ->
{500, error, Error}.
-extract_header_rev(Req) ->
- QueryRev = proplists:get_value("rev", Req:parse_qs()),
+extract_header_rev(Req, ExplicitRev) ->
Etag = case Req:get_header_value("If-Match") of
undefined -> undefined;
Tag -> string:strip(Tag, both, $")
end,
- case {QueryRev, Etag} of
+ case {ExplicitRev, Etag} of
{undefined, undefined} -> missing_rev;
- {_, undefined} -> QueryRev;
- {undefined, _} -> Etag;
- _ when QueryRev == Etag -> Etag;
+ {_, undefined} -> list_to_binary(ExplicitRev);
+ {undefined, _} -> list_to_binary(Etag);
+ _ when ExplicitRev == Etag -> list_to_binary(Etag);
_ ->
throw({bad_request, "Document rev and etag have different values"})
end.
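
Aside, not part of the patch: the selection rule implemented by extract_header_rev/2 above, reduced to a standalone sketch. Module and function names are hypothetical; inputs are the optional "rev" query value and the optional If-Match etag, both plain strings or undefined, and the winner comes back as a binary:

    -module(rev_pick_sketch).    % hypothetical, for illustration only
    -export([pick_rev/2]).

    %% pick_rev(QueryRev, Etag) -> binary() | missing_rev
    pick_rev(undefined, undefined) -> missing_rev;              % no rev anywhere
    pick_rev(QueryRev, undefined)  -> list_to_binary(QueryRev); % query param wins
    pick_rev(undefined, Etag)      -> list_to_binary(Etag);     % fall back to etag
    pick_rev(Rev, Rev)             -> list_to_binary(Rev);      % both given, equal
    pick_rev(_, _) ->
        throw({bad_request, "Document rev and etag have different values"}).
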
@@ -1154,12 +1132,12 @@ extract_header_rev(Req) ->
parse_copy_destination_header(Req) ->
Destination = Req:get_header_value("Destination"),
case regexp:match(Destination, "\\?") of
- nomatch ->
- {Destination, []};
- {match, _, _} ->
- {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
- {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
- {DocId, [Rev]}
+ nomatch ->
+ {list_to_binary(Destination), []};
+ {match, _, _} ->
+ {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
+ {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
+ {list_to_binary(DocId), [list_to_binary(Rev)]}
end.
send_error(Req, {method_not_allowed, Methods}) ->
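
Aside, not part of the patch: the same parse as parse_copy_destination_header/1 above, sketched with string:tokens/2 in place of the old regexp module. A "Destination: other_doc?rev=1-2345" header yields {<<"other_doc">>, [<<"1-2345">>]}; with no query string it yields {<<"other_doc">>, []}:

    -module(copy_dest_sketch).   % hypothetical, for illustration only
    -export([parse/1]).

    parse(Destination) ->
        case string:tokens(Destination, "?") of
            [DocId] ->
                {list_to_binary(DocId), []};
            [DocId, RevQueryOptions] ->
                [_RevQueryKey, Rev] = string:tokens(RevQueryOptions, "="),
                {list_to_binary(DocId), [list_to_binary(Rev)]}
        end.
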
@@ -1187,7 +1165,7 @@ send_json(Req, Code, Headers, Value) ->
{"Content-Type", negotiate_content_type(Req)},
{"Cache-Control", "must-revalidate"}
] ++ server_header(),
- Body = cjson:encode(Value),
+ Body = ?JSON_ENCODE(Value),
Resp = Req:respond({Code, DefaultHeaders ++ Headers, Body}),
{ok, Resp}.
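
Taken together, the hunks above replace the cjson term format with the mochijson2-style one used through the rest of this patch: objects go from {obj, Props} with string keys to {Props} one-tuples with binary (or atom, when encoding) keys, and arrays go from tuples to plain lists. A round-trip sketch; the macro bodies shown are an assumption inferred from how ?JSON_ENCODE/?JSON_DECODE are used here:

    -module(ejson_sketch).       % hypothetical, for illustration only
    -export([round_trip/0]).

    % assumed to match what couch_db.hrl defines after this patch
    -define(JSON_ENCODE(V), mochijson2:encode(V)).
    -define(JSON_DECODE(V), mochijson2:decode(V)).

    round_trip() ->
        % old cjson:  {obj, [{"ok", true}, {"id", "doc1"}]}
        % new format: {[{ok, true}, {id, <<"doc1">>}]}
        Json = iolist_to_binary(?JSON_ENCODE({[{ok, true}, {id, <<"doc1">>}]})),
        % Json == <<"{\"ok\":true,\"id\":\"doc1\"}">>
        {Props} = ?JSON_DECODE(Json),
        % decoded keys are always binaries
        true = proplists:get_value(<<"ok">>, Props).
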
diff --git a/src/couchdb/couch_query_servers.erl b/src/couchdb/couch_query_servers.erl
index ad207a63..9b1324eb 100644
--- a/src/couchdb/couch_query_servers.erl
+++ b/src/couchdb/couch_query_servers.erl
@@ -18,7 +18,7 @@
-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,code_change/3,stop/0]).
-export([start_doc_map/2, map_docs/2, stop_doc_map/1]).
-export([reduce/3, rereduce/3]).
--export([test/0]).
+% -export([test/0]).
-include("couch_db.hrl").
@@ -32,7 +32,6 @@ readline(Port) ->
readline(Port, []).
readline(Port, Acc) ->
-
case get(query_server_timeout) of
undefined ->
Timeout = list_to_integer(couch_config:get(
@@ -44,7 +43,7 @@ readline(Port, Acc) ->
{Port, {data, {noeol, Data}}} ->
readline(Port, [Data|Acc]);
{Port, {data, {eol, Data}}} ->
- lists:flatten(lists:reverse(Acc, Data));
+ lists:reverse(Acc, Data);
{Port, Err} ->
catch port_close(Port),
throw({map_process_error, Err})
@@ -54,8 +53,9 @@ readline(Port, Acc) ->
end.
read_json(Port) ->
- case cjson:decode(readline(Port)) of
- {obj, [{"log", Msg}]} when is_list(Msg) ->
+ Line = readline(Port),
+ case ?JSON_DECODE(Line) of
+ {[{<<"log">>,Msg}]} when is_binary(Msg) ->
% we got a message to log. Log it and continue
?LOG_INFO("Query Server Log Message: ~s", [Msg]),
read_json(Port);
@@ -63,17 +63,15 @@ read_json(Port) ->
Else
end.
-writeline(Port, String) ->
- true = port_command(Port, String ++ "\n").
-
% send command and get a response.
prompt(Port, Json) ->
- writeline(Port, cjson:encode(Json)),
+ Bin = iolist_to_binary([?JSON_ENCODE(Json) , "\n"]),
+ true = port_command(Port, Bin),
case read_json(Port) of
- {obj, [{"error", Id}, {"reason", Reason}]} ->
- throw({list_to_atom(Id),Reason});
- {obj, [{"reason", Reason}, {"error", Id}]} ->
- throw({list_to_atom(Id),Reason});
+ {[{<<"error">>, Id}, {<<"reason">>, Reason}]} ->
+ throw({list_to_atom(binary_to_list(Id)),Reason});
+ {[{<<"reason">>, Reason}, {<<"error">>, Id}]} ->
+ throw({list_to_atom(binary_to_list(Id)),Reason});
Result ->
Result
end.
@@ -83,7 +81,7 @@ start_doc_map(Lang, Functions) ->
Port = get_linked_port(Lang),
% send the functions as json strings
lists:foreach(fun(FunctionSource) ->
- true = prompt(Port, {"add_fun", FunctionSource})
+ true = prompt(Port, [<<"add_fun">>, FunctionSource])
end,
Functions),
{ok, {Lang, Port}}.
@@ -93,13 +91,19 @@ map_docs({_Lang, Port}, Docs) ->
Results = lists:map(
fun(Doc) ->
Json = couch_doc:to_json_obj(Doc, []),
- Results = prompt(Port, {"map_doc", Json}),
+
+ FunsResults = prompt(Port, [<<"map_doc">>, Json]),
% the results are a json array of function map yields like this:
- % {FunResults1, FunResults2 ...}
+ % [FunResults1, FunResults2 ...]
% where funresults is are json arrays of key value pairs:
- % {{Key1, Value1}, {Key2, Value2}}
- % Convert to real lists, execept the key, value pairs
- [tuple_to_list(FunResult) || FunResult <- tuple_to_list(Results)]
+ % [[Key1, Value1], [Key2, Value2]]
+ % Convert the key, value pairs to tuples like
+ % [{Key1, Value1}, {Key2, Value2}]
+ lists:map(
+ fun(FunRs) ->
+ [list_to_tuple(FunResult) || FunResult <- FunRs]
+ end,
+ FunsResults)
end,
Docs),
{ok, Results}.
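
The conversion described in the comments above, as a shell sketch (values illustrative): one inner list per map function, each [Key, Value] pair becoming a tuple.

    1> FunsResults = [[[<<"k1">>, 1], [<<"k2">>, 2]], []].
    [[[<<"k1">>,1],[<<"k2">>,2]],[]]
    2> [[list_to_tuple(FR) || FR <- FunRs] || FunRs <- FunsResults].
    [[{<<"k1">>,1},{<<"k2">>,2}],[]]
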
@@ -114,12 +118,13 @@ get_linked_port(Lang) ->
case gen_server:call(couch_query_servers, {get_port, Lang}) of
{ok, Port0} ->
link(Port0),
- true = prompt(Port0, {"reset"}),
+ true = prompt(Port0, [<<"reset">>]),
Port0;
{empty, Cmd} ->
?LOG_INFO("Spawning new ~s instance.", [Lang]),
open_port({spawn, Cmd}, [stream,
{line, 1000},
+ binary,
exit_status,
hide]);
Error ->
@@ -152,8 +157,8 @@ rereduce(Lang, RedSrcs, ReducedValues) ->
Grouped = group_reductions_results(ReducedValues),
Results = lists:zipwith(
fun(FunSrc, Values) ->
- {true, {Result}} =
- prompt(Port, {"rereduce", {FunSrc}, list_to_tuple(Values)}),
+ [true, [Result]] =
+ prompt(Port, [<<"rereduce">>, [FunSrc], Values]),
Result
end, RedSrcs, Grouped),
@@ -164,10 +169,10 @@ reduce(_Lang, [], _KVs) ->
{ok, []};
reduce(Lang, RedSrcs, KVs) ->
Port = get_linked_port(Lang),
- {true, Results} = prompt(Port,
- {"reduce", list_to_tuple(RedSrcs), list_to_tuple(KVs)}),
+ [true, Results] = prompt(Port,
+ [<<"reduce">>, RedSrcs, KVs]),
return_linked_port(Lang, Port),
- {ok, tuple_to_list(Results)}.
+ {ok, Results}.
init([]) ->
@@ -182,18 +187,27 @@ init([]) ->
?MODULE:stop()
end),
- QueryServerList = couch_config:lookup_match(
+ QueryServers = couch_config:lookup_match(
{{"query_servers", '$1'}, '$2'}, []),
+ QueryServers2 =
+ [{list_to_binary(Lang), Path} || {Lang, Path} <- QueryServers],
- {ok, {QueryServerList, []}}.
+ {ok, {QueryServers2, []}}.
terminate(_Reason, _Server) ->
ok.
handle_call({get_port, Lang}, {FromPid, _}, {QueryServerList, LangPorts}) ->
- case lists:keysearch(Lang, 1, LangPorts) of
- {value, {_, Port}=LangPort} ->
+ case proplists:get_value(Lang, LangPorts) of
+ undefined ->
+ case proplists:get_value(Lang, QueryServerList) of
+ undefined -> % not a supported language
+ {reply, {query_language_unknown, Lang}, {QueryServerList, LangPorts}};
+ ServerCmd ->
+ {reply, {empty, ServerCmd}, {QueryServerList, LangPorts}}
+ end;
+ Port ->
Result =
case catch port_connect(Port, FromPid) of
true ->
@@ -203,14 +217,7 @@ handle_call({get_port, Lang}, {FromPid, _}, {QueryServerList, LangPorts}) ->
catch port_close(Port),
Error
end,
- {reply, Result, {QueryServerList, LangPorts -- [LangPort]}};
- false ->
- case lists:keysearch(Lang, 1, QueryServerList) of
- {value, {_, ServerCmd}} ->
- {reply, {empty, ServerCmd}, {QueryServerList, LangPorts}};
- false -> % not a supported language
- {reply, {query_language_unknown, Lang}, {QueryServerList, LangPorts}}
- end
+ {reply, Result, {QueryServerList, LangPorts -- [{Lang,Port}]}}
end;
handle_call({return_port, {Lang, Port}}, _From, {QueryServerList, LangPorts}) ->
case catch port_connect(Port, self()) of
@@ -241,11 +248,11 @@ code_change(_OldVsn, State, _Extra) ->
% test() ->
% test("../js/js -f main.js").
-
-test() ->
- start_link(),
- {ok, DocMap} = start_doc_map("javascript", ["function(doc) {if (doc[0] == 'a') return doc[1];}"]),
- {ok, Results} = map_docs(DocMap, [#doc{body={"a", "b"}}, #doc{body={"c", "d"}},#doc{body={"a", "c"}}]),
- io:format("Results: ~w~n", [Results]),
- stop_doc_map(DocMap),
- ok.
+%
+% test(Cmd) ->
+% start_link(Cmd),
+% {ok, DocMap} = start_doc_map(<<"javascript">>, [<<"function(doc) {if (doc[0] == 'a') return doc[1];}">>]),
+% {ok, Results} = map_docs(DocMap, [#doc{body={"a", "b"}}, #doc{body={"c", "d"}},#doc{body={"a", "c"}}]),
+% io:format("Results: ~w~n", [Results]),
+% stop_doc_map(DocMap),
+% ok.
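
With the port now opened in binary mode and {line, 1000} framing, every command to the external query server is a single newline-terminated JSON line, and commands are plain lists rather than cjson tuples. A shell sketch of the framing done in prompt/2 (assuming the patched mochijson2 is loaded):

    1> iolist_to_binary([mochijson2:encode([<<"add_fun">>, <<"function(doc){}">>]), "\n"]).
    <<"[\"add_fun\",\"function(doc){}\"]\n">>
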
diff --git a/src/couchdb/couch_rep.erl b/src/couchdb/couch_rep.erl
index f7aaa67c..0e172436 100644
--- a/src/couchdb/couch_rep.erl
+++ b/src/couchdb/couch_rep.erl
@@ -16,6 +16,8 @@
-export([replicate/2, replicate/3]).
+url_encode(Bin) when is_binary(Bin) ->
+ url_encode(binary_to_list(Bin));
url_encode([H|T]) ->
if
H >= $a, $z >= H ->
@@ -56,8 +58,10 @@ replicate(Source, Target, Options) ->
replicate2(Source, DbSrc, Target, DbTgt, Options) ->
{ok, HostName} = inet:gethostname(),
-
- RepRecKey = ?LOCAL_DOC_PREFIX ++ HostName ++ ":" ++ Source ++ ":" ++ Target,
+ HostNameBin = list_to_binary(HostName),
+ RepRecKey = <<?LOCAL_DOC_PREFIX, HostNameBin/binary,
+ ":", Source/binary, ":", Target/binary>>,
+
StartTime = httpd_util:rfc1123_date(),
case proplists:get_value(full, Options, false)
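
Shell sketch of the key construction above, with illustrative values and assuming ?LOCAL_DOC_PREFIX expands to "_local/":

    1> HostNameBin = <<"myhost">>, Source = <<"db_a">>, Target = <<"db_b">>.
    <<"db_b">>
    2> <<"_local/", HostNameBin/binary, ":", Source/binary, ":", Target/binary>>.
    <<"_local/myhost:db_a:db_b">>
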
@@ -82,14 +86,14 @@ replicate2(Source, DbSrc, Target, DbTgt, Options) ->
end
end,
- #doc{body={obj,OldRepHistoryProps}} = RepRecSrc,
- #doc{body={obj,OldRepHistoryPropsTrg}} = RepRecTgt,
+ #doc{body={OldRepHistoryProps}} = RepRecSrc,
+ #doc{body={OldRepHistoryPropsTrg}} = RepRecTgt,
SeqNum =
case OldRepHistoryProps == OldRepHistoryPropsTrg of
true ->
% if the records are identical, then we have a valid replication history
- proplists:get_value("source_last_seq", OldRepHistoryProps, 0);
+ proplists:get_value(<<"source_last_seq">>, OldRepHistoryProps, 0);
false ->
?LOG_INFO("Replication records differ. "
"Performing full replication instead of incremental.", []),
@@ -97,26 +101,26 @@ replicate2(Source, DbSrc, Target, DbTgt, Options) ->
0
end,
- {NewSeqNum, Stats} = pull_rep(DbTgt, DbSrc, SeqNum),
+ {NewSeqNum, Stats} = pull_rep(DbTgt, DbSrc, SeqNum),
case NewSeqNum == SeqNum andalso OldRepHistoryProps /= [] of
true ->
% nothing changed, don't record results
- {ok, {obj, OldRepHistoryProps}};
+ {ok, {OldRepHistoryProps}};
false ->
HistEntries =[
- {obj,
- [{"start_time", StartTime},
- {"end_time", httpd_util:rfc1123_date()},
- {"start_last_seq", SeqNum},
- {"end_last_seq", NewSeqNum} | Stats]}
- | tuple_to_list(proplists:get_value("history", OldRepHistoryProps, {}))],
+ {
+ [{start_time, list_to_binary(StartTime)},
+ {end_time, list_to_binary(httpd_util:rfc1123_date())},
+ {start_last_seq, SeqNum},
+ {end_last_seq, NewSeqNum} | Stats]}
+ | proplists:get_value(<<"history">>, OldRepHistoryProps, [])],
% something changed, record results
NewRepHistory =
- {obj,
- [{"session_id", couch_util:new_uuid()},
- {"source_last_seq", NewSeqNum},
- {"history", list_to_tuple(lists:sublist(HistEntries, 50))}]},
+ {
+ [{session_id, couch_util:new_uuid()},
+ {source_last_seq, NewSeqNum},
+ {history, lists:sublist(HistEntries, 50)}]},
{ok, _} = update_doc(DbSrc, RepRecSrc#doc{body=NewRepHistory}, []),
{ok, _} = update_doc(DbTgt, RepRecTgt#doc{body=NewRepHistory}, []),
@@ -165,6 +169,7 @@ get_missing_revs_loop(DbTarget, OpenDocsPid, RevsChecked, MissingFound) ->
receive
{Src, Id, Revs} ->
Src ! got_it,
+
MissingRevs =
case get_missing_revs(DbTarget, [{Id, Revs}]) of
{ok, [{Id, MissingRevs0}]} ->
@@ -179,8 +184,8 @@ get_missing_revs_loop(DbTarget, OpenDocsPid, RevsChecked, MissingFound) ->
RevsChecked + length(Revs),
MissingFound + length(MissingRevs));
{Src, shutdown} ->
- Src ! {done, self(), [{"missing_checked", RevsChecked},
- {"missing_found", MissingFound}]}
+ Src ! {done, self(), [{missing_checked, RevsChecked},
+ {missing_found, MissingFound}]}
end.
@@ -195,7 +200,7 @@ open_doc_revs_loop(DbSource, SaveDocsPid, DocsRead) ->
SaveDocsPid ! {self(), docs, Docs},
open_doc_revs_loop(DbSource, SaveDocsPid, DocsRead + length(Docs));
{Src, shutdown} ->
- Src ! {done, self(), [{"docs_read", DocsRead}]}
+ Src ! {done, self(), [{docs_read, DocsRead}]}
end.
@@ -207,7 +212,7 @@ save_docs_loop(DbTarget, DocsWritten) ->
ok = save_docs(DbTarget, Docs, []),
save_docs_loop(DbTarget, DocsWritten + length(Docs));
{Src, shutdown} ->
- Src ! {done, self(), [{"docs_written", DocsWritten}]}
+ Src ! {done, self(), [{docs_written, DocsWritten}]}
end.
@@ -224,12 +229,12 @@ do_http_request(Url, Action, JsonBody) ->
[] ->
{Url, []};
_ ->
- {Url, [], "application/json; charset=utf-8", lists:flatten(cjson:encode(JsonBody))}
+ {Url, [], "application/json; charset=utf-8", iolist_to_binary(?JSON_ENCODE(JsonBody))}
end,
{ok, {{_, ResponseCode,_},_Headers, ResponseBody}} = http:request(Action, Request, [], []),
if
ResponseCode >= 200, ResponseCode < 500 ->
- cjson:decode(ResponseBody)
+ ?JSON_DECODE(ResponseBody)
end.
enum_docs0(_InFun, [], Acc) ->
@@ -240,17 +245,25 @@ enum_docs0(InFun, [DocInfo | Rest], Acc) ->
{stop, Acc2} -> Acc2
end.
-open_db("http" ++ DbName)->
- case lists:last(DbName) of
+fix_url(UrlBin) ->
+ Url = binary_to_list(UrlBin),
+ case lists:last(Url) of
$/ ->
- {ok, "http" ++ DbName};
+ {ok, Url};
_ ->
- {ok, "http" ++ DbName ++ "/"}
- end;
+ {ok, Url ++ "/"}
+ end.
+
+open_db(<<"http://", _/binary>>=UrlBin)->
+ fix_url(UrlBin);
+open_db(<<"https://", _/binary>>=UrlBin)->
+ fix_url(UrlBin);
open_db(DbName)->
couch_db:open(DbName, []).
-close_db("http" ++ _)->
+close_db("http://" ++ _)->
+ ok;
+close_db("https://" ++ _)->
ok;
close_db(DbName)->
couch_db:close(DbName).
@@ -258,20 +271,20 @@ close_db(DbName)->
enum_docs_since(DbUrl, StartSeq, InFun, InAcc) when is_list(DbUrl) ->
Url = DbUrl ++ "_all_docs_by_seq?count=100&startkey=" ++ integer_to_list(StartSeq),
- {obj, Results} = do_http_request(Url, get),
+ {Results} = do_http_request(Url, get),
DocInfoList=
- lists:map(fun({obj, RowInfoList}) ->
- {obj, RowValueProps} = proplists:get_value("value", RowInfoList),
+ lists:map(fun({RowInfoList}) ->
+ {RowValueProps} = proplists:get_value(<<"value">>, RowInfoList),
#doc_info{
- id=proplists:get_value("id", RowInfoList),
- rev=proplists:get_value("rev", RowValueProps),
- update_seq = proplists:get_value("key", RowInfoList),
+ id=proplists:get_value(<<"id">>, RowInfoList),
+ rev=proplists:get_value(<<"rev">>, RowValueProps),
+ update_seq = proplists:get_value(<<"key">>, RowInfoList),
conflict_revs =
- tuple_to_list(proplists:get_value("conflicts", RowValueProps, {})),
+ proplists:get_value(<<"conflicts">>, RowValueProps, []),
deleted_conflict_revs =
- tuple_to_list(proplists:get_value("deleted_conflicts", RowValueProps, {})),
- deleted = proplists:get_value("deleted", RowValueProps, false)}
- end, tuple_to_list(proplists:get_value("rows", Results))),
+ proplists:get_value(<<"deleted_conflicts">>, RowValueProps, []),
+ deleted = proplists:get_value(<<"deleted">>, RowValueProps, false)}
+ end, proplists:get_value(<<"rows">>, Results)),
case DocInfoList of
[] ->
{ok, InAcc};
@@ -284,22 +297,18 @@ enum_docs_since(DbSource, StartSeq, Fun, Acc) ->
couch_db:enum_docs_since(DbSource, StartSeq, Fun, Acc).
get_missing_revs(DbUrl, DocIdRevsList) when is_list(DbUrl) ->
- JsonDocIdRevsList = {obj,
- [{Id, list_to_tuple(RevList)} || {Id, RevList} <- DocIdRevsList]},
- {obj, ResponseMembers} =
- do_http_request(DbUrl ++ "_missing_revs",
- post, JsonDocIdRevsList),
- {obj, DocMissingRevsList} = proplists:get_value("missing_revs", ResponseMembers),
- {ok, [{Id, tuple_to_list(MissingRevs)} || {Id, MissingRevs} <- DocMissingRevsList]};
+ {ResponseMembers} = do_http_request(DbUrl ++ "_missing_revs", post, {DocIdRevsList}),
+ {DocMissingRevsList} = proplists:get_value(<<"missing_revs">>, ResponseMembers),
+ {ok, DocMissingRevsList};
get_missing_revs(Db, DocId) ->
couch_db:get_missing_revs(Db, DocId).
update_doc(DbUrl, #doc{id=DocId}=Doc, _Options) when is_list(DbUrl) ->
Url = DbUrl ++ url_encode(DocId),
- {obj, ResponseMembers} =
+ {ResponseMembers} =
do_http_request(Url, put, couch_doc:to_json_obj(Doc, [revs,attachments])),
- RevId = proplists:get_value("_rev", ResponseMembers),
+ RevId = proplists:get_value(<<"_rev">>, ResponseMembers),
{ok, RevId};
update_doc(Db, Doc, Options) ->
couch_db:update_doc(Db, Doc, Options).
@@ -308,9 +317,9 @@ save_docs(_, [], _) ->
ok;
save_docs(DbUrl, Docs, []) when is_list(DbUrl) ->
JsonDocs = [couch_doc:to_json_obj(Doc, [revs,attachments]) || Doc <- Docs],
- {obj, Returned} =
- do_http_request(DbUrl ++ "_bulk_docs", post, {obj, [{new_edits, false}, {docs, list_to_tuple(JsonDocs)}]}),
- true = proplists:get_value("ok", Returned),
+ {Returned} =
+ do_http_request(DbUrl ++ "_bulk_docs", post, {[{new_edits, false}, {docs, JsonDocs}]}),
+ true = proplists:get_value(<<"ok">>, Returned),
ok;
save_docs(Db, Docs, Options) ->
couch_db:save_docs(Db, Docs, Options).
@@ -318,8 +327,8 @@ save_docs(Db, Docs, Options) ->
open_doc(DbUrl, DocId, []) when is_list(DbUrl) ->
case do_http_request(DbUrl ++ url_encode(DocId), get) of
- {obj, [{"error", ErrId}, {"reason", Reason}]} ->
- {list_to_atom(ErrId), Reason};
+ {[{<<"error">>, ErrId}, {<<"reason">>, Reason}]} -> % binaries?
+ {list_to_atom(binary_to_list(ErrId)), Reason};
Doc ->
{ok, couch_doc:from_json_obj(Doc)}
end;
@@ -333,16 +342,16 @@ open_doc_revs(DbUrl, DocId, Revs, Options) when is_list(DbUrl) ->
% latest is only option right now
"latest=true"
end, Options),
- RevsQueryStrs = lists:flatten(cjson:encode(list_to_tuple(Revs))),
- Url = DbUrl ++ DocId ++ "?" ++ couch_util:implode(["revs=true", "attachments=true", "open_revs=" ++ RevsQueryStrs ] ++ QueryOptionStrs, "&"),
+ RevsQueryStrs = lists:flatten(?JSON_ENCODE(Revs)),
+ Url = DbUrl ++ binary_to_list(DocId) ++ "?" ++ couch_util:implode(["revs=true", "attachments=true", "open_revs=" ++ RevsQueryStrs ] ++ QueryOptionStrs, "&"),
JsonResults = do_http_request(Url, get, []),
Results =
lists:map(
- fun({obj, [{"missing", Rev}]}) ->
+ fun({[{<<"missing">>, Rev}]}) ->
{{not_found, missing}, Rev};
- ({obj, [{"ok", JsonDoc}]}) ->
+ ({[{<<"ok">>, JsonDoc}]}) ->
{ok, couch_doc:from_json_obj(JsonDoc)}
- end, tuple_to_list(JsonResults)),
+ end, JsonResults),
{ok, Results};
open_doc_revs(Db, DocId, Revs, Options) ->
couch_db:open_doc_revs(Db, DocId, Revs, Options).
diff --git a/src/couchdb/couch_server.erl b/src/couchdb/couch_server.erl
index b6969f42..c3630f83 100644
--- a/src/couchdb/couch_server.erl
+++ b/src/couchdb/couch_server.erl
@@ -124,7 +124,7 @@ all_databases() ->
[$/ | RelativeFilename] -> ok;
RelativeFilename -> ok
end,
- [filename:rootname(RelativeFilename, ".couch") | AccIn]
+ [list_to_binary(filename:rootname(RelativeFilename, ".couch")) | AccIn]
end, []),
{ok, Filenames}.
@@ -174,9 +174,10 @@ handle_call(get_server, _From, Server) ->
handle_call(get_root, _From, #server{root_dir=Root}=Server) ->
{reply, {ok, Root}, Server};
handle_call({open, DbName, Options}, {FromPid,_}, Server) ->
- case check_dbname(Server, DbName) of
+ DbNameList = binary_to_list(DbName),
+ case check_dbname(Server, DbNameList) of
ok ->
- Filepath = get_full_filename(Server, DbName),
+ Filepath = get_full_filename(Server, DbNameList),
LruTime = now(),
case ets:lookup(couch_dbs_by_name, DbName) of
[] ->
@@ -207,9 +208,10 @@ handle_call({open, DbName, Options}, {FromPid,_}, Server) ->
{reply, Error, Server}
end;
handle_call({create, DbName, Options}, {FromPid,_}, Server) ->
- case check_dbname(Server, DbName) of
+ DbNameList = binary_to_list(DbName),
+ case check_dbname(Server, DbNameList) of
ok ->
- Filepath = get_full_filename(Server, DbName),
+ Filepath = get_full_filename(Server, DbNameList),
case ets:lookup(couch_dbs_by_name, DbName) of
[] ->
@@ -233,9 +235,10 @@ handle_call({create, DbName, Options}, {FromPid,_}, Server) ->
{reply, Error, Server}
end;
handle_call({delete, DbName}, _From, Server) ->
- case check_dbname(Server, DbName) of
+ DbNameList = binary_to_list(DbName),
+ case check_dbname(Server, DbNameList) of
ok ->
- FullFilepath = get_full_filename(Server, DbName),
+ FullFilepath = get_full_filename(Server, DbNameList),
Server2 =
case ets:lookup(couch_dbs_by_name, DbName) of
[] -> Server;
diff --git a/src/couchdb/couch_util.erl b/src/couchdb/couch_util.erl
index 4db10332..0f10c904 100644
--- a/src/couchdb/couch_util.erl
+++ b/src/couchdb/couch_util.erl
@@ -18,6 +18,7 @@
-export([abs_pathname/1,abs_pathname/2, trim/1, ascii_lower/1]).
-export([encodeBase64/1, decodeBase64/1, to_hex/1]).
+-include("couch_db.hrl").
% arbitrarily chosen amount of memory to use before flushing to disk
-define(FLUSH_MAX_MEM, 10000000).
@@ -33,7 +34,7 @@ start_driver(LibDir) ->
end.
new_uuid() ->
- to_hex(crypto:rand_bytes(16)).
+ list_to_binary(to_hex(crypto:rand_bytes(16))).
to_hex([]) ->
[];
@@ -141,27 +142,30 @@ drv_port() ->
put(couch_drv_port, Port),
Port;
Port ->
- Port
+ Port
end.
collate(A, B) ->
collate(A, B, []).
-collate(A, B, Options) when is_list(A), is_list(B) ->
+collate(A, B, Options) when is_binary(A), is_binary(B) ->
Operation =
case lists:member(nocase, Options) of
true -> 1; % Case insensitive
false -> 0 % Case sensitive
end,
- Port = drv_port(),
- LenA = length(A),
- LenB = length(B),
- Bin = list_to_binary([<<LenA:32/native>>, A, <<LenB:32/native>>, B]),
- case erlang:port_control(Port, Operation, Bin) of
- [0] -> -1;
- [1] -> 1;
- [2] -> 0
- end.
+
+ SizeA = size(A),
+ SizeB = size(B),
+ Bin = <<SizeA:32/native, A/binary, SizeB:32/native, B/binary>>,
+ [Result] = erlang:port_control(drv_port(), Operation, Bin),
+ % Result is 0 for lt, 1 for eq and 2 for gt. Subtract 1 to return the
+ % expected typical -1, 0, 1
+ Result - 1;
+
+collate(A, B, _Options) ->
+ throw({error, badtypes}).
should_flush() ->
should_flush(?FLUSH_MAX_MEM).
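
Aside, not part of the patch: the request layout and result mapping used by the new collate/3, as a standalone sketch (module and function names hypothetical; the C driver itself is unchanged):

    -module(collate_sketch).     % hypothetical, for illustration only
    -export([pack/2, to_result/1]).

    %% Each string is packed as a native-endian 32-bit length plus its
    %% bytes, matching the <<SizeA:32/native, A/binary, ...>> built above.
    pack(A, B) when is_binary(A), is_binary(B) ->
        SizeA = size(A),
        SizeB = size(B),
        <<SizeA:32/native, A/binary, SizeB:32/native, B/binary>>.

    %% The driver answers 0 (lt), 1 (eq) or 2 (gt); subtracting 1 gives
    %% the conventional -1 / 0 / 1.
    to_result(DriverByte) -> DriverByte - 1.
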
@@ -187,7 +191,6 @@ should_flush(MemThreshHold) ->
%%% erlang ssl library
-define(st(X,A), ((X-A+256) div 256)).
--define(CHARS, 64).
%% A PEM encoding consists of characters A-Z, a-z, 0-9, +, / and
%% =. Each character encodes a 6 bits value from 0 to 63 (A = 0, / =
@@ -195,42 +198,47 @@ should_flush(MemThreshHold) ->
%%
%%
-%% encode64(Bytes|Binary) -> Chars
+%% encode64(Bytes|Binary) -> binary
%%
%% Take 3 bytes a time (3 x 8 = 24 bits), and make 4 characters out of
%% them (4 x 6 = 24 bits).
%%
encodeBase64(Bs) when list(Bs) ->
- encodeBase64(list_to_binary(Bs));
-encodeBase64(<<B:3/binary, Bs/binary>>) ->
+ encodeBase64(list_to_binary(Bs), <<>>);
+encodeBase64(Bs) ->
+ encodeBase64(Bs, <<>>).
+
+encodeBase64(<<B:3/binary, Bs/binary>>, Acc) ->
<<C1:6, C2:6, C3:6, C4:6>> = B,
- [enc(C1), enc(C2), enc(C3), enc(C4)| encodeBase64(Bs)];
-encodeBase64(<<B:2/binary>>) ->
+ encodeBase64(Bs, <<Acc/binary, (enc(C1)), (enc(C2)), (enc(C3)), (enc(C4))>>);
+encodeBase64(<<B:2/binary>>, Acc) ->
<<C1:6, C2:6, C3:6, _:6>> = <<B/binary, 0>>,
- [enc(C1), enc(C2), enc(C3), $=];
-encodeBase64(<<B:1/binary>>) ->
+ <<Acc/binary, (enc(C1)), (enc(C2)), (enc(C3)), $=>>;
+encodeBase64(<<B:1/binary>>, Acc) ->
<<C1:6, C2:6, _:12>> = <<B/binary, 0, 0>>,
- [enc(C1), enc(C2), $=, $=];
-encodeBase64(<<>>) ->
- [].
+ <<Acc/binary, (enc(C1)), (enc(C2)), $=, $=>>;
+encodeBase64(<<>>, Acc) ->
+ Acc.
%%
-%% decodeBase64(Chars) -> Binary
+%% decodeBase64(BinaryChars) -> Binary
%%
+decodeBase64(Cs) when is_list(Cs)->
+ decodeBase64(list_to_binary(Cs));
decodeBase64(Cs) ->
- list_to_binary(decode1(Cs)).
+ decode1(Cs, <<>>).
-decode1([C1, C2, $=, $=]) ->
+decode1(<<C1, C2, $=, $=>>, Acc) ->
<<B1, _:16>> = <<(dec(C1)):6, (dec(C2)):6, 0:12>>,
- [B1];
-decode1([C1, C2, C3, $=]) ->
+ <<Acc/binary, B1>>;
+decode1(<<C1, C2, C3, $=>>, Acc) ->
<<B1, B2, _:8>> = <<(dec(C1)):6, (dec(C2)):6, (dec(C3)):6, (dec(0)):6>>,
- [B1, B2];
-decode1([C1, C2, C3, C4| Cs]) ->
- Bin = <<(dec(C1)):6, (dec(C2)):6, (dec(C3)):6, (dec(C4)):6>>,
- [Bin| decode1(Cs)];
-decode1([]) ->
- [].
+ <<Acc/binary, B1, B2>>;
+decode1(<<C1, C2, C3, C4, Cs/binary>>, Acc) ->
+ Bin = <<Acc/binary, (dec(C1)):6, (dec(C2)):6, (dec(C3)):6, (dec(C4)):6>>,
+ decode1(Cs, Bin);
+decode1(<<>>, Acc) ->
+ Acc.
%% enc/1 and dec/1
%%
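
Usage sketch of the rewritten pair: both now accumulate and return binaries (a list is still accepted on the way in):

    1> couch_util:encodeBase64(<<"hello">>).
    <<"aGVsbG8=">>
    2> couch_util:decodeBase64(<<"aGVsbG8=">>).
    <<"hello">>
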
diff --git a/src/couchdb/couch_view.erl b/src/couchdb/couch_view.erl
index 938084df..5b3105f1 100644
--- a/src/couchdb/couch_view.erl
+++ b/src/couchdb/couch_view.erl
@@ -94,6 +94,13 @@ get_reduce_view0(Name, Lang, [#view{reduce_funs=RedFuns}=View|Rest]) ->
N -> {ok, {reduce, N, Lang, View}}
end.
+detuple_kvs([], Acc) ->
+ lists:reverse(Acc);
+detuple_kvs([KV | Rest], Acc) ->
+ {{Key,Id},Value} = KV,
+ NKV = [[Key, Id], Value],
+ detuple_kvs(Rest, [NKV | Acc]).
+
expand_dups([], Acc) ->
lists:reverse(Acc);
expand_dups([{Key, {dups, Vals}} | Rest], Acc) ->
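
Shell sketch of detuple_kvs/2 above, as if it were exported (values illustrative): the btree's {{Key, Id}, Value} pairs become the [[Key, Id], Value] arrays the query servers now receive.

    1> couch_view:detuple_kvs([{{<<"k">>, <<"id1">>}, 1}, {{<<"k">>, <<"id2">>}, 2}], []).
    [[[<<"k">>,<<"id1">>],1],[[<<"k">>,<<"id2">>],2]]
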
@@ -111,13 +118,13 @@ fold_reduce({temp_reduce, #view{btree=Bt}}, Dir, StartKey, EndKey, GroupFun, Fun
couch_btree:fold_reduce(Bt, Dir, StartKey, EndKey, GroupFun,
WrapperFun, Acc);
-fold_reduce({reduce, NthRed, Lang, #view{btree=Bt, reduce_funs=RedFuns}}, Dir, StartKey, EndKey, GroupFun, Fun, Acc) ->
+fold_reduce({reduce, NthRed, Lang, #view{btree=Bt, reduce_funs=RedFuns}}, Dir, StartKey, EndKey, GroupFun, Fun, Acc) ->
PreResultPadding = lists:duplicate(NthRed - 1, []),
PostResultPadding = lists:duplicate(length(RedFuns) - NthRed, []),
{_Name, FunSrc} = lists:nth(NthRed,RedFuns),
ReduceFun =
fun(reduce, KVs) ->
- {ok, Reduced} = couch_query_servers:reduce(Lang, [FunSrc], expand_dups(KVs, [])),
+ {ok, Reduced} = couch_query_servers:reduce(Lang, [FunSrc], detuple_kvs(expand_dups(KVs, []),[])),
{0, PreResultPadding ++ Reduced ++ PostResultPadding};
(rereduce, Reds) ->
UserReds = [[lists:nth(NthRed, UserRedsList)] || {_, UserRedsList} <- Reds],
@@ -167,16 +174,16 @@ reduce_to_count(Reductions) ->
Count.
-design_doc_to_view_group(#doc{id=Id,body={obj, Fields}}) ->
- Language = proplists:get_value("language", Fields, "javascript"),
- {obj, RawViews} = proplists:get_value("views", Fields, {obj, []}),
+design_doc_to_view_group(#doc{id=Id,body={Fields}}) ->
+ Language = proplists:get_value(<<"language">>, Fields, <<"javascript">>),
+ {RawViews} = proplists:get_value(<<"views">>, Fields, {[]}),
% add the views to a dictionary object, with the map source as the key
DictBySrc =
lists:foldl(
- fun({Name, {obj, MRFuns}}, DictBySrcAcc) ->
- MapSrc = proplists:get_value("map", MRFuns),
- RedSrc = proplists:get_value("reduce", MRFuns, null),
+ fun({Name, {MRFuns}}, DictBySrcAcc) ->
+ MapSrc = proplists:get_value(<<"map">>, MRFuns),
+ RedSrc = proplists:get_value(<<"reduce">>, MRFuns, null),
View =
case dict:find(MapSrc, DictBySrcAcc) of
{ok, View0} -> View0;
@@ -248,15 +255,15 @@ terminate(_Reason,_State) ->
ok.
-handle_call({start_temp_updater, DbName, Lang, MapSrc, RedSrc}, _From, #server{root_dir=Root}=Server) ->
- <<SigInt:128/integer>> = erlang:md5(Lang ++ [0] ++ MapSrc ++ [0] ++ RedSrc),
+handle_call({start_temp_updater, DbName, Lang, MapSrc, RedSrc}, _From, #server{root_dir=Root}=Server) ->
+ <<SigInt:128/integer>> = erlang:md5(term_to_binary({Lang, MapSrc, RedSrc})),
Name = lists:flatten(io_lib:format("_temp_~.36B",[SigInt])),
Pid =
case ets:lookup(couch_views_by_name, {DbName, Name}) of
[] ->
case ets:lookup(couch_views_temp_fd_by_db, DbName) of
[] ->
- FileName = Root ++ "/." ++ DbName ++ "_temp",
+ FileName = Root ++ "/." ++ binary_to_list(DbName) ++ "_temp",
{ok, Fd} = couch_file:open(FileName, [create, overwrite]),
Count = 0;
[{_, Fd, Count}] ->
@@ -298,7 +305,7 @@ handle_cast({reset_indexes, DbName}, #server{root_dir=Root}=Server) ->
end
end, Names),
delete_index_dir(Root, DbName),
- file:delete(Root ++ "/." ++ DbName ++ "_temp"),
+ file:delete(Root ++ "/." ++ binary_to_list(DbName) ++ "_temp"),
{noreply, Server}.
handle_info({'EXIT', _FromPid, normal}, Server) ->
@@ -314,7 +321,7 @@ handle_info({'EXIT', FromPid, Reason}, #server{root_dir=RootDir}=Server) ->
case Count of
1 -> % Last ref
couch_file:close(Fd),
- file:delete(RootDir ++ "/." ++ DbName ++ "_temp"),
+ file:delete(RootDir ++ "/." ++ binary_to_list(DbName) ++ "_temp"),
true = ets:delete(couch_views_temp_fd_by_db, DbName);
_ ->
true = ets:insert(couch_views_temp_fd_by_db, {DbName, Fd, Count - 1})
@@ -398,7 +405,8 @@ start_update_loop(RootDir, DbName, GroupId, NotifyPids) ->
delete_index_file(RootDir, DbName, GroupId),
exit(Else)
end,
- FileName = RootDir ++ "/." ++ DbName ++ GroupId ++".view",
+ FileName = RootDir ++ "/." ++ binary_to_list(DbName) ++
+ binary_to_list(GroupId) ++".view",
Group2 =
case couch_file:open(FileName) of
{ok, Fd} ->
@@ -481,7 +489,6 @@ update_group(#group{db=Db,current_seq=CurrentSeq, views=Views}=Group) ->
fun(DocInfo, _, Acc) -> process_doc(Db, DocInfo, Acc) end,
{[], Group, ViewEmptyKVs, [], CurrentSeq}
),
-
{Group3, Results} = view_compute(Group2, UncomputedDocs),
{ViewKVsToAdd2, DocIdViewIdKeys2} = view_insert_query_results(UncomputedDocs, Results, ViewKVsToAdd, DocIdViewIdKeys),
couch_query_servers:stop_doc_map(Group3#group.query_server),
@@ -493,7 +500,7 @@ update_group(#group{db=Db,current_seq=CurrentSeq, views=Views}=Group) ->
end.
delete_index_dir(RootDir, DbName) ->
- nuke_dir(RootDir ++ "/." ++ DbName ++ "_design").
+ nuke_dir(RootDir ++ "/." ++ binary_to_list(DbName) ++ "_design").
nuke_dir(Dir) ->
case file:list_dir(Dir) of
@@ -513,7 +520,8 @@ nuke_dir(Dir) ->
end.
delete_index_file(RootDir, DbName, GroupId) ->
- file:delete(RootDir ++ "/." ++ DbName ++ GroupId ++ ".view").
+ file:delete(RootDir ++ "/." ++ binary_to_list(DbName)
+ ++ binary_to_list(GroupId) ++ ".view").
init_group(Db, Fd, #group{views=Views}=Group, nil = _IndexHeaderData) ->
init_group(Db, Fd, Group, {0, nil, [nil || _ <- Views]});
@@ -526,8 +534,9 @@ init_group(Db, Fd, #group{def_lang=Lang,views=Views}=Group,
ReduceFun =
fun(reduce, KVs) ->
KVs2 = expand_dups(KVs,[]),
- {ok, Reduced} = couch_query_servers:reduce(Lang, FunSrcs, KVs2),
- {length(KVs2), Reduced};
+ KVs3 = detuple_kvs(KVs2,[]),
+ {ok, Reduced} = couch_query_servers:reduce(Lang, FunSrcs, KVs3),
+ {length(KVs3), Reduced};
(rereduce, Reds) ->
Count = lists:sum([Count0 || {Count0, _} <- Reds]),
UserReds = [UserRedsList || {_, UserRedsList} <- Reds],
@@ -535,7 +544,7 @@ init_group(Db, Fd, #group{def_lang=Lang,views=Views}=Group,
{Count, Reduced}
end,
{ok, Btree} = couch_btree:open(BtreeState, Fd,
- [{less, fun less_json/2},{reduce, ReduceFun}]),
+ [{less, fun less_json_keys/2},{reduce, ReduceFun}]),
View#view{btree=Btree}
end,
ViewStates, Views),
@@ -546,14 +555,17 @@ get_index_header_data(#group{current_seq=Seq,id_btree=IdBtree,views=Views}) ->
ViewStates = [couch_btree:get_state(Btree) || #view{btree=Btree} <- Views],
{Seq, couch_btree:get_state(IdBtree), ViewStates}.
-
+% keys come back in the language of btree - tuples.
+less_json_keys(A, B) ->
+ less_json(tuple_to_list(A), tuple_to_list(B)).
less_json(A, B) ->
TypeA = type_sort(A),
TypeB = type_sort(B),
if
TypeA == TypeB ->
- less_same_type(A,B);
+ less_same_type(A,B);
true ->
TypeA < TypeB
end.
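
Consequences of the reordered type_sort/1 below: cross-type comparisons are decided by type alone and never reach the collation driver. Shell sketch, assuming the patched couch_view is loaded:

    1> couch_view:less_json(null, 1).      % atoms sort before numbers
    true
    2> couch_view:less_json(1, <<"a">>).   % numbers before strings (binaries)
    true
    3> couch_view:less_json(<<"a">>, []).  % strings before arrays (lists)
    true
    4> couch_view:less_json([], {[]}).     % arrays before objects ({Props})
    true
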
@@ -561,36 +573,35 @@ less_json(A, B) ->
type_sort(V) when is_atom(V) -> 0;
type_sort(V) when is_integer(V) -> 1;
type_sort(V) when is_float(V) -> 1;
-type_sort(V) when is_list(V) -> 2;
-type_sort({obj, _}) -> 4; % must come before tuple test below
-type_sort(V) when is_tuple(V) -> 3;
-type_sort(V) when is_binary(V) -> 5.
+type_sort(V) when is_binary(V) -> 2;
+type_sort(V) when is_list(V) -> 3;
+type_sort({V}) when is_list(V) -> 4; % must come before tuple test below
+type_sort(V) when is_tuple(V) -> 5.
+
atom_sort(nil) -> 0;
atom_sort(null) -> 1;
atom_sort(false) -> 2;
atom_sort(true) -> 3.
+
less_same_type(A,B) when is_atom(A) ->
- atom_sort(A) < atom_sort(B);
-less_same_type(A,B) when is_list(A) ->
- couch_util:collate(A, B) < 0;
-less_same_type({obj, AProps}, {obj, BProps}) ->
- less_props(AProps, BProps);
-less_same_type(A, B) when is_tuple(A) ->
- less_list(tuple_to_list(A),tuple_to_list(B));
+ atom_sort(A) < atom_sort(B);
+less_same_type(A,B) when is_binary(A) ->
+ couch_util:collate(A, B) < 0;
+less_same_type({AProps}, {BProps}) ->
+ less_props(AProps, BProps);
+less_same_type(A, B) when is_list(A) ->
+ less_list(A, B);
less_same_type(A, B) ->
A < B.
-
-ensure_list(V) when is_list(V) -> V;
-ensure_list(V) when is_atom(V) -> atom_to_list(V).
-
+
less_props([], [_|_]) ->
true;
less_props(_, []) ->
false;
less_props([{AKey, AValue}|RestA], [{BKey, BValue}|RestB]) ->
- case couch_util:collate(ensure_list(AKey), ensure_list(BKey)) of
+ case couch_util:collate(AKey, BKey) of
-1 -> true;
1 -> false;
0 ->
@@ -639,7 +650,7 @@ process_doc(Db, DocInfo, {Docs, #group{sig=Sig,name=GroupId}=Group, ViewKVs, Doc
{not_found, deleted} ->
throw(restart)
end;
- ?DESIGN_DOC_PREFIX ++ _ -> % we skip design docs
+ <<?DESIGN_DOC_PREFIX, _/binary>> -> % we skip design docs
{ok, {Docs, Group, ViewKVs, DocIdViewIdKeys, Seq}};
_ ->
{Docs2, DocIdViewIdKeys2} =
@@ -708,7 +719,6 @@ view_compute(#group{def_lang=DefLang, query_server=QueryServerIn}=Group, Docs) -
{ok, QueryServerIn}
end,
{ok, Results} = couch_query_servers:map_docs(QueryServer, Docs),
-
{Group#group{query_server=QueryServer}, Results}.
@@ -726,7 +736,6 @@ write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq) ->
AddDocIdViewIdKeys = [{DocId, ViewIdKeys} || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys /= []],
RemoveDocIds = [DocId || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys == []],
LookupDocIds = [DocId || {DocId, _ViewIdKeys} <- DocIdViewIdKeys],
-
{ok, LookupResults, IdBtree2}
= couch_btree:query_modify(IdBtree, LookupDocIds, AddDocIdViewIdKeys, RemoveDocIds),
KeysToRemoveByView = lists:foldl(
diff --git a/src/mochiweb/mochijson2.erl b/src/mochiweb/mochijson2.erl
index 592b4790..6fa7499c 100644
--- a/src/mochiweb/mochijson2.erl
+++ b/src/mochiweb/mochijson2.erl
@@ -104,7 +104,7 @@ json_encode(S, State) when is_binary(S); is_atom(S) ->
json_encode_string(S, State);
json_encode(Array, State) when is_list(Array) ->
json_encode_array(Array, State);
-json_encode({struct, Props}, State) when is_list(Props) ->
+json_encode({Props}, State) when is_list(Props) ->
json_encode_proplist(Props, State);
json_encode(Bad, #encoder{handler=null}) ->
exit({json_encode, {bad_term, Bad}});
@@ -220,7 +220,7 @@ decode_object(B, S) ->
decode_object(B, S=#decoder{state=key}, Acc) ->
case tokenize(B, S) of
{end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
+ V = make_object({lists:reverse(Acc)}, S1),
{V, S1#decoder{state=null}};
{{const, K}, S1} ->
{colon, S2} = tokenize(B, S1),
@@ -230,7 +230,7 @@ decode_object(B, S=#decoder{state=key}, Acc) ->
decode_object(B, S=#decoder{state=comma}, Acc) ->
case tokenize(B, S) of
{end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
+ V = make_object({lists:reverse(Acc)}, S1),
{V, S1#decoder{state=null}};
{comma, S1} ->
decode_object(B, S1#decoder{state=key}, Acc)
@@ -396,9 +396,9 @@ tokenize(B, S=#decoder{offset=O}) ->
%% Create an object from a list of Key/Value pairs.
obj_new() ->
- {struct, []}.
+ {[]}.
-is_obj({struct, Props}) ->
+is_obj({Props}) ->
F = fun ({K, _}) when is_binary(K) ->
true;
(_) ->
@@ -407,7 +407,7 @@ is_obj({struct, Props}) ->
lists:all(F, Props).
obj_from_list(Props) ->
- Obj = {struct, Props},
+ Obj = {Props},
case is_obj(Obj) of
true -> Obj;
false -> exit({json_bad_object, Obj})
@@ -418,7 +418,7 @@ obj_from_list(Props) ->
%% compare unequal as erlang terms, so we need to carefully recurse
%% through aggregates (tuples and objects).
-equiv({struct, Props1}, {struct, Props2}) ->
+equiv({Props1}, {Props2}) ->
equiv_object(Props1, Props2);
equiv(L1, L2) when is_list(L1), is_list(L2) ->
equiv_list(L1, L2);