Diffstat (limited to 'src/couchdb/couch_httpd.erl')
-rw-r--r-- | src/couchdb/couch_httpd.erl | 416
1 file changed, 197 insertions, 219 deletions
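
This commit switches couch_httpd.erl from the cjson encoder to the ?JSON_ENCODE/?JSON_DECODE macros and the plain JSON term format: objects move from {obj, PropList} tuples with string keys to {PropList} tuples with binary (or atom) keys, JSON arrays move from Erlang tuples to lists, and document IDs, revisions and database names become binaries. A rough before/after sketch, based on the welcome handler changed below (the ?JSON_ENCODE/?JSON_DECODE macro definitions live in a shared header that is not part of this diff):

    %% before: cjson term format (objects tagged with obj, arrays as tuples,
    %% keys and strings as Erlang lists)
    cjson:encode({obj, [{"couchdb", "Welcome"},
                        {"version", couch_server:get_version()}]})

    %% after: plain term format consumed by ?JSON_ENCODE / ?JSON_DECODE
    %% (objects as {PropList}, arrays as lists, strings as binaries)
    ?JSON_ENCODE({[{couchdb, <<"Welcome">>},
                   {version, list_to_binary(couch_server:get_version())}]})
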
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index cd5c6a70..048445a9 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -26,14 +26,14 @@
 -record(view_query_args, {
     start_key = nil,
-    end_key = <<>>,
+    end_key = {},
     count = 10000000000, % a huge huge default number. Picked so we don't have
                          % to do different logic for when there is no count
                          % limit
     update = true,
     direction = fwd,
     start_docid = nil,
-    end_docid = <<>>,
+    end_docid = {},
     skip = 0,
     group_level = 0
 }).
@@ -91,7 +91,7 @@ handle_request(Req, DocumentRoot) ->
     {Path, _, _} = mochiweb_util:urlsplit_path(Req:get(raw_path)),

     ?LOG_DEBUG("~p ~s ~p~nHeaders: ~p", [
-        Method,
+        Req:get(method),
         Path,
         Req:get(version),
         mochiweb_headers:to_list(Req:get(headers))
@@ -148,9 +148,9 @@ handle_request0(Req, DocumentRoot, Method, Path) ->
 % Global request handlers

 handle_welcome_request(Req, 'GET') ->
-    send_json(Req, {obj, [
-        {"couchdb", "Welcome"},
-        {"version", couch_server:get_version()}
+    send_json(Req, {[
+        {couchdb, <<"Welcome">>},
+        {version, list_to_binary(couch_server:get_version())}
     ]});

 handle_welcome_request(_Req, _Method) ->
@@ -158,24 +158,24 @@ handle_welcome_request(_Req, _Method) ->

 handle_all_dbs_request(Req, 'GET') ->
     {ok, DbNames} = couch_server:all_databases(),
-    send_json(Req, list_to_tuple(DbNames));
+    send_json(Req, DbNames);

 handle_all_dbs_request(_Req, _Method) ->
     throw({method_not_allowed, "GET,HEAD"}).

 handle_replicate_request(Req, 'POST') ->
-    {obj, Props} = cjson:decode(Req:recv_body()),
-    Source = proplists:get_value("source", Props),
-    Target = proplists:get_value("target", Props),
-    {obj, Options} = proplists:get_value("options", Props, {obj, []}),
-    {ok, {obj, JsonResults}} = couch_rep:replicate(Source, Target, Options),
-    send_json(Req, {obj, [{ok, true} | JsonResults]});
+    {Props} = ?JSON_DECODE(Req:recv_body()),
+    Source = proplists:get_value(<<"source">>, Props),
+    Target = proplists:get_value(<<"target">>, Props),
+    {Options} = proplists:get_value(<<"options">>, Props, {[]}),
+    {ok, {JsonResults}} = couch_rep:replicate(Source, Target, Options),
+    send_json(Req, {[{ok, true} | JsonResults]});

 handle_replicate_request(_Req, _Method) ->
     throw({method_not_allowed, "POST"}).

 handle_restart_request(Req, 'POST') ->
-    Response = send_json(Req, {obj, [{ok, true}]}),
+    Response = send_json(Req, {[{ok, true}]}),
     spawn(fun() -> couch_server:remote_restart() end),
     Response;
@@ -187,7 +187,7 @@ handle_uuids_request(Req, 'POST') ->
     % generate the uuids
     UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)],
     % send a JSON response
-    send_json(Req, {obj, [{"uuids", list_to_tuple(UUIDs)}]});
+    send_json(Req, {[{"uuids", UUIDs}]});

 handle_uuids_request(_Req, _Method) ->
     throw({method_not_allowed, "POST"}).
@@ -197,14 +197,15 @@ handle_uuids_request(_Req, _Method) ->

 handle_db_request(Req, Method, {Path}) ->
     UriParts = string:tokens(Path, "/"),
-    [DbName|Rest] = UriParts,
-    handle_db_request(Req, Method, {mochiweb_util:unquote(DbName), Rest});
+    [DbName|Rest] =
+        [list_to_binary(mochiweb_util:unquote(Part)) || Part <- UriParts],
+    handle_db_request(Req, Method, {DbName, Rest});

 handle_db_request(Req, 'PUT', {DbName, []}) ->
     case couch_server:create(DbName, []) of
     {ok, Db} ->
         couch_db:close(Db),
-        send_json(Req, 201, {obj, [{ok, true}]});
+        send_json(Req, 201, {[{ok, true}]});
     {error, database_already_exists} ->
         Msg = io_lib:format("Database ~p already exists.", [DbName]),
         throw({database_already_exists, Msg});
@@ -216,7 +217,7 @@ handle_db_request(Req, 'PUT', {DbName, []}) ->
 handle_db_request(Req, 'DELETE', {DbName, []}) ->
     case couch_server:delete(DbName) of
     ok ->
-        send_json(Req, 200, {obj, [
+        send_json(Req, 200, {[
             {ok, true}
         ]});
     Error ->
@@ -237,15 +238,15 @@ handle_db_request(Req, Method, {DbName, Rest}) ->

 handle_db_request(Req, 'GET', {DbName, Db, []}) ->
     {ok, DbInfo} = couch_db:get_db_info(Db),
-    send_json(Req, {obj, [{db_name, DbName} | DbInfo]});
+    send_json(Req, {[{db_name, DbName} | DbInfo]});

 handle_db_request(Req, 'POST', {_DbName, Db, []}) ->
     % TODO: Etag handling
-    Json = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
+    Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
     Doc = couch_doc:from_json_obj(Json),
     DocId = couch_util:new_uuid(),
     {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=[]}, []),
-    send_json(Req, 201, {obj, [
+    send_json(Req, 201, {[
         {ok, true},
         {id, DocId},
         {rev, NewRev}
@@ -254,63 +255,63 @@ handle_db_request(Req, 'POST', {_DbName, Db, []}) ->
 handle_db_request(_Req, _Method, {_DbName, _Db, []}) ->
     throw({method_not_allowed, "DELETE,GET,HEAD,POST"});

-handle_db_request(Req, 'POST', {_DbName, Db, ["_bulk_docs"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_bulk_docs">>]}) ->
     Options = [], % put options here.
-    {obj, JsonProps} = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
-    DocsArray = proplists:get_value("docs", JsonProps),
+    {JsonProps} = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
+    DocsArray = proplists:get_value(<<"docs">>, JsonProps),
     % convert all the doc elements to native docs
-    case proplists:get_value("new_edits", JsonProps, true) of
+    case proplists:get_value(<<"new_edits">>, JsonProps, true) of
     true ->
         Docs = lists:map(
-            fun({obj, ObjProps} = JsonObj) ->
+            fun({ObjProps} = JsonObj) ->
                 Doc = couch_doc:from_json_obj(JsonObj),
                 Id = case Doc#doc.id of
-                    "" -> couch_util:new_uuid();
+                    <<>> -> couch_util:new_uuid();
                     Id0 -> Id0
                 end,
-                Revs = case proplists:get_value("_rev", ObjProps) of
+                Revs = case proplists:get_value(<<"_rev">>, ObjProps) of
                     undefined -> [];
                     Rev -> [Rev]
                 end,
                 Doc#doc{id=Id,revs=Revs}
             end,
-            tuple_to_list(DocsArray)),
+            DocsArray),
         {ok, ResultRevs} = couch_db:update_docs(Db, Docs, Options),

         % output the results
         DocResults = lists:zipwith(
             fun(Doc, NewRev) ->
-                {obj, [{"id", Doc#doc.id}, {"rev", NewRev}]}
+                {[{"id", Doc#doc.id}, {"rev", NewRev}]}
             end,
             Docs, ResultRevs),
-        send_json(Req, 201, {obj, [
+        send_json(Req, 201, {[
             {ok, true},
-            {new_revs, list_to_tuple(DocResults)}
+            {new_revs, DocResults}
         ]});

     false ->
-        Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- tuple_to_list(DocsArray)],
+        Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
         ok = couch_db:save_docs(Db, Docs, Options),
-        send_json(Req, 201, {obj, [
+        send_json(Req, 201, {[
             {ok, true}
         ]})
     end;

-handle_db_request(_Req, _Method, {_DbName, _Db, ["_bulk_docs"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_bulk_docs">>]}) ->
     throw({method_not_allowed, "POST"});

-handle_db_request(Req, 'POST', {_DbName, Db, ["_compact"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_compact">>]}) ->
     ok = couch_db:start_compact(Db),
-    send_json(Req, 202, {obj, [
+    send_json(Req, 202, {[
         {ok, true}
     ]});

-handle_db_request(_Req, _Method, {_DbName, _Db, ["_compact"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_compact">>]}) ->
     throw({method_not_allowed, "POST"});

 % View request handlers

-handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
+handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs">>]}) ->
     #view_query_args{
         start_key = StartKey,
         start_docid = StartDocId,
@@ -321,7 +322,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
     {ok, Info} = couch_db:get_db_info(Db),
     TotalRowCount = proplists:get_value(doc_count, Info),

-    StartId = if is_list(StartKey) -> StartKey;
+    StartId = if is_binary(StartKey) -> StartKey;
     true -> StartDocId
     end,
@@ -330,7 +331,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
     AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
         case couch_doc:to_doc_info(FullDocInfo) of
         #doc_info{deleted=false, rev=Rev} ->
-            FoldlFun({{Id, Id}, {obj, [{rev, Rev}]}}, Offset, Acc);
+            FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
         #doc_info{deleted=true} ->
             {ok, Acc}
         end
@@ -339,10 +340,10 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs"]}) ->
         {Count, SkipCount, undefined, []}),
     finish_view_fold(Req, TotalRowCount, {ok, FoldResult});

-handle_db_request(_Req, _Method, {_DbName, _Db, ["_all_docs"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_all_docs">>]}) ->
     throw({method_not_allowed, "GET,HEAD"});

-handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
+handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs_by_seq">>]}) ->
     #view_query_args{
         start_key = StartKey,
         count = Count,
@@ -370,15 +371,15 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
                 conflict_revs=ConflictRevs,
                 deleted_conflict_revs=DelConflictRevs
             } = DocInfo,
-            Json = {obj,
+            Json = {
                 [{"rev", Rev}] ++
                 case ConflictRevs of
                 [] -> [];
-                _ -> [{"conflicts", list_to_tuple(ConflictRevs)}]
+                _ -> [{"conflicts", ConflictRevs}]
                 end ++
                 case DelConflictRevs of
                 [] -> [];
-                _ -> [{"deleted_conflicts", list_to_tuple(DelConflictRevs)}]
+                _ -> [{"deleted_conflicts", DelConflictRevs}]
                 end ++
                 case Deleted of
                 true -> [{"deleted", true}];
@@ -392,7 +393,7 @@ handle_db_request(Req, 'GET', {_DbName, Db, ["_all_docs_by_seq"]}) ->
 handle_db_request(_Req, _Method, {_DbName, _Db, ["_all_docs_by_seq"]}) ->
     throw({method_not_allowed, "GET,HEAD"});

-handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
+handle_db_request(Req, 'GET', {DbName, _Db, [<<"_view">>, DocId, ViewName]}) ->
     #view_query_args{
         start_key = StartKey,
         count = Count,
@@ -400,8 +401,9 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
         direction = Dir,
         start_docid = StartDocId
     } = QueryArgs = parse_view_query(Req),
-    case couch_view:get_map_view({DbName, "_design/" ++ DocId, ViewName}) of
-    {ok, View} ->
+
+    case couch_view:get_map_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
+    {ok, View} ->
         {ok, RowCount} = couch_view:get_row_count(View),
         Start = {StartKey, StartDocId},
         FoldlFun = make_view_fold_fun(Req, QueryArgs, RowCount,
@@ -410,7 +412,7 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
         FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
         finish_view_fold(Req, RowCount, FoldResult);
     {not_found, Reason} ->
-        case couch_view:get_reduce_view({DbName, "_design/" ++ DocId, ViewName}) of
+        case couch_view:get_reduce_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
         {ok, View} ->
             output_reduce_view(Req, View);
         _ ->
@@ -418,27 +420,25 @@ handle_db_request(Req, 'GET', {DbName, _Db, ["_view", DocId, ViewName]}) ->
         end
     end;

-handle_db_request(_Req, _Method, {_DbName, _Db, ["_view", _DocId, _ViewName]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_view">>, _DocId, _ViewName]}) ->
     throw({method_not_allowed, "GET,HEAD"});

-handle_db_request(Req, 'POST', {_DbName, Db, ["_missing_revs"]}) ->
-    {obj, JsonDocIdRevs} = cjson:decode(Req:recv_body()),
-    DocIdRevs = [{Id, tuple_to_list(Revs)} || {Id, Revs} <- JsonDocIdRevs],
-    {ok, Results} = couch_db:get_missing_revs(Db, DocIdRevs),
-    JsonResults = [{Id, list_to_tuple(Revs)} || {Id, Revs} <- Results],
-    send_json(Req, {obj, [
-        {missing_revs, {obj, JsonResults}}
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_missing_revs">>]}) ->
+    {JsonDocIdRevs} = ?JSON_DECODE(Req:recv_body()),
+    {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs),
+    send_json(Req, {[
+        {missing_revs, {Results}}
     ]});

-handle_db_request(Req, 'POST', {_DbName, Db, ["_increment_update_seq"]}) ->
+handle_db_request(Req, 'POST', {_DbName, Db, [<<"_increment_update_seq">>]}) ->
     % NOTE, use at own risk. This functionality is experimental
     % and might go away entirely.
     {ok, NewSeq} = couch_db:increment_update_seq(Db),
-    send_json(Req, {obj, [{ok, true},
+    send_json(Req, {[{ok, true},
         {update_seq, NewSeq}
     ]});

-handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
+handle_db_request(Req, 'POST', {DbName, _Db, [<<"_temp_view">>]}) ->
     #view_query_args{
         start_key = StartKey,
         count = Count,
@@ -452,18 +452,22 @@ handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
     "application/json" -> ok;
     Else -> throw({incorrect_mime_type, Else})
     end,
-    {obj, Props} = cjson:decode(Req:recv_body()),
-    Language = proplists:get_value("language", Props, "javascript"),
-    MapSrc = proplists:get_value("map", Props),
-    case proplists:get_value("reduce", Props, null) of
+    {Props} = ?JSON_DECODE(Req:recv_body()),
+    Language = proplists:get_value(<<"language">>, Props, <<"javascript">>),
+    MapSrc = proplists:get_value(<<"map">>, Props),
+    case proplists:get_value(<<"reduce">>, Props, null) of
     null ->
         {ok, View} = couch_view:get_map_view({temp, DbName, Language, MapSrc}),
         Start = {StartKey, StartDocId},
+        {ok, TotalRows} = couch_view:get_row_count(View),
+        FoldlFun = make_view_fold_fun(Req, QueryArgs, TotalRows,
             fun couch_view:reduce_to_count/1),
         FoldAccInit = {Count, SkipCount, undefined, []},
-        FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
+        FoldResult = couch_view:fold(View, Start, Dir, fun(A, B, C) ->
+            FoldlFun(A, B, C)
+        end, FoldAccInit),
         finish_view_fold(Req, TotalRows, FoldResult);

     RedSrc ->
@@ -472,26 +476,22 @@ handle_db_request(Req, 'POST', {DbName, _Db, ["_temp_view"]}) ->
         output_reduce_view(Req, View)
     end;

-handle_db_request(_Req, _Method, {_DbName, _Db, ["_temp_view"]}) ->
+handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_temp_view">>]}) ->
     throw({method_not_allowed, "POST"});

 % Document request handlers

-handle_db_request(Req, Method, {DbName, Db, ["_design", DesignName]}) ->
+handle_db_request(Req, Method, {DbName, Db, [<<"_design">>, Name]}) ->
     % Special case to enable using an unencoded in the URL of design docs, as
     % slashes in document IDs must otherwise be URL encoded
-    DocId = mochiweb_util:join(["_design", DesignName], "/"),
-    handle_db_request(Req, Method, {DbName, Db, [DocId]});
+    handle_db_request(Req, Method, {DbName, Db, [<<"_design/", Name/binary>>]});

 handle_db_request(Req, Method, {DbName, Db, [DocId]}) ->
-    UnquotedDocId = mochiweb_util:unquote(DocId),
-    handle_doc_request(Req, Method, DbName, Db, UnquotedDocId);
+    handle_doc_request(Req, Method, DbName, Db,DocId);

 handle_db_request(Req, Method, {DbName, Db, [DocId, FileName]}) ->
-    UnquotedDocId = mochiweb_util:unquote(DocId),
-    UnquotedFileName = mochiweb_util:unquote(FileName),
-    handle_attachment_request(Req, Method, DbName, Db, UnquotedDocId,
-        UnquotedFileName).
+    handle_attachment_request(Req, Method, DbName, Db, DocId,
+        FileName).

 output_reduce_view(Req, View) ->
     #view_query_args{
@@ -508,8 +508,8 @@ output_reduce_view(Req, View) ->
         fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
             true;
         ({Key1,_}, {Key2,_})
-                when is_integer(GroupLevel) and is_tuple(Key1) and is_tuple(Key2) ->
-            lists:sublist(tuple_to_list(Key1), GroupLevel) == lists:sublist(tuple_to_list(Key2), GroupLevel);
+                when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
+            lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
         ({Key1,_}, {Key2,_}) ->
             Key1 == Key2
         end,
@@ -522,52 +522,37 @@ output_reduce_view(Req, View) ->
         (_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
             {stop, {AccSeparator,0,AccCount}};
         (_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
-            Json = lists:flatten(cjson:encode({obj, [{key, null}, {value, Red}]})),
+            Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
             Resp:write_chunk(AccSeparator ++ Json),
             {ok, {",",0,AccCount-1}};
         (Key, Red, {AccSeparator,0,AccCount})
-                when is_integer(GroupLevel)
-                andalso is_tuple(Key)
-                andalso element(1, Key) /= obj ->
-            Json = lists:flatten(cjson:encode(
-                {obj, [{key, list_to_tuple(lists:sublist(tuple_to_list(Key), GroupLevel))},
-                    {value, Red}]})),
+                when is_integer(GroupLevel)
+                andalso is_list(Key) ->
+            Json = ?JSON_ENCODE(
                {[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
             Resp:write_chunk(AccSeparator ++ Json),
             {ok, {",",0,AccCount-1}};
         (Key, Red, {AccSeparator,0,AccCount}) ->
-            Json = lists:flatten(cjson:encode({obj, [{key, Key}, {value, Red}]})),
+            Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
             Resp:write_chunk(AccSeparator ++ Json),
             {ok, {",",0,AccCount-1}}
         end, {"", Skip, Count}),
     Resp:write_chunk("]}"),
     end_json_response(Resp).

+
 handle_doc_request(Req, 'DELETE', _DbName, Db, DocId) ->
-    QueryRev = proplists:get_value("rev", Req:parse_qs()),
-    Etag = case Req:get_header_value("If-Match") of
-        undefined ->
-            undefined;
-        Tag ->
-            string:strip(Tag, both, $")
-    end,
-    RevToDelete = case {QueryRev, Etag} of
-    {undefined, undefined} ->
-        throw({missing_rev, "Document rev/etag must be specified to delete"});
-    {_, undefined} ->
-        QueryRev;
-    {undefined, _} ->
-        Etag;
-    _ when QueryRev == Etag ->
-        Etag;
-    _ ->
-        throw({bad_request, "Document rev and etag have different values"})
-    end,
-    {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
-    send_json(Req, 200, {obj, [
-        {ok, true},
-        {id, DocId},
-        {rev, NewRev}
-    ]});
+    case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+    missing_rev ->
+        {missing_rev, "Document rev/etag must be specified to delete"};
+    RevToDelete ->
+        {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
+        send_json(Req, 200, {[
+            {ok, true},
+            {id, DocId},
+            {rev, NewRev}
+        ]})
+    end;

 handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
     #doc_query_args{
@@ -596,10 +581,10 @@ handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
                 case Result of
                 {ok, Doc} ->
                     JsonDoc = couch_doc:to_json_obj(Doc, Options),
-                    Json = lists:flatten(cjson:encode({obj, [{ok, JsonDoc}]})),
+                    Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
                     Resp:write_chunk(AccSeparator ++ Json);
                 {{not_found, missing}, RevId} ->
-                    Json = lists:flatten(cjson:encode({obj, [{"missing", RevId}]})),
+                    Json = ?JSON_ENCODE({[{"missing", RevId}]}),
                     Resp:write_chunk(AccSeparator ++ Json)
                 end,
                 "," % AccSeparator now has a comma
@@ -611,16 +596,17 @@ handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->

 handle_doc_request(Req, 'POST', _DbName, Db, DocId) ->
     Form = mochiweb_multipart:parse_form(Req),
-    Rev = proplists:get_value("_rev", Form),
-    NewAttachments = [{Name, {ContentType, Content}} ||
-        {Name, {ContentType, _}, Content} <-
-        proplists:get_all_values("_attachments", Form)],
-
+    Rev = list_to_binary(proplists:get_value("_rev", Form)),
     Doc = case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
     {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
     {ok, [Error]} -> throw(Error)
     end,
+    NewAttachments = [
+        {list_to_binary(Name), {list_to_binary(ContentType), Content}} ||
+            {Name, {ContentType, _}, Content} <-
+            proplists:get_all_values("_attachments", Form)
+    ],
     #doc{attachments=Attachments} = Doc,
     NewDoc = Doc#doc{
         attachments = Attachments ++ NewAttachments
@@ -634,87 +620,78 @@ handle_doc_request(Req, 'POST', _DbName, Db, DocId) ->
     ]});

 handle_doc_request(Req, 'PUT', _DbName, Db, DocId) ->
-    Json = {obj, DocProps} = cjson:decode(Req:recv_body(?MAX_DOC_SIZE)),
-    DocRev = proplists:get_value("_rev", DocProps),
-    Etag = case Req:get_header_value("If-Match") of
-        undefined ->
-            undefined;
-        Tag ->
-            string:strip(Tag, both, $")
+    Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
+    Doc = couch_doc:from_json_obj(Json),
+    ExplicitRev =
+    case Doc#doc.revs of
+        [Rev0|_] -> Rev0;
+        [] -> undefined
     end,
-    Revs = case {DocRev, Etag} of
-    {undefined, undefined} ->
-        [];
-    {_, undefined} ->
-        [DocRev];
-    {undefined, _} ->
-        [Etag];
-    _ when DocRev == Etag ->
-        [Etag];
-    _ ->
-        throw({bad_request, "Document rev and etag have different values"})
+    case extract_header_rev(Req, ExplicitRev) of
+    missing_rev ->
+        Revs = [];
+    Rev ->
+        Revs = [Rev]
     end,
-
-    Doc = couch_doc:from_json_obj(Json),
-
     {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, []),
-    send_json(Req, 201, [{"Etag", "\"" ++ NewRev ++ "\""}], {obj, [
+    send_json(Req, 201, [{"Etag", <<"\"", NewRev/binary, "\"">>}], {[
         {ok, true},
         {id, DocId},
         {rev, NewRev}
     ]});

 handle_doc_request(Req, 'COPY', _DbName, Db, SourceDocId) ->
-    SourceRev = case extract_header_rev(Req) of
-        missing_rev -> [];
-        Rev -> Rev
+    SourceRev =
+    case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+    missing_rev -> [];
+    Rev -> Rev
     end,

     {TargetDocId, TargetRev} = parse_copy_destination_header(Req),

-    % open revision Rev or Current 
+    % open revision Rev or Current
     {Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),

     % save new doc
     {ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRev}, []),

-    send_json(Req, 201, [{"Etag", "\"" ++ NewTargetRev ++ "\""}], {obj, [
+    send_json(Req, 201, [{"Etag", "\"" ++ binary_to_list(NewTargetRev) ++ "\""}], {[
         {ok, true},
         {id, TargetDocId},
         {rev, NewTargetRev}
     ]});

 handle_doc_request(Req, 'MOVE', _DbName, Db, SourceDocId) ->
-    SourceRev = case extract_header_rev(Req) of
-        missing_rev ->
+    SourceRev =
+    case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
+    missing_rev ->
         throw({
-            bad_request,
-            "MOVE requires a specified rev parameter for the origin resource."}
-        );
-        Rev -> Rev
+            bad_request,
+            "MOVE requires a specified rev parameter for the origin resource."}
+            );
+    Rev -> Rev
     end,

     {TargetDocId, TargetRev} = parse_copy_destination_header(Req),
-    % open revision Rev or Current
     {Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),

     % save new doc & delete old doc in one operation
     Docs = [
-        Doc#doc{id=TargetDocId, revs=TargetRev},
-        #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
-        ],
+        Doc#doc{id=TargetDocId, revs=TargetRev},
+        #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
+    ],

     {ok, ResultRevs} = couch_db:update_docs(Db, Docs, []),

     DocResults = lists:zipwith(
         fun(FDoc, NewRev) ->
-            {obj, [{"id", FDoc#doc.id}, {"rev", NewRev}]}
+            {[{id, FDoc#doc.id}, {rev, NewRev}]}
         end,
         Docs, ResultRevs),
-    send_json(Req, 201, {obj, [
+    send_json(Req, 201, {[
         {ok, true},
-        {new_revs, list_to_tuple(DocResults)}
+        {new_revs, DocResults}
     ]});

 handle_doc_request(_Req, _Method, _DbName, _Db, _DocId) ->
@@ -728,19 +705,19 @@ couch_doc_open(Db, DocId, Rev, Options) ->
     case Rev of
     "" -> % open most recent rev
         case couch_db:open_doc(Db, DocId, Options) of
-            {ok, #doc{revs=[DocRev|_]}=Doc} ->
-                {Doc, DocRev};
-            Error ->
-                throw(Error)
-            end;
-        _ -> % open a specific rev (deletions come back as stubs)
-            case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
-            {ok, [{ok, Doc}]} ->
-                {Doc, Rev};
-            {ok, [Else]} ->
-                throw(Else)
-            end
-        end.
+        {ok, #doc{revs=[DocRev|_]}=Doc} ->
+            {Doc, DocRev};
+        Error ->
+            throw(Error)
+        end;
+    _ -> % open a specific rev (deletions come back as stubs)
+        case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
+        {ok, [{ok, Doc}]} ->
+            {Doc, Rev};
+        {ok, [Else]} ->
+            throw(Else)
+        end
+    end.

 % Attachment request handlers
@@ -773,8 +750,6 @@ handle_attachment_request(Req, 'GET', _DbName, Db, DocId, FileName) ->

 handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
     when (Method == 'PUT') or (Method == 'DELETE') ->
-    Rev = extract_header_rev(Req),
-
     NewAttachment = case Method of
         'DELETE' ->
             [];
@@ -785,12 +760,12 @@ handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
             }}]
     end,

-    Doc = case Rev of
+    Doc = case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
     missing_rev -> % make the new doc
         #doc{id=DocId};
-    _ ->
+    Rev ->
         case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
-        {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
+        {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
         {ok, [Error]} -> throw(Error)
         end
     end,
@@ -800,7 +775,7 @@ handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
         attachments = NewAttachment ++ proplists:delete(FileName, Attachments)
     },
     {ok, UpdatedRev} = couch_db:update_doc(Db, DocEdited, []),
-    send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {obj, [
+    send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {[
         {ok, true},
         {id, DocId},
         {rev, UpdatedRev}
@@ -871,8 +846,8 @@ handle_config_request(Req, 'DELETE', {[Section, Option]}) ->

 % View request handling internals

-reverse_key_default(nil) -> <<>>;
-reverse_key_default(<<>>) -> nil;
+reverse_key_default(nil) -> {};
+reverse_key_default({}) -> nil;
 reverse_key_default(Key) -> Key.

 parse_view_query(Req) ->
@@ -882,16 +857,16 @@ parse_view_query(Req) ->
         {"", _} ->
             Args;
         {"key", Value} ->
-            JsonKey = cjson:decode(Value),
+            JsonKey = ?JSON_DECODE(Value),
             Args#view_query_args{start_key=JsonKey,end_key=JsonKey};
         {"startkey_docid", DocId} ->
-            Args#view_query_args{start_docid=DocId};
+            Args#view_query_args{start_docid=list_to_binary(DocId)};
         {"endkey_docid", DocId} ->
-            Args#view_query_args{end_docid=DocId};
+            Args#view_query_args{end_docid=list_to_binary(DocId)};
         {"startkey", Value} ->
-            Args#view_query_args{start_key=cjson:decode(Value)};
+            Args#view_query_args{start_key=?JSON_DECODE(Value)};
         {"endkey", Value} ->
-            Args#view_query_args{end_key=cjson:decode(Value)};
+            Args#view_query_args{end_key=?JSON_DECODE(Value)};
         {"count", Value} ->
             case (catch list_to_integer(Value)) of
             Count when is_integer(Count) ->
@@ -966,11 +941,11 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
         case Dir of
         fwd ->
             fun(ViewKey, ViewId) ->
-                couch_view:less_json({EndKey, EndDocId}, {ViewKey, ViewId})
+                couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId])
             end;
         rev->
             fun(ViewKey, ViewId) ->
-                couch_view:less_json({ViewKey, ViewId}, {EndKey, EndDocId})
+                couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId])
             end
         end,
@@ -991,18 +966,19 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
                     lists:min([TotalViewCount - Offset, - AccCount]),
                 JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
                         [TotalViewCount, Offset2]),
-                Resp2:write_chunk(lists:flatten(JsonBegin)),
-                JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
-                {ok, {AccCount + 1, 0, Resp2, [cjson:encode(JsonObj) | AccRevRows]}};
+                Resp2:write_chunk(JsonBegin),
+                JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+                {ok, {AccCount + 1, 0, Resp2, [?JSON_ENCODE(JsonObj) | AccRevRows]}};
             {_, AccCount, _, Resp} ->
-                JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
-                {ok, {AccCount + 1, 0, Resp, [cjson:encode(JsonObj), ",\r\n" | AccRevRows]}}
+
+                JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+                {ok, {AccCount + 1, 0, Resp, [?JSON_ENCODE(JsonObj), ",\r\n" | AccRevRows]}}
             end
         end,

     PosCountFun = fun({{Key, DocId}, Value}, OffsetReds,
             {AccCount, AccSkip, Resp, AccRevRows}) ->
-        Offset = ReduceCountFun(OffsetReds),
+        Offset = ReduceCountFun(OffsetReds), % I think we only need this call once per view
         PassedEnd = PassedEndFun(Key, DocId),
         case {PassedEnd, AccCount, AccSkip, Resp} of
         {true, _, _, _} ->
@@ -1017,12 +993,13 @@ make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
                 Resp2 = start_json_response(Req, 200),
                 JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
                         [TotalViewCount, Offset]),
-                JsonObj = {obj, [{id, DocId}, {key, Key}, {value, Value}]},
-                Resp2:write_chunk(lists:flatten(JsonBegin ++ cjson:encode(JsonObj))),
+                JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+
+                Resp2:write_chunk(JsonBegin ++ ?JSON_ENCODE(JsonObj)),
                 {ok, {AccCount - 1, 0, Resp2, AccRevRows}};
             {_, AccCount, _, Resp} when (AccCount > 0) ->
-                JsonObj = {obj, [{"id", DocId}, {"key", Key}, {"value", Value}]},
-                Resp:write_chunk(",\r\n" ++ lists:flatten(cjson:encode(JsonObj))),
+                JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
+                Resp:write_chunk(",\r\n" ++ ?JSON_ENCODE(JsonObj)),
                 {ok, {AccCount - 1, 0, Resp, AccRevRows}}
             end
         end,
@@ -1036,13 +1013,13 @@ finish_view_fold(Req, TotalRows, FoldResult) ->
     {ok, {_, _, undefined, _}} ->
         % nothing found in the view, nothing has been returned
         % send empty view
-        send_json(Req, 200, {obj, [
+        send_json(Req, 200, {[
             {total_rows, TotalRows},
-            {rows, {}}
+            {rows, []}
         ]});
     {ok, {_, _, Resp, AccRevRows}} ->
         % end the view
-        Resp:write_chunk(lists:flatten(AccRevRows) ++ "\r\n]}"),
+        Resp:write_chunk(AccRevRows ++ "\r\n]}"),
         end_json_response(Resp);
     Error ->
         throw(Error)
@@ -1076,8 +1053,8 @@ parse_doc_query(Req) ->
         {"open_revs", "all"} ->
             Args#doc_query_args{open_revs=all};
         {"open_revs", RevsJsonStr} ->
-            JsonArray = cjson:decode(RevsJsonStr),
-            Args#doc_query_args{open_revs=tuple_to_list(JsonArray)};
+            JsonArray = ?JSON_DECODE(RevsJsonStr),
+            Args#doc_query_args{open_revs=JsonArray};
         _Else -> % unknown key value pair, ignore.
             Args
         end
@@ -1086,7 +1063,7 @@ parse_doc_query(Req) ->
 % Utilities

 none_match(Req, Tag) ->
-    Etag = "\"" ++ Tag ++ "\"",
+    Etag = "\"" ++ binary_to_list(Tag) ++ "\"",
     Etags = case Req:get_header_value("If-None-Match") of
     undefined ->
         [];
@@ -1109,9 +1086,9 @@ error_to_json(Error) ->
         _ ->
             lists:flatten(io_lib:format("~p", [Reason])) % else term to text
         end,
-    Json = {obj, [
-        {error, atom_to_list(Atom)},
-        {reason, FormattedReason}
+    Json = {[
+        {error, Atom},
+        {reason, list_to_binary(FormattedReason)}
     ]},
     {HttpCode, Json}.
@@ -1136,17 +1113,18 @@ error_to_json0({Id, Reason}) when is_atom(Id) ->
 error_to_json0(Error) ->
     {500, error, Error}.

-extract_header_rev(Req) ->
-    QueryRev = proplists:get_value("rev", Req:parse_qs()),
+extract_header_rev(Req, ExplictRev) when is_list(ExplictRev)->
+    extract_header_rev(Req, list_to_binary(ExplictRev));
+extract_header_rev(Req, ExplictRev) ->
     Etag = case Req:get_header_value("If-Match") of
         undefined -> undefined;
         Tag -> string:strip(Tag, both, $")
     end,
-    case {QueryRev, Etag} of
+    case {ExplictRev, Etag} of
     {undefined, undefined} -> missing_rev;
-    {_, undefined} -> QueryRev;
-    {undefined, _} -> Etag;
-    _ when QueryRev == Etag -> Etag;
+    {_, undefined} -> ExplictRev;
+    {undefined, _} -> list_to_binary(Etag);
+    _ when ExplictRev == Etag -> list_to_binary(Etag);
     _ -> throw({bad_request, "Document rev and etag have different values"})
     end.
@@ -1154,12 +1132,12 @@ extract_header_rev(Req) ->
 parse_copy_destination_header(Req) ->
     Destination = Req:get_header_value("Destination"),
     case regexp:match(Destination, "\\?") of
-    nomatch ->
-        {Destination, []};
-    {match, _, _} ->
-        {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
-        {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
-        {DocId, [Rev]}
+    nomatch ->
+        {list_to_binary(Destination), []};
+    {match, _, _} ->
+        {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
+        {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
+        {list_to_binary(DocId), [list_to_binary(Rev)]}
     end.

 send_error(Req, {method_not_allowed, Methods}) ->
@@ -1187,7 +1165,7 @@ send_json(Req, Code, Headers, Value) ->
         {"Content-Type", negotiate_content_type(Req)},
         {"Cache-Control", "must-revalidate"}
     ] ++ server_header(),
-    Body = cjson:encode(Value),
+    Body = ?JSON_ENCODE(Value),
     Resp = Req:respond({Code, DefaultHeaders ++ Headers, Body}),
     {ok, Resp}.
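
For reference, a hedged sketch of what a decoded request body looks like under the new term format, using the _bulk_docs handler above as the example (the literal document values here are made up for illustration):

    %% HTTP body: {"new_edits": true, "docs": [{"_id": "foo", "_rev": "1-abc"}]}
    {JsonProps} = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
    %% JsonProps is now a proplist with binary keys, e.g.
    %%   [{<<"new_edits">>, true},
    %%    {<<"docs">>, [{[{<<"_id">>, <<"foo">>}, {<<"_rev">>, <<"1-abc">>}]}]}]
    DocsArray = proplists:get_value(<<"docs">>, JsonProps),
    NewEdits = proplists:get_value(<<"new_edits">>, JsonProps, true)

Revision handling is consolidated in the same spirit: extract_header_rev/2 now takes the explicitly supplied revision (from the query string or the decoded document) and reconciles it against the If-Match header, returning a binary revision or the atom missing_rev, so the DELETE, PUT, COPY, MOVE and attachment handlers no longer duplicate that logic.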