Diffstat (limited to 'src')
-rw-r--r--  src/couchdb/Makefile.am          |    6
-rw-r--r--  src/couchdb/couch_db.erl         |   64
-rw-r--r--  src/couchdb/couch_db.hrl         |   25
-rw-r--r--  src/couchdb/couch_db_updater.erl |   36
-rw-r--r--  src/couchdb/couch_file.erl       |    2
-rw-r--r--  src/couchdb/couch_httpd.erl      | 1306
-rw-r--r--  src/couchdb/couch_server.erl     |    2
-rw-r--r--  src/couchdb/couch_server_sup.erl |    3
-rw-r--r--  src/couchdb/couch_util.erl       |   18
-rw-r--r--  src/couchdb/couch_view.erl       |    9
10 files changed, 295 insertions(+), 1176 deletions(-)
diff --git a/src/couchdb/Makefile.am b/src/couchdb/Makefile.am
index 38970b07..83220a57 100644
--- a/src/couchdb/Makefile.am
+++ b/src/couchdb/Makefile.am
@@ -45,6 +45,9 @@ source_files = \
couch_event_sup.erl \
couch_file.erl \
couch_httpd.erl \
+ couch_httpd_db.erl \
+ couch_httpd_view.erl \
+ couch_httpd_misc_handlers.erl \
couch_ft_query.erl \
couch_key_tree.erl \
couch_log.erl \
@@ -70,6 +73,9 @@ compiled_files = \
couch_file.beam \
couch_ft_query.beam \
couch_httpd.beam \
+ couch_httpd_db.beam \
+ couch_httpd_view.beam \
+ couch_httpd_misc_handlers.beam \
couch_key_tree.beam \
couch_log.beam \
couch_query_servers.beam \
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl
index 34f92222..f74a83fc 100644
--- a/src/couchdb/couch_db.erl
+++ b/src/couchdb/couch_db.erl
@@ -17,7 +17,7 @@
-export([open_ref_counted/2,num_refs/1,monitor/1]).
-export([save_docs/3,update_doc/3,update_docs/2,update_docs/3,delete_doc/3]).
-export([get_doc_info/2,open_doc/2,open_doc/3,open_doc_revs/4]).
--export([get_missing_revs/2]).
+-export([get_missing_revs/2,name/1]).
-export([enum_docs/4,enum_docs/5,enum_docs_since/4,enum_docs_since/5]).
-export([enum_docs_since_reduce_to_count/1,enum_docs_reduce_to_count/1]).
-export([increment_update_seq/1,get_purge_seq/1,purge_docs/2,get_last_purged/1]).
@@ -27,44 +27,47 @@
-include("couch_db.hrl").
+
start_link(DbName, Filepath, Options) ->
- catch start_link0(DbName, Filepath, Options).
-
-start_link0(DbName, Filepath, Options) ->
- Fd =
+ case open_db_file(Filepath, Options) of
+ {ok, Fd} ->
+ StartResult = gen_server:start_link(couch_db, {DbName, Filepath, Fd, Options}, []),
+ unlink(Fd),
+ case StartResult of
+ {ok, _} ->
+ % We successfully opened the db; delete the old storage file if one is around
+ case file:delete(Filepath ++ ".old") of
+ ok ->
+ ?LOG_INFO("Deleted old storage file ~s~s", [Filepath, ".old"]);
+ {error, enoent} ->
+ ok % normal result
+ end,
+ StartResult;
+ Error ->
+ Error
+ end;
+ Else ->
+ Else
+ end.
+
+open_db_file(Filepath, Options) ->
case couch_file:open(Filepath, Options) of
- {ok, Fd0} ->
- Fd0;
+ {ok, Fd} ->
+ {ok, Fd};
{error, enoent} ->
% couldn't find file. Is there a compact version? This can happen if we
% crashed during the file switch.
case couch_file:open(Filepath ++ ".compact") of
- {ok, Fd0} ->
+ {ok, Fd} ->
?LOG_INFO("Found ~s~s compaction file, using as primary storage.", [Filepath, ".compact"]),
ok = file:rename(Filepath ++ ".compact", Filepath),
- Fd0;
+ {ok, Fd};
{error, enoent} ->
- throw(not_found)
+ not_found
end;
- Else ->
- throw(Else)
- end,
-
- StartResult = gen_server:start_link(couch_db, {DbName, Filepath, Fd, Options}, []),
- unlink(Fd),
- case StartResult of
- {ok, _} ->
- % We successfully opened the db, delete old storage files if around
- case file:delete(Filepath ++ ".old") of
- ok ->
- ?LOG_INFO("Deleted old storage file ~s~s", [Filepath, ".old"]);
- {error, enoent} ->
- ok % normal result
- end;
- _ ->
- ok
- end,
- StartResult.
+ Error ->
+ Error
+ end.
create(DbName, Options) ->
@@ -178,6 +181,9 @@ get_db_info(Db) ->
],
{ok, InfoList}.
+name(#db{name=Name}) ->
+ Name.
+
update_doc(Db, Doc, Options) ->
{ok, [NewRev]} = update_docs(Db, [Doc], Options),
{ok, NewRev}.
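
Note on the refactor above: start_link/3 no longer wraps its whole body in a
catch; open failures now flow back as values. A caller-side sketch (the
caller shown here is illustrative, not part of this patch):

    case couch_db:start_link(DbName, Filepath, Options) of
    {ok, DbPid} ->
        {ok, DbPid};
    not_found ->
        % neither Filepath nor Filepath ++ ".compact" exists on disk
        {error, no_db_file};
    Error ->
        % couch_file or gen_server failure, passed through unchanged
        Error
    end
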
diff --git a/src/couchdb/couch_db.hrl b/src/couchdb/couch_db.hrl
index f4533146..f5f4a0f1 100644
--- a/src/couchdb/couch_db.hrl
+++ b/src/couchdb/couch_db.hrl
@@ -52,6 +52,14 @@
rev_tree = []
}).
+-record(httpd,
+ {mochi_req,
+ method,
+ path_parts,
+ db_url_handlers
+ }).
+
+
-record(doc,
{
id = <<"">>,
@@ -103,7 +111,22 @@
filepath
}).
-
+
+-record(view_query_args, {
+ start_key = nil,
+ end_key = {},
+ count = 10000000000, % a huge default number, chosen so we don't need
+                      % separate logic for when there is no count limit
+ update = true,
+ direction = fwd,
+ start_docid = nil,
+ end_docid = {},
+ skip = 0,
+ group_level = 0,
+ reduce = true
+}).
+
% small value used in revision trees to indicate the revision isn't stored
-define(REV_MISSING, []).
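
With #view_query_args now in the shared header, the view-handling code split
out into couch_httpd_view.erl can include it as well. An illustrative
construction (values hypothetical) for a descending, non-reduced query of at
most 10 rows; the nil/{} defaults appear to act as the unbounded start/end
sentinels that the query-parsing code swaps when the direction is reversed:

    Args = #view_query_args{direction=rev, count=10, reduce=false}
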
diff --git a/src/couchdb/couch_db_updater.erl b/src/couchdb/couch_db_updater.erl
index a368ccac..6982fb01 100644
--- a/src/couchdb/couch_db_updater.erl
+++ b/src/couchdb/couch_db_updater.erl
@@ -478,28 +478,40 @@ copy_rev_tree(SrcFd, DestFd, DestStream, [{RevId, _, SubTree} | RestTree]) ->
% inner node, only copy info/data from leaf nodes
[{RevId, ?REV_MISSING, copy_rev_tree(SrcFd, DestFd, DestStream, SubTree)} | copy_rev_tree(SrcFd, DestFd, DestStream, RestTree)].
-copy_docs(#db{fd=SrcFd}=Db, #db{fd=DestFd,summary_stream=DestStream}=NewDb, InfoBySeq) ->
+copy_docs(#db{fd=SrcFd}=Db, #db{fd=DestFd,summary_stream=DestStream}=NewDb, InfoBySeq, Retry) ->
Ids = [Id || #doc_info{id=Id} <- InfoBySeq],
LookupResults = couch_btree:lookup(Db#db.fulldocinfo_by_id_btree, Ids),
NewFullDocInfos = lists:map(
fun({ok, #full_doc_info{rev_tree=RevTree}=Info}) ->
Info#full_doc_info{rev_tree=copy_rev_tree(SrcFd, DestFd, DestStream, RevTree)}
end, LookupResults),
- NewDocInfos = [couch_doc:to_doc_info(FullDocInfo) || FullDocInfo <- NewFullDocInfos],
- {ok, DocInfoBTree} =
- couch_btree:add_remove(NewDb#db.docinfo_by_seq_btree, NewDocInfos, []),
- {ok, FullDocInfoBTree} =
- couch_btree:add_remove(NewDb#db.fulldocinfo_by_id_btree, NewFullDocInfos, []),
- NewDb#db{fulldocinfo_by_id_btree=FullDocInfoBTree, docinfo_by_seq_btree=DocInfoBTree}.
+ NewDocInfos = [couch_doc:to_doc_info(Info) || Info <- NewFullDocInfos],
+ RemoveSeqs =
+ case Retry of
+ false ->
+ [];
+ true ->
+ % We are retrying a compaction, meaning the documents we are copying may
+ % already exist in our file and must be removed from the by_seq index.
+ Existing = couch_btree:lookup(NewDb#db.fulldocinfo_by_id_btree, Ids),
+ [Seq || {ok, #full_doc_info{update_seq=Seq}} <- Existing]
+ end,
+
+ {ok, DocInfoBTree} = couch_btree:add_remove(
+ NewDb#db.docinfo_by_seq_btree, NewDocInfos, RemoveSeqs),
+ {ok, FullDocInfoBTree} = couch_btree:add_remove(
+ NewDb#db.fulldocinfo_by_id_btree, NewFullDocInfos, []),
+ NewDb#db{ fulldocinfo_by_id_btree=FullDocInfoBTree,
+ docinfo_by_seq_btree=DocInfoBTree}.
-copy_compact_docs(Db, NewDb) ->
+copy_compact_docs(Db, NewDb, Retry) ->
EnumBySeqFun =
fun(#doc_info{update_seq=Seq}=DocInfo, _Offset, {AccNewDb, AccUncopied}) ->
case couch_util:should_flush() of
true ->
- NewDb2 = copy_docs(Db, AccNewDb, lists:reverse([DocInfo | AccUncopied])),
+ NewDb2 = copy_docs(Db, AccNewDb, lists:reverse([DocInfo | AccUncopied]), Retry),
{ok, {commit_data(NewDb2#db{update_seq=Seq}), []}};
false ->
{ok, {AccNewDb, [DocInfo | AccUncopied]}}
@@ -511,7 +523,7 @@ copy_compact_docs(Db, NewDb) ->
case Uncopied of
[#doc_info{update_seq=LastSeq} | _] ->
commit_data( copy_docs(Db, NewDb2#db{update_seq=LastSeq},
- lists:reverse(Uncopied)));
+ lists:reverse(Uncopied), Retry));
[] ->
NewDb2
end.
@@ -522,13 +534,15 @@ start_copy_compact_int(#db{name=Name,filepath=Filepath}=Db) ->
case couch_file:open(CompactFile) of
{ok, Fd} ->
?LOG_DEBUG("Found existing compaction file for db \"~s\"", [Name]),
+ Retry = true,
{ok, Header} = couch_file:read_header(Fd, ?HEADER_SIG);
{error, enoent} ->
{ok, Fd} = couch_file:open(CompactFile, [create]),
+ Retry = false,
ok = couch_file:write_header(Fd, ?HEADER_SIG, Header=#db_header{})
end,
NewDb = init_db(Name, CompactFile, Fd, Header),
- NewDb2 = commit_data(copy_compact_docs(Db, NewDb)),
+ NewDb2 = copy_compact_docs(Db, NewDb, Retry),
close_db(NewDb2),
gen_server:cast(Db#db.update_pid, {compact_done, CompactFile}).
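
Why the Retry flag: if a compaction crashed and is restarted, documents
copied in the first attempt already sit in the .compact file under older
update sequence numbers, so copy_docs/4 must drop those stale by_seq entries
while inserting the fresh ones. A sketch with hypothetical data:

    % by_seq btree in the .compact file before the retried copy of <<"x">>:
    %   {3, DocInfoX}          <- stale entry from the first attempt
    % copy_docs/4 finds update_seq 3 via the by_id btree, then calls
    couch_btree:add_remove(SeqBt, [DocInfoX_seq7], [3])
    % leaving a single, current entry:
    %   {7, DocInfoX_seq7}
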
diff --git a/src/couchdb/couch_file.erl b/src/couchdb/couch_file.erl
index 13ce3aad..d844d5b2 100644
--- a/src/couchdb/couch_file.erl
+++ b/src/couchdb/couch_file.erl
@@ -315,7 +315,7 @@ init({Filepath, Options, ReturnPid}) ->
init_status_ok(ReturnPid, Fd);
false ->
ok = file:close(Fd),
- init_status_error(ReturnPid, {error, file_exists})
+ init_status_error(ReturnPid, file_exists)
end;
false ->
init_status_ok(ReturnPid, Fd)
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index ba5ba8bb..e0f6b4f5 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -13,31 +13,19 @@
-module(couch_httpd).
-include("couch_db.hrl").
--export([start_link/0, stop/0, handle_request/1, handle_request/2]).
+-export([start_link/0, stop/0, handle_request/3]).
-% Maximum size of document PUT request body (4GB)
--define(MAX_DOC_SIZE, (4*1024*1024*1024)).
+-export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,unquote/1]).
+-export([parse_form/1,json_body/1,body/1,doc_etag/1]).
+-export([primary_header_value/2,partition/1,serve_file/3]).
+-export([start_chunked_response/3,send_chunk/2]).
+-export([start_json_response/2, start_json_response/3, end_json_response/1]).
+-export([send_response/4,send_method_not_allowed/2]).
+-export([send_json/2,send_json/3,send_json/4]).
--record(doc_query_args, {
- options = [],
- rev = "",
- open_revs = ""
-}).
--record(view_query_args, {
- start_key = nil,
- end_key = {},
- count = 10000000000, % a huge huge default number. Picked so we don't have
- % to do different logic for when there is no count
- % limit
- update = true,
- direction = fwd,
- start_docid = nil,
- end_docid = {},
- skip = 0,
- group_level = 0,
- reduce = true
-}).
+% Maximum size of document PUT request body (4GB)
+-define(MAX_DOC_SIZE, (4*1024*1024*1024)).
start_link() ->
% read config and register for configuration changes
@@ -47,11 +35,35 @@ start_link() ->
BindAddress = couch_config:get("httpd", "bind_address", any),
Port = couch_config:get("httpd", "port", "5984"),
- DocumentRoot = couch_config:get("httpd", "utils_dir", "../../share/www"),
+
+ UrlHandlersList = lists:map(
+ fun({UrlKey, SpecStr}) ->
+ case couch_util:parse_term(SpecStr) of
+ {ok, {M, F, A}} ->
+ {list_to_binary(UrlKey), fun(Req) -> apply(M, F, [Req, A]) end};
+ {ok, {M, F}} ->
+ {list_to_binary(UrlKey), fun(Req) -> apply(M, F, [Req]) end}
+ end
+ end, couch_config:get("httpd_global_handlers")),
+
+ DbUrlHandlersList = lists:map(
+ fun({UrlKey, SpecStr}) ->
+ case couch_util:parse_term(SpecStr) of
+ {ok, {M, F, A}} ->
+ {list_to_binary(UrlKey),
+ fun(Req, Db) -> apply(M, F, [Req, Db, A]) end};
+ {ok, {M, F}} ->
+ {list_to_binary(UrlKey),
+ fun(Req, Db) -> apply(M, F, [Req, Db]) end}
+ end
+ end, couch_config:get("httpd_db_handlers")),
+ UrlHandlers = dict:from_list(UrlHandlersList),
+ DbUrlHandlers = dict:from_list(DbUrlHandlersList),
+ Loop = fun(Req)->
+ apply(?MODULE, handle_request, [Req, UrlHandlers, DbUrlHandlers])
+ end,
% and off we go
- Loop = fun (Req) -> apply(couch_httpd, handle_request,
- [Req, DocumentRoot]) end,
{ok, Pid} = mochiweb_http:start([
{loop, Loop},
{name, ?MODULE},
@@ -63,7 +75,9 @@ start_link() ->
?MODULE:stop();
("httpd", "port") ->
?MODULE:stop();
- ("httpd", "utils_dir") ->
+ ("httpd_global_handlers", _) ->
+ ?MODULE:stop();
+ ("httpd_db_handlers", _) ->
?MODULE:stop()
end, Pid),
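
The handler tables above are built from config entries parsed as Erlang terms
by couch_util:parse_term/1 (added to couch_util.erl further down). A sketch
of matching .ini entries; the section names come from the code, while the
module/function names are guesses based on the new modules added in the
Makefile:

    % [httpd_global_handlers]
    % / = {couch_httpd_misc_handlers, handle_welcome_req}
    % _utils = {couch_httpd_misc_handlers, handle_utils_dir_req, "../../share/www"}
    %
    % [httpd_db_handlers]
    % _view = {couch_httpd_view, handle_db_view_req}
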
@@ -72,1124 +86,131 @@ start_link() ->
stop() ->
mochiweb_http:stop(?MODULE).
-handle_request(config_change) ->
- stop().
-
-handle_request(Req, DocumentRoot) ->
- % alias HEAD to GET as mochiweb takes care of stripping the body
- Method = case Req:get(method) of
- 'HEAD' -> 'GET';
-
- % handling of non standard HTTP verbs. Should be fixed in gen_tcp:recv()
- "COPY" -> 'COPY';
- "MOVE" -> 'MOVE';
- StandardMethod -> StandardMethod
- end,
+handle_request(MochiReq, UrlHandlers, DbUrlHandlers) ->
% for the path, use the raw path with the query string and fragment
% removed, but URL quoting left intact
- {Path, QueryString, _} = mochiweb_util:urlsplit_path(Req:get(raw_path)),
-
- ?LOG_DEBUG("~p ~s ~p~nQuery String: ~p~nHeaders: ~p", [
- Req:get(method),
- Path,
- Req:get(version),
- QueryString,
- mochiweb_headers:to_list(Req:get(headers))
- ]),
-
- {ok, Resp} = case catch(handle_request(Req, DocumentRoot, Method, Path)) of
- {ok, Resp0} ->
- {ok, Resp0};
- Error ->
- send_error(Req, Error)
- end,
-
- ?LOG_INFO("~s - - ~p ~s ~B", [
- Req:get(peer),
- Method,
- Path,
- Resp:get(code)
- ]).
-
-handle_request(Req, DocumentRoot, Method, Path) ->
- % Start = erlang:now(),
- X = handle_request0(Req, DocumentRoot, Method, Path),
- % io:format("now_diff:~p~n", [timer:now_diff(erlang:now(), Start)]),
- X.
-
-handle_request0(Req, DocumentRoot, Method, Path) ->
- case Path of
- "/" ->
- handle_welcome_request(Req, Method);
- "/_all_dbs" ->
- handle_all_dbs_request(Req, Method);
- "/_replicate" ->
- handle_replicate_request(Req, Method);
- "/_restart" ->
- handle_restart_request(Req, Method);
- "/_uuids" ->
- handle_uuids_request(Req, Method);
- "/_utils" ->
- {ok, Req:respond({301, [
- {"Location", "/_utils/"}
- ] ++ server_header(), <<>>})};
- "/_utils/" ++ PathInfo ->
- {ok, Req:serve_file(PathInfo, DocumentRoot, server_header())};
- "/_config/" ++ Config ->
- handle_config_request(Req, Method, {config, Config});
- "/_" ++ _Path ->
- throw({not_found, unknown_private_path});
- "/favicon.ico" ->
- {ok, Req:serve_file("favicon.ico", DocumentRoot)};
- _Else ->
- handle_db_request(Req, Method, {Path})
- end.
-
-% Global request handlers
-
-handle_welcome_request(Req, 'GET') ->
- send_json(Req, {[
- {couchdb, <<"Welcome">>},
- {version, list_to_binary(couch_server:get_version())}
- ]});
-
-handle_welcome_request(_Req, _Method) ->
- throw({method_not_allowed, "GET,HEAD"}).
-
-handle_all_dbs_request(Req, 'GET') ->
- {ok, DbNames} = couch_server:all_databases(),
- send_json(Req, DbNames);
-
-handle_all_dbs_request(_Req, _Method) ->
- throw({method_not_allowed, "GET,HEAD"}).
-
-handle_replicate_request(Req, 'POST') ->
- {Props} = ?JSON_DECODE(Req:recv_body()),
- Source = proplists:get_value(<<"source">>, Props),
- Target = proplists:get_value(<<"target">>, Props),
- {Options} = proplists:get_value(<<"options">>, Props, {[]}),
- {ok, {JsonResults}} = couch_rep:replicate(Source, Target, Options),
- send_json(Req, {[{ok, true} | JsonResults]});
-
-handle_replicate_request(_Req, _Method) ->
- throw({method_not_allowed, "POST"}).
-
-handle_restart_request(Req, 'POST') ->
- Response = send_json(Req, {[{ok, true}]}),
- spawn(fun() -> couch_server:remote_restart() end),
- Response;
-
-handle_restart_request(_Req, _Method) ->
- throw({method_not_allowed, "POST"}).
-
-handle_uuids_request(Req, 'POST') ->
- Count = list_to_integer(proplists:get_value("count", Req:parse_qs(), "1")),
- % generate the uuids
- UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)],
- % send a JSON response
- send_json(Req, {[{"uuids", UUIDs}]});
-
-handle_uuids_request(_Req, _Method) ->
- throw({method_not_allowed, "POST"}).
-
-
-% Database request handlers
-
-handle_db_request(Req, Method, {Path}) ->
- UriParts = string:tokens(Path, "/"),
- [DbName|Rest] =
- [list_to_binary(mochiweb_util:unquote(Part)) || Part <- UriParts],
- handle_db_request(Req, Method, {DbName, Rest});
-
-handle_db_request(Req, 'PUT', {DbName, []}) ->
- case couch_server:create(DbName, []) of
- {ok, Db} ->
- couch_db:close(Db),
- send_json(Req, 201, {[{ok, true}]});
- {error, database_already_exists} ->
- Msg = io_lib:format("Database ~p already exists.", [
- binary_to_list(DbName)
- ]),
- throw({database_already_exists, Msg});
- Error ->
- Msg = io_lib:format("Error creating database ~p: ~p", [
- binary_to_list(DbName), Error
- ]),
- throw({unknown_error, Msg})
- end;
-
-handle_db_request(Req, 'DELETE', {DbName, []}) ->
- case couch_server:delete(DbName) of
- ok ->
- send_json(Req, 200, {[
- {ok, true}
- ]});
- Error ->
- throw(Error)
- end;
-
-handle_db_request(Req, Method, {DbName, Rest}) ->
- case couch_db:open(DbName, []) of
- {ok, Db} ->
- try
- handle_db_request(Req, Method, {DbName, Db, Rest})
- after
- couch_db:close(Db)
- end;
- Error ->
- throw(Error)
- end;
-
-handle_db_request(Req, 'GET', {DbName, Db, []}) ->
- {ok, DbInfo} = couch_db:get_db_info(Db),
- send_json(Req, {[{db_name, DbName} | DbInfo]});
-
-handle_db_request(Req, 'POST', {_DbName, Db, []}) ->
- % TODO: Etag handling
- Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
- Doc = couch_doc:from_json_obj(Json),
- DocId = couch_util:new_uuid(),
- {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=[]}, []),
- send_json(Req, 201, {[
- {ok, true},
- {id, DocId},
- {rev, NewRev}
- ]});
-
-handle_db_request(_Req, _Method, {_DbName, _Db, []}) ->
- throw({method_not_allowed, "DELETE,GET,HEAD,POST"});
-
-handle_db_request(Req, 'POST', {_DbName, Db, [<<"_bulk_docs">>]}) ->
- Options = [], % put options here.
- {JsonProps} = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
- DocsArray = proplists:get_value(<<"docs">>, JsonProps),
- % convert all the doc elements to native docs
- case proplists:get_value(<<"new_edits">>, JsonProps, true) of
- true ->
- Docs = lists:map(
- fun({ObjProps} = JsonObj) ->
- Doc = couch_doc:from_json_obj(JsonObj),
- Id = case Doc#doc.id of
- <<>> -> couch_util:new_uuid();
- Id0 -> Id0
- end,
- Revs = case proplists:get_value(<<"_rev">>, ObjProps) of
- undefined -> [];
- Rev -> [Rev]
- end,
- Doc#doc{id=Id,revs=Revs}
- end,
- DocsArray),
- {ok, ResultRevs} = couch_db:update_docs(Db, Docs, Options),
-
- % output the results
- DocResults = lists:zipwith(
- fun(Doc, NewRev) ->
- {[{"id", Doc#doc.id}, {"rev", NewRev}]}
- end,
- Docs, ResultRevs),
- send_json(Req, 201, {[
- {ok, true},
- {new_revs, DocResults}
- ]});
-
- false ->
- Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
- ok = couch_db:save_docs(Db, Docs, Options),
- send_json(Req, 201, {[
- {ok, true}
- ]})
- end;
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_bulk_docs">>]}) ->
- throw({method_not_allowed, "POST"});
-
-handle_db_request(Req, 'POST', {_DbName, Db, [<<"_compact">>]}) ->
- ok = couch_db:start_compact(Db),
- send_json(Req, 202, {[
- {ok, true}
- ]});
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_compact">>]}) ->
- throw({method_not_allowed, "POST"});
-
-handle_db_request(Req, 'POST', {_DbName, Db, [<<"_purge">>]}) ->
- {IdsRevs} = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
- % validate the json input
- [{_Id, [_|_]=_Revs} = IdRevs || IdRevs <- IdsRevs],
+ RawUri = MochiReq:get(raw_path),
+ {"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri),
- case couch_db:purge_docs(Db, IdsRevs) of
- {ok, PurgeSeq, PurgedIdsRevs} ->
- send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs}}]});
- Error ->
- throw(Error)
- end;
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_purge">>]}) ->
- throw({method_not_allowed, "POST"});
-
-% View request handlers
-
-handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs">>]}) ->
- #view_query_args{
- start_key = StartKey,
- start_docid = StartDocId,
- count = Count,
- skip = SkipCount,
- direction = Dir
- } = QueryArgs = parse_view_query(Req),
- {ok, Info} = couch_db:get_db_info(Db),
- TotalRowCount = proplists:get_value(doc_count, Info),
-
- StartId = if is_binary(StartKey) -> StartKey;
- true -> StartDocId
- end,
-
- FoldlFun = make_view_fold_fun(Req, QueryArgs, TotalRowCount,
- fun couch_db:enum_docs_reduce_to_count/1),
- AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
- case couch_doc:to_doc_info(FullDocInfo) of
- #doc_info{deleted=false, rev=Rev} ->
- FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
- #doc_info{deleted=true} ->
- {ok, Acc}
- end
- end,
- {ok, FoldResult} = couch_db:enum_docs(Db, StartId, Dir, AdapterFun,
- {Count, SkipCount, undefined, []}),
- finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_all_docs">>]}) ->
- throw({method_not_allowed, "GET,HEAD"});
-
-handle_db_request(Req, 'GET', {_DbName, Db, [<<"_all_docs_by_seq">>]}) ->
- #view_query_args{
- start_key = StartKey,
- count = Count,
- skip = SkipCount,
- direction = Dir
- } = QueryArgs = parse_view_query(Req),
-
- {ok, Info} = couch_db:get_db_info(Db),
- TotalRowCount = proplists:get_value(doc_count, Info),
-
- FoldlFun = make_view_fold_fun(Req, QueryArgs, TotalRowCount,
- fun couch_db:enum_docs_since_reduce_to_count/1),
- StartKey2 = case StartKey of
- nil -> 0;
- <<>> -> 100000000000;
- StartKey when is_integer(StartKey) -> StartKey
+ HandlerKey =
+ case mochiweb_util:partition(Path, "/") of
+ {"", "", ""} ->
+ <<"/">>; % Special case the root url handler
+ {FirstPart, _, _} ->
+ list_to_binary(FirstPart)
end,
- {ok, FoldResult} = couch_db:enum_docs_since(Db, StartKey2, Dir,
- fun(DocInfo, Offset, Acc) ->
- #doc_info{
- id=Id,
- rev=Rev,
- update_seq=UpdateSeq,
- deleted=Deleted,
- conflict_revs=ConflictRevs,
- deleted_conflict_revs=DelConflictRevs
- } = DocInfo,
- Json = {
- [{"rev", Rev}] ++
- case ConflictRevs of
- [] -> [];
- _ -> [{"conflicts", ConflictRevs}]
- end ++
- case DelConflictRevs of
- [] -> [];
- _ -> [{"deleted_conflicts", DelConflictRevs}]
- end ++
- case Deleted of
- true -> [{"deleted", true}];
- false -> []
- end
- },
- FoldlFun({{UpdateSeq, Id}, Json}, Offset, Acc)
- end, {Count, SkipCount, undefined, []}),
- finish_view_fold(Req, TotalRowCount, {ok, FoldResult});
-
-handle_db_request(_Req, _Method, {_DbName, _Db, ["_all_docs_by_seq"]}) ->
- throw({method_not_allowed, "GET,HEAD"});
-
-handle_db_request(Req, 'GET', {DbName, _Db, [<<"_view">>, DocId, ViewName]}) ->
- #view_query_args{
- start_key = StartKey,
- count = Count,
- skip = SkipCount,
- direction = Dir,
- start_docid = StartDocId,
- reduce = Reduce
- } = QueryArgs = parse_view_query(Req),
+ ?LOG_DEBUG("~p ~s ~p~nHeaders: ~p", [
+ MochiReq:get(method),
+ RawUri,
+ MochiReq:get(version),
+ mochiweb_headers:to_list(MochiReq:get(headers))
+ ]),
- case couch_view:get_map_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
- {ok, View} ->
- {ok, RowCount} = couch_view:get_row_count(View),
- Start = {StartKey, StartDocId},
- FoldlFun = make_view_fold_fun(Req, QueryArgs, RowCount,
- fun couch_view:reduce_to_count/1),
- FoldAccInit = {Count, SkipCount, undefined, []},
- FoldResult = couch_view:fold(View, Start, Dir, FoldlFun, FoldAccInit),
- finish_view_fold(Req, RowCount, FoldResult);
- {not_found, Reason} ->
- case couch_view:get_reduce_view({DbName, <<"_design/", DocId/binary>>, ViewName}) of
- {ok, View} ->
- case Reduce of
- false ->
- {reduce, _N, _Lang, MapView} = View,
- {ok, RowCount} = couch_view:get_row_count(MapView),
- Start = {StartKey, StartDocId},
- FoldlFun = make_view_fold_fun(Req, QueryArgs, RowCount,
- fun couch_view:reduce_to_count/1),
- FoldAccInit = {Count, SkipCount, undefined, []},
- FoldResult = couch_view:fold(MapView, Start, Dir, FoldlFun, FoldAccInit),
- finish_view_fold(Req, RowCount, FoldResult);
- _ ->
- output_reduce_view(Req, View)
- end;
- _ ->
- throw({not_found, Reason})
- end
- end;
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_view">>, _DocId, _ViewName]}) ->
- throw({method_not_allowed, "GET,HEAD"});
-
-handle_db_request(Req, 'POST', {_DbName, Db, [<<"_missing_revs">>]}) ->
- {JsonDocIdRevs} = ?JSON_DECODE(Req:recv_body()),
- {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs),
- send_json(Req, {[
- {missing_revs, {Results}}
- ]});
-
-handle_db_request(Req, 'POST', {_DbName, Db, [<<"_increment_update_seq">>]}) ->
- % NOTE, use at own risk. This functionality is experimental
- % and might go away entirely.
- {ok, NewSeq} = couch_db:increment_update_seq(Db),
- send_json(Req, {[{ok, true},
- {update_seq, NewSeq}
- ]});
-
-handle_db_request(Req, 'POST', {DbName, _Db, [<<"_temp_view">>]}) ->
- #view_query_args{
- start_key = StartKey,
- count = Count,
- skip = SkipCount,
- direction = Dir,
- start_docid = StartDocId
- } = QueryArgs = parse_view_query(Req),
-
- case Req:get_primary_header_value("content-type") of
- undefined -> ok;
- "application/json" -> ok;
- Else -> throw({incorrect_mime_type, Else})
- end,
- {Props} = ?JSON_DECODE(Req:recv_body()),
- Language = proplists:get_value(<<"language">>, Props, <<"javascript">>),
- MapSrc = proplists:get_value(<<"map">>, Props),
- case proplists:get_value(<<"reduce">>, Props, null) of
- null ->
- {ok, View} = couch_view:get_map_view({temp, DbName, Language, MapSrc}),
- Start = {StartKey, StartDocId},
+ Method =
+ case MochiReq:get(method) of
+ % alias HEAD to GET as mochiweb takes care of stripping the body
+ 'HEAD' -> 'GET';
- {ok, TotalRows} = couch_view:get_row_count(View),
+ % already an atom
+ Meth when is_atom(Meth) -> Meth;
- FoldlFun = make_view_fold_fun(Req, QueryArgs, TotalRows,
- fun couch_view:reduce_to_count/1),
- FoldAccInit = {Count, SkipCount, undefined, []},
- FoldResult = couch_view:fold(View, Start, Dir, fun(A, B, C) ->
- FoldlFun(A, B, C)
- end, FoldAccInit),
- finish_view_fold(Req, TotalRows, FoldResult);
-
- RedSrc ->
- {ok, View} = couch_view:get_reduce_view(
- {temp, DbName, Language, MapSrc, RedSrc}),
- output_reduce_view(Req, View)
- end;
-
-handle_db_request(_Req, _Method, {_DbName, _Db, [<<"_temp_view">>]}) ->
- throw({method_not_allowed, "POST"});
-
-% Document request handlers
-
-handle_db_request(Req, Method, {DbName, Db, [<<"_design">>, Name]}) ->
- % Special case to enable using an unencoded in the URL of design docs, as
- % slashes in document IDs must otherwise be URL encoded
- handle_db_request(Req, Method, {DbName, Db, [<<"_design/", Name/binary>>]});
-
-handle_db_request(Req, Method, {DbName, Db, [DocId]}) ->
- handle_doc_request(Req, Method, DbName, Db,DocId);
-
-handle_db_request(Req, Method, {DbName, Db, [DocId, FileName]}) ->
- handle_attachment_request(Req, Method, DbName, Db, DocId,
- FileName).
-
-output_reduce_view(Req, View) ->
- #view_query_args{
- start_key = StartKey,
- end_key = EndKey,
- count = Count,
- skip = Skip,
- direction = Dir,
- start_docid = StartDocId,
- end_docid = EndDocId,
- group_level = GroupLevel
- } = parse_view_query(Req),
- GroupRowsFun =
- fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
- true;
- ({Key1,_}, {Key2,_})
- when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
- lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
- ({Key1,_}, {Key2,_}) ->
- Key1 == Key2
- end,
- Resp = start_json_response(Req, 200),
- Resp:write_chunk("{\"rows\":["),
- {ok, _} = couch_view:fold_reduce(View, Dir, {StartKey, StartDocId}, {EndKey, EndDocId},
- GroupRowsFun,
- fun(_Key, _Red, {AccSeparator,AccSkip,AccCount}) when AccSkip > 0 ->
- {ok, {AccSeparator,AccSkip-1,AccCount}};
- (_Key, _Red, {AccSeparator,0,AccCount}) when AccCount == 0 ->
- {stop, {AccSeparator,0,AccCount}};
- (_Key, Red, {AccSeparator,0,AccCount}) when GroupLevel == 0 ->
- Json = ?JSON_ENCODE({[{key, null}, {value, Red}]}),
- Resp:write_chunk(AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}};
- (Key, Red, {AccSeparator,0,AccCount})
- when is_integer(GroupLevel)
- andalso is_list(Key) ->
- Json = ?JSON_ENCODE(
- {[{key, lists:sublist(Key, GroupLevel)},{value, Red}]}),
- Resp:write_chunk(AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}};
- (Key, Red, {AccSeparator,0,AccCount}) ->
- Json = ?JSON_ENCODE({[{key, Key}, {value, Red}]}),
- Resp:write_chunk(AccSeparator ++ Json),
- {ok, {",",0,AccCount-1}}
- end, {"", Skip, Count}),
- Resp:write_chunk("]}"),
- end_json_response(Resp).
-
-
-handle_doc_request(Req, 'DELETE', _DbName, Db, DocId) ->
- case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
- missing_rev ->
- {missing_rev, "Document rev/etag must be specified to delete"};
- RevToDelete ->
- {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
- send_json(Req, 200, {[
- {ok, true},
- {id, DocId},
- {rev, NewRev}
- ]})
- end;
-
-handle_doc_request(Req, 'GET', _DbName, Db, DocId) ->
- #doc_query_args{
- rev = Rev,
- open_revs = Revs,
- options = Options
- } = parse_doc_query(Req),
- case Revs of
- [] ->
- {Doc, DocRev} = couch_doc_open(Db, DocId, Rev, Options),
- Etag = none_match(Req, DocRev),
- AdditionalHeaders = case Doc#doc.meta of
- [] -> [{"Etag", Etag}]; % output etag when we have no meta
- _ -> []
- end,
- JsonDoc = couch_doc:to_json_obj(Doc, Options),
- send_json(Req, 200, AdditionalHeaders, JsonDoc);
- _ ->
- {ok, Results} = couch_db:open_doc_revs(Db, DocId, Revs, Options),
- Resp = start_json_response(Req, 200),
- Resp:write_chunk("["),
- % We loop through the docs. The first time through the separator
- % is whitespace, then a comma on subsequent iterations.
- lists:foldl(
- fun(Result, AccSeparator) ->
- case Result of
- {ok, Doc} ->
- JsonDoc = couch_doc:to_json_obj(Doc, Options),
- Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
- Resp:write_chunk(AccSeparator ++ Json);
- {{not_found, missing}, RevId} ->
- Json = ?JSON_ENCODE({[{"missing", RevId}]}),
- Resp:write_chunk(AccSeparator ++ Json)
- end,
- "," % AccSeparator now has a comma
- end,
- "", Results),
- Resp:write_chunk("]"),
- end_json_response(Resp)
- end;
-
-handle_doc_request(Req, 'POST', _DbName, Db, DocId) ->
- Form = mochiweb_multipart:parse_form(Req),
- Rev = list_to_binary(proplists:get_value("_rev", Form)),
- Doc = case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
- {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
- {ok, [Error]} -> throw(Error)
- end,
-
- NewAttachments = [
- {list_to_binary(Name), {list_to_binary(ContentType), Content}} ||
- {Name, {ContentType, _}, Content} <-
- proplists:get_all_values("_attachments", Form)
- ],
- #doc{attachments=Attachments} = Doc,
- NewDoc = Doc#doc{
- attachments = Attachments ++ NewAttachments
- },
- {ok, NewRev} = couch_db:update_doc(Db, NewDoc, []),
-
- send_json(Req, 201, [{"Etag", "\"" ++ NewRev ++ "\""}], {obj, [
- {ok, true},
- {id, DocId},
- {rev, NewRev}
- ]});
-
-handle_doc_request(Req, 'PUT', _DbName, Db, DocId) ->
- Json = ?JSON_DECODE(Req:recv_body(?MAX_DOC_SIZE)),
- Doc = couch_doc:from_json_obj(Json),
- ExplicitRev =
- case Doc#doc.revs of
- [Rev0|_] -> Rev0;
- [] -> undefined
- end,
- case extract_header_rev(Req, ExplicitRev) of
- missing_rev ->
- Revs = [];
- Rev ->
- Revs = [Rev]
- end,
- {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, []),
- send_json(Req, 201, [{"Etag", <<"\"", NewRev/binary, "\"">>}], {[
- {ok, true},
- {id, DocId},
- {rev, NewRev}
- ]});
-
-handle_doc_request(Req, 'COPY', _DbName, Db, SourceDocId) ->
- SourceRev =
- case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
- missing_rev -> [];
- Rev -> Rev
- end,
-
- {TargetDocId, TargetRev} = parse_copy_destination_header(Req),
-
- % open revision Rev or Current
- {Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),
-
- % save new doc
- {ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRev}, []),
-
- send_json(Req, 201, [{"Etag", "\"" ++ binary_to_list(NewTargetRev) ++ "\""}], {[
- {ok, true},
- {id, TargetDocId},
- {rev, NewTargetRev}
- ]});
-
-handle_doc_request(Req, 'MOVE', _DbName, Db, SourceDocId) ->
- SourceRev =
- case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
- missing_rev ->
- throw({
- bad_request,
- "MOVE requires a specified rev parameter for the origin resource."}
- );
- Rev -> Rev
- end,
-
- {TargetDocId, TargetRev} = parse_copy_destination_header(Req),
- % open revision Rev or Current
- {Doc, _DocRev} = couch_doc_open(Db, SourceDocId, SourceRev, []),
-
- % save new doc & delete old doc in one operation
- Docs = [
- Doc#doc{id=TargetDocId, revs=TargetRev},
- #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
- ],
-
- {ok, ResultRevs} = couch_db:update_docs(Db, Docs, []),
-
- DocResults = lists:zipwith(
- fun(FDoc, NewRev) ->
- {[{id, FDoc#doc.id}, {rev, NewRev}]}
- end,
- Docs, ResultRevs),
- send_json(Req, 201, {[
- {ok, true},
- {new_revs, DocResults}
- ]});
-
-handle_doc_request(_Req, _Method, _DbName, _Db, _DocId) ->
- throw({method_not_allowed, "DELETE,GET,HEAD,POST,PUT,COPY,MOVE"}).
-
-% Useful for debugging
-% couch_doc_open(Db, DocId) ->
-% couch_doc_open(Db, DocId, [], []).
-
-couch_doc_open(Db, DocId, Rev, Options) ->
- case Rev of
- "" -> % open most recent rev
- case couch_db:open_doc(Db, DocId, Options) of
- {ok, #doc{revs=[DocRev|_]}=Doc} ->
- {Doc, DocRev};
- Error ->
- throw(Error)
- end;
- _ -> % open a specific rev (deletions come back as stubs)
- case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
- {ok, [{ok, Doc}]} ->
- {Doc, Rev};
- {ok, [Else]} ->
- throw(Else)
- end
- end.
-
-% Attachment request handlers
-
-handle_attachment_request(Req, 'GET', _DbName, Db, DocId, FileName) ->
- case couch_db:open_doc(Db, DocId, []) of
- {ok, #doc{attachments=Attachments}} ->
- case proplists:get_value(FileName, Attachments) of
- undefined ->
- throw({not_found, missing});
- {Type, Bin} ->
- Resp = Req:respond({200, [
- {"Cache-Control", "must-revalidate"},
- {"Content-Type", binary_to_list(Type)},
- {"Content-Length", integer_to_list(couch_doc:bin_size(Bin))}
- ] ++ server_header(), chunked}),
- couch_doc:bin_foldl(Bin,
- fun(BinSegment, []) ->
- ok = Resp:write_chunk(BinSegment),
- {ok, []}
- end,
- []
- ),
- Resp:write_chunk(""),
- {ok, Resp}
- end;
- Error ->
- throw(Error)
- end;
-
-handle_attachment_request(Req, Method, _DbName, Db, DocId, FileName)
- when (Method == 'PUT') or (Method == 'DELETE') ->
-
- NewAttachment = case Method of
- 'DELETE' ->
- [];
- _ ->
- [{FileName, {
- list_to_binary(Req:get_header_value("Content-Type")),
- Req:recv_body(?MAX_DOC_SIZE)
- }}]
+ % Non-standard HTTP verbs aren't atoms (COPY, MOVE, etc.) so convert when
+ % possible (the atom only exists if some loaded module references it).
+ Meth -> try list_to_existing_atom(Meth) catch error:badarg -> Meth end
end,
-
- Doc = case extract_header_rev(Req, proplists:get_value("rev", Req:parse_qs())) of
- missing_rev -> % make the new doc
- #doc{id=DocId};
- Rev ->
- case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
- {ok, [{ok, Doc0}]} -> Doc0#doc{revs=[Rev]};
- {ok, [Error]} -> throw(Error)
- end
+ HttpReq = #httpd{
+ mochi_req = MochiReq,
+ method = Method,
+ path_parts = [list_to_binary(couch_httpd:unquote(Part))
+ || Part <- string:tokens(Path, "/")],
+ db_url_handlers = DbUrlHandlers
+ },
+
+ DefaultFun = fun couch_httpd_db:handle_request/1,
+ HandlerFun = couch_util:dict_find(HandlerKey, UrlHandlers, DefaultFun),
+
+ {ok, Resp} =
+ try
+ HandlerFun(HttpReq)
+ catch
+ Error ->
+ send_error(HttpReq, Error)
end,
- #doc{attachments=Attachments} = Doc,
- DocEdited = Doc#doc{
- attachments = NewAttachment ++ proplists:delete(FileName, Attachments)
- },
- {ok, UpdatedRev} = couch_db:update_doc(Db, DocEdited, []),
- send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {[
- {ok, true},
- {id, DocId},
- {rev, UpdatedRev}
- ]});
-
-handle_attachment_request(_Req, _Method, _DbName, _Db, _DocId, _FileName) ->
- throw({method_not_allowed, "GET,HEAD,DELETE,PUT"}).
-
-% Config request handlers
+ ?LOG_INFO("~s - - ~p ~s ~B", [
+ MochiReq:get(peer),
+ MochiReq:get(method),
+ RawUri,
+ Resp:get(code)
+ ]),
+ {ok, Resp}.
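
Under this dispatch scheme, a global URL handler is any fun of arity one
taking the #httpd record (db handlers take the record plus an open #db). A
minimal hypothetical handler, written against the helpers this module now
exports; neither the module nor the config line is part of this patch:

    -module(my_hello_handler).
    -include("couch_db.hrl").
    -export([handle_req/1]).

    % registered via:  hello = {my_hello_handler, handle_req}
    % in the [httpd_global_handlers] config section
    handle_req(#httpd{method='GET'}=Req) ->
        couch_httpd:send_json(Req, 200, {[{hello, <<"world">>}]});
    handle_req(Req) ->
        couch_httpd:send_method_not_allowed(Req, "GET,HEAD").
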
-handle_config_request(_Req, Method, {config, Config}) ->
- Parts = string:tokens(Config, "/"),
- handle_config_request(_Req, Method, {Parts});
-% GET /_config
-handle_config_request(Req, 'GET', {[]}) ->
- send_json(Req, 200, {dict:to_list(dict:map(
- fun(_, Value) -> {Value} end,
- lists:foldl(
- fun({{Section, Option}, Value}, Acc) ->
- SecBin = list_to_binary(Section),
- OptBin = list_to_binary(Option),
- ValBin = list_to_binary(Value),
- dict:append(SecBin, {OptBin, ValBin}, Acc)
- end,
- dict:new(),
- couch_config:all()
- )
- ))});
-% GET /_config/Section
-handle_config_request(Req, 'GET', {[Section]}) ->
- KVs = [
- {list_to_binary(Key), list_to_binary(Value)} ||
- {Key, Value} <-
- couch_config:get(Section)
- ],
- send_json(Req, 200, {KVs});
+% Utilities
-% PUT /_config/Section/Key
-% "value"
-handle_config_request(Req, 'PUT', {[Section, Key]}) ->
- Value = binary_to_list(Req:recv_body()),
- ok = couch_config:set(Section, Key, Value),
- send_json(Req, 200, {[
- {ok, true}
- ]});
+partition(Path) ->
+ mochiweb_util:partition(Path, "/").
-% GET /_config/Section/Key
-handle_config_request(Req, 'GET', {[Section, Key]}) ->
- case couch_config:get(Section, Key, null) of
- null ->
- throw({not_found, unknown_config_value});
- Value ->
- send_json(Req, 200, list_to_binary(Value))
- end;
+header_value(#httpd{mochi_req=MochiReq}, Key) ->
+ MochiReq:get_header_value(Key).
-% DELETE /_config/Section/Key
-handle_config_request(Req, 'DELETE', {[Section, Key]}) ->
- case couch_config:get(Section, Key, null) of
- null ->
- throw({not_found, unknown_config_value});
- OldValue ->
- couch_config:delete(Section, Key),
- send_json(Req, 200, list_to_binary(OldValue))
+header_value(#httpd{mochi_req=MochiReq}, Key, Default) ->
+ case MochiReq:get_header_value(Key) of
+ undefined -> Default;
+ Value -> Value
end.
+primary_header_value(#httpd{mochi_req=MochiReq}, Key) ->
+ MochiReq:get_primary_header_value(Key).
+
+serve_file(#httpd{mochi_req=MochiReq}, RelativePath, DocumentRoot) ->
+ {ok, MochiReq:serve_file(RelativePath, DocumentRoot, server_header())}.
-% View request handling internals
-
-reverse_key_default(nil) -> {};
-reverse_key_default({}) -> nil;
-reverse_key_default(Key) -> Key.
-
-parse_view_query(Req) ->
- QueryList = Req:parse_qs(),
- lists:foldl(fun({Key,Value}, Args) ->
- case {Key, Value} of
- {"", _} ->
- Args;
- {"key", Value} ->
- JsonKey = ?JSON_DECODE(Value),
- Args#view_query_args{start_key=JsonKey,end_key=JsonKey};
- {"startkey_docid", DocId} ->
- Args#view_query_args{start_docid=list_to_binary(DocId)};
- {"endkey_docid", DocId} ->
- Args#view_query_args{end_docid=list_to_binary(DocId)};
- {"startkey", Value} ->
- Args#view_query_args{start_key=?JSON_DECODE(Value)};
- {"endkey", Value} ->
- Args#view_query_args{end_key=?JSON_DECODE(Value)};
- {"count", Value} ->
- case (catch list_to_integer(Value)) of
- Count when is_integer(Count) ->
- if Count < 0 ->
- Args#view_query_args {
- direction =
- if Args#view_query_args.direction == rev -> fwd;
- true -> rev
- end,
- count=Count,
- start_key = reverse_key_default(Args#view_query_args.start_key),
- start_docid = reverse_key_default(Args#view_query_args.start_docid),
- end_key = reverse_key_default(Args#view_query_args.end_key),
- end_docid = reverse_key_default(Args#view_query_args.end_docid)};
- true ->
- Args#view_query_args{count=Count}
- end;
- _Error ->
- Msg = io_lib:format("Bad URL query value, number expected: count=~s", [Value]),
- throw({query_parse_error, Msg})
- end;
- {"update", "false"} ->
- Args#view_query_args{update=false};
- {"descending", "true"} ->
- case Args#view_query_args.direction of
- fwd ->
- Args#view_query_args {
- direction = rev,
- start_key = reverse_key_default(Args#view_query_args.start_key),
- start_docid = reverse_key_default(Args#view_query_args.start_docid),
- end_key = reverse_key_default(Args#view_query_args.end_key),
- end_docid = reverse_key_default(Args#view_query_args.end_docid)};
- _ ->
- Args %already reversed
- end;
- {"descending", "false"} ->
- % The descending=false behaviour is the default behaviour, so we
- % simpply ignore it. This is only for convenience when playing with
- % the HTTP API, so that a user doesn't get served an error when
- % flipping true to false in the descending option.
- Args;
- {"skip", Value} ->
- case (catch list_to_integer(Value)) of
- Count when is_integer(Count) ->
- Args#view_query_args{skip=Count};
- _Error ->
- Msg = lists:flatten(io_lib:format(
- "Bad URL query value, number expected: skip=~s", [Value])),
- throw({query_parse_error, Msg})
- end;
- {"group", "true"} ->
- Args#view_query_args{group_level=exact};
- {"group_level", LevelStr} ->
- Args#view_query_args{group_level=list_to_integer(LevelStr)};
- {"reduce", "true"} ->
- Args#view_query_args{reduce=true};
- {"reduce", "false"} ->
- Args#view_query_args{reduce=false};
- _ -> % unknown key
- Msg = lists:flatten(io_lib:format(
- "Bad URL query key:~s", [Key])),
- throw({query_parse_error, Msg})
- end
- end, #view_query_args{}, QueryList).
-
+qs_value(Req, Key) ->
+ qs_value(Req, Key, undefined).
+
+qs_value(Req, Key, Default) ->
+ proplists:get_value(Key, qs(Req), Default).
-make_view_fold_fun(Req, QueryArgs, TotalViewCount, ReduceCountFun) ->
- #view_query_args{
- end_key = EndKey,
- end_docid = EndDocId,
- direction = Dir,
- count = Count
- } = QueryArgs,
+qs(#httpd{mochi_req=MochiReq}) ->
+ MochiReq:parse_qs().
- PassedEndFun =
- case Dir of
- fwd ->
- fun(ViewKey, ViewId) ->
- couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId])
- end;
- rev->
- fun(ViewKey, ViewId) ->
- couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId])
- end
- end,
+path(#httpd{mochi_req=MochiReq}) ->
+ MochiReq:get(path).
- NegCountFun = fun({{Key, DocId}, Value}, OffsetReds,
- {AccCount, AccSkip, Resp, AccRevRows}) ->
- Offset = ReduceCountFun(OffsetReds),
- PassedEnd = PassedEndFun(Key, DocId),
- case {PassedEnd, AccCount, AccSkip, Resp} of
- {true, _, _, _} -> % The stop key has been passed, stop looping.
- {stop, {AccCount, AccSkip, Resp, AccRevRows}};
- {_, 0, _, _} -> % we've done "count" rows, stop foldling
- {stop, {0, 0, Resp, AccRevRows}};
- {_, _, AccSkip, _} when AccSkip > 0 ->
- {ok, {AccCount, AccSkip - 1, Resp, AccRevRows}};
- {_, _, _, undefined} ->
- Resp2 = start_json_response(Req, 200),
- Offset2 = TotalViewCount - Offset -
- lists:min([TotalViewCount - Offset, - AccCount]),
- JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
- [TotalViewCount, Offset2]),
- Resp2:write_chunk(JsonBegin),
- JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
- {ok, {AccCount + 1, 0, Resp2, [?JSON_ENCODE(JsonObj) | AccRevRows]}};
- {_, AccCount, _, Resp} ->
-
- JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
- {ok, {AccCount + 1, 0, Resp, [?JSON_ENCODE(JsonObj), ",\r\n" | AccRevRows]}}
- end
- end,
+unquote(UrlEncodedString) ->
+ mochiweb_util:unquote(UrlEncodedString).
- PosCountFun = fun({{Key, DocId}, Value}, OffsetReds,
- {AccCount, AccSkip, Resp, AccRevRows}) ->
- Offset = ReduceCountFun(OffsetReds), % I think we only need this call once per view
- PassedEnd = PassedEndFun(Key, DocId),
- case {PassedEnd, AccCount, AccSkip, Resp} of
- {true, _, _, _} ->
- % The stop key has been passed, stop looping.
- {stop, {AccCount, AccSkip, Resp, AccRevRows}};
- {_, 0, _, _} ->
- % we've done "count" rows, stop foldling
- {stop, {0, 0, Resp, AccRevRows}};
- {_, _, AccSkip, _} when AccSkip > 0 ->
- {ok, {AccCount, AccSkip - 1, Resp, AccRevRows}};
- {_, _, _, undefined} ->
- Resp2 = start_json_response(Req, 200),
- JsonBegin = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
- [TotalViewCount, Offset]),
- JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
-
- Resp2:write_chunk(JsonBegin ++ ?JSON_ENCODE(JsonObj)),
- {ok, {AccCount - 1, 0, Resp2, AccRevRows}};
- {_, AccCount, _, Resp} when (AccCount > 0) ->
- JsonObj = {[{id, DocId}, {key, Key}, {value, Value}]},
- Resp:write_chunk(",\r\n" ++ ?JSON_ENCODE(JsonObj)),
- {ok, {AccCount - 1, 0, Resp, AccRevRows}}
- end
- end,
- case Count > 0 of
- true -> PosCountFun;
- false -> NegCountFun
- end.
+parse_form(#httpd{mochi_req=MochiReq}) ->
+ mochiweb_multipart:parse_form(MochiReq).
-finish_view_fold(Req, TotalRows, FoldResult) ->
- case FoldResult of
- {ok, {_, _, undefined, _}} ->
- % nothing found in the view, nothing has been returned
- % send empty view
- send_json(Req, 200, {[
- {total_rows, TotalRows},
- {rows, []}
- ]});
- {ok, {_, _, Resp, AccRevRows}} ->
- % end the view
- Resp:write_chunk(AccRevRows ++ "\r\n]}"),
- end_json_response(Resp);
- Error ->
- throw(Error)
- end.
-
-% Document request handling internals
+body(#httpd{mochi_req=MochiReq}) ->
+ MochiReq:recv_body(?MAX_DOC_SIZE).
-parse_doc_query(Req) ->
- lists:foldl(fun({Key,Value}, Args) ->
- case {Key, Value} of
- {"attachments", "true"} ->
- Options = [attachments | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"meta", "true"} ->
- Options = [revs_info, conflicts, deleted_conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"revs", "true"} ->
- Options = [revs | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"revs_info", "true"} ->
- Options = [revs_info | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"conflicts", "true"} ->
- Options = [conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"deleted_conflicts", "true"} ->
- Options = [deleted_conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"rev", Rev} ->
- Args#doc_query_args{rev=list_to_binary(Rev)};
- {"open_revs", "all"} ->
- Args#doc_query_args{open_revs=all};
- {"open_revs", RevsJsonStr} ->
- JsonArray = ?JSON_DECODE(RevsJsonStr),
- Args#doc_query_args{open_revs=JsonArray};
- _Else -> % unknown key value pair, ignore.
- Args
- end
- end, #doc_query_args{}, Req:parse_qs()).
+json_body(#httpd{mochi_req=MochiReq}) ->
+ ?JSON_DECODE(MochiReq:recv_body(?MAX_DOC_SIZE)).
-% Utilities
+doc_etag(#doc{revs=[DiskRev|_]}) ->
+ "\"" ++ binary_to_list(DiskRev) ++ "\"".
-none_match(Req, Tag) ->
- Etag = "\"" ++ binary_to_list(Tag) ++ "\"",
- Etags = case Req:get_header_value("If-None-Match") of
- undefined ->
- [];
- Tags ->
- string:tokens(Tags, ", ")
- end,
- case lists:member(Etag, Etags) of
- true ->
- throw({not_modified, Etag});
- false ->
- Etag
- end.
+start_chunked_response(#httpd{mochi_req=MochiReq}, Code, Headers) ->
+ {ok, MochiReq:respond({Code, Headers ++ server_header(), chunked})}.
-error_to_json(Error) ->
- {HttpCode, Atom, Reason} = error_to_json0(Error),
- FormattedReason =
- case (catch io_lib:format("~s", [Reason])) of
- List when is_list(List) ->
- lists:flatten(List);
- _ ->
- lists:flatten(io_lib:format("~p", [Reason])) % else term to text
- end,
- Json = {[
- {error, Atom},
- {reason, list_to_binary(FormattedReason)}
- ]},
- {HttpCode, Json}.
-
-error_to_json0(bad_request) ->
- {400, bad_request, "Bad request"};
-error_to_json0({bad_request, Reason}) ->
- {400, bad_request, Reason};
-error_to_json0(not_found) ->
- {404, not_found, "missing"};
-error_to_json0({missing_rev, Msg}) ->
- {412, missing_rev, Msg};
-error_to_json0({not_found, Reason}) ->
- {404, not_found, Reason};
-error_to_json0({database_already_exists, Reason}) ->
- {409, database_already_exists, Reason};
-error_to_json0(conflict) ->
- {412, conflict, "Update conflict"};
-error_to_json0({doc_validation, Msg}) ->
- {406, doc_validation, Msg};
-error_to_json0({Id, Reason}) when is_atom(Id) ->
- {500, Id, Reason};
-error_to_json0(Error) ->
- {500, error, Error}.
+send_chunk(Resp, Data) ->
+ Resp:write_chunk(Data),
+ {ok, Resp}.
-extract_header_rev(Req, ExplictRev) when is_list(ExplictRev)->
- extract_header_rev(Req, list_to_binary(ExplictRev));
-extract_header_rev(Req, ExplictRev) ->
- Etag = case Req:get_header_value("If-Match") of
- undefined -> undefined;
- Tag -> string:strip(Tag, both, $")
+send_response(#httpd{mochi_req=MochiReq}, Code, Headers, Body) ->
+ if Code >= 400 ->
+ ?LOG_DEBUG("HTTPd ~p error response:~n ~s", [Code, Body]);
+ true -> ok
end,
- case {ExplictRev, Etag} of
- {undefined, undefined} -> missing_rev;
- {_, undefined} -> ExplictRev;
- {undefined, _} -> list_to_binary(Etag);
- _ when ExplictRev == Etag -> list_to_binary(Etag);
- _ ->
- throw({bad_request, "Document rev and etag have different values"})
- end.
+ {ok, MochiReq:respond({Code, Headers ++ server_header(), Body})}.
-parse_copy_destination_header(Req) ->
- Destination = Req:get_header_value("Destination"),
- case regexp:match(Destination, "\\?") of
- nomatch ->
- {list_to_binary(Destination), []};
- {match, _, _} ->
- {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
- {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
- {list_to_binary(DocId), [list_to_binary(Rev)]}
- end.
-
-send_error(Req, {method_not_allowed, Methods}) ->
- {ok, Req:respond({405, [{"Allow", Methods}] ++ server_header(), <<>>})};
-send_error(Req, {modified, Etag}) ->
- {ok, Req:respond({412, [{"Etag", Etag}] ++ server_header(), <<>>})};
-send_error(Req, {not_modified, Etag}) ->
- {ok, Req:respond({304, [{"Etag", Etag}] ++ server_header(), <<>>})};
-send_error(Req, Error) ->
- {Code, Json} = error_to_json(Error),
- ?LOG_INFO("HTTP Error (code ~w): ~p", [Code, Error]),
- send_error(Req, Code, Json).
-
-send_error(Req, Code, Json) ->
- send_json(Req, Code, Json).
+send_method_not_allowed(Req, Methods) ->
+ send_response(Req, 405, [{"Allow", Methods}], <<>>).
send_json(Req, Value) ->
send_json(Req, 200, Value).
@@ -1201,10 +222,8 @@ send_json(Req, Code, Headers, Value) ->
DefaultHeaders = [
{"Content-Type", negotiate_content_type(Req)},
{"Cache-Control", "must-revalidate"}
- ] ++ server_header(),
- Body = ?JSON_ENCODE(Value),
- Resp = Req:respond({Code, DefaultHeaders ++ Headers, Body}),
- {ok, Resp}.
+ ],
+ send_response(Req, Code, DefaultHeaders ++ Headers, ?JSON_ENCODE(Value)).
start_json_response(Req, Code) ->
start_json_response(Req, Code, []).
@@ -1213,19 +232,62 @@ start_json_response(Req, Code, Headers) ->
DefaultHeaders = [
{"Content-Type", negotiate_content_type(Req)},
{"Cache-Control", "must-revalidate"}
- ] ++ server_header(),
- Req:respond({Code, DefaultHeaders ++ Headers, chunked}).
+ ],
+ start_chunked_response(Req, Code, DefaultHeaders ++ Headers).
end_json_response(Resp) ->
- Resp:write_chunk(""),
- {ok, Resp}.
+ send_chunk(Resp, []).
+
+
+
+send_error(Req, bad_request) ->
+ send_error(Req, 400, <<"bad_request">>, <<>>);
+send_error(Req, {bad_request, Reason}) ->
+ send_error(Req, 400, <<"bad_request">>, Reason);
+send_error(Req, not_found) ->
+ send_error(Req, 404, <<"not_found">>, <<"Missing">>);
+send_error(Req, {not_found, Reason}) ->
+ send_error(Req, 404, <<"not_found">>, Reason);
+send_error(Req, conflict) ->
+ send_error(Req, 412, <<"conflict">>, <<"Document update conflict.">>);
+send_error(Req, {doc_validation, Msg}) ->
+ send_error(Req, 406, <<"doc_validation">>, Msg);
+send_error(Req, file_exists) ->
+ send_error(Req, 409, <<"file_exists">>, <<"The database could not be "
+ "created, the file already exists.">>);
+send_error(Req, {Error, Reason}) ->
+ send_error(Req, 500, Error, Reason);
+send_error(Req, Error) ->
+ send_error(Req, 500, <<"error">>, Error).
+
+
+
+send_error(Req, Code, Error, Msg) when is_atom(Error) ->
+ send_error(Req, Code, list_to_binary(atom_to_list(Error)), Msg);
+send_error(Req, Code, Error, Msg) when is_list(Msg) ->
+ case (catch list_to_binary(Msg)) of
+ Bin when is_binary(Bin) ->
+ send_error(Req, Code, Error, Bin);
+ _ ->
+ send_error(Req, Code, Error, io_lib:format("~p", [Msg]))
+ end;
+send_error(Req, Code, Error, Msg) when not is_binary(Error) ->
+ send_error(Req, Code, list_to_binary(io_lib:format("~p", [Error])), Msg);
+send_error(Req, Code, Error, Msg) when not is_binary(Msg) ->
+ send_error(Req, Code, Error, list_to_binary(io_lib:format("~p", [Msg])));
+send_error(Req, Code, Error, <<>>) ->
+ send_json(Req, Code, {[{error, Error}]});
+send_error(Req, Code, Error, Msg) ->
+ send_json(Req, Code, {[{error, Error}, {reason, Msg}]}).
+
+
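
For reference, request handlers signal failures with throw/1; the catch in
handle_request/3 funnels the thrown term into the clauses above. A sketch of
the resulting mappings (handler body hypothetical):

    handle_req(_Req) ->
        % becomes: 400 {"error":"bad_request","reason":"missing parameter"}
        throw({bad_request, <<"missing parameter">>}).

    % likewise: throw(not_found)        -> 404 {"error":"not_found","reason":"Missing"}
    %           throw({oops, <<"hi">>}) -> 500 {"error":"oops","reason":"hi"}
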
-negotiate_content_type(Req) ->
+negotiate_content_type(#httpd{mochi_req=MochiReq}) ->
%% Determine the appropriate Content-Type header for a JSON response
%% depending on the Accept header in the request. A request that explicitly
%% lists the correct JSON MIME type will get that type, otherwise the
%% response will have the generic MIME type "text/plain"
- AcceptedTypes = case Req:get_header_value("Accept") of
+ AcceptedTypes = case MochiReq:get_header_value("Accept") of
undefined -> [];
AcceptHeader -> string:tokens(AcceptHeader, ", ")
end,
diff --git a/src/couchdb/couch_server.erl b/src/couchdb/couch_server.erl
index 04189764..95d51fc7 100644
--- a/src/couchdb/couch_server.erl
+++ b/src/couchdb/couch_server.erl
@@ -229,7 +229,7 @@ handle_call({create, DbName, Options}, {FromPid,_}, Server) ->
{reply, Error, Server}
end;
[_AlreadyRunningDb] ->
- {reply, {error, file_exists}, Server}
+ {reply, file_exists, Server}
end;
Error ->
{reply, Error, Server}
diff --git a/src/couchdb/couch_server_sup.erl b/src/couchdb/couch_server_sup.erl
index 23004e74..b75747d7 100644
--- a/src/couchdb/couch_server_sup.erl
+++ b/src/couchdb/couch_server_sup.erl
@@ -144,8 +144,7 @@ start_primary_services() ->
start_secondary_services() ->
DaemonChildSpecs = [
begin
- {ok, Tokens, _} = erl_scan:string(SpecStr ++ "."),
- {ok, {Module, Fun, Args}} = erl_parse:parse_term(Tokens),
+ {ok, {Module, Fun, Args}} = couch_util:parse_term(SpecStr),
{list_to_atom(Name),
{Module, Fun, Args},
diff --git a/src/couchdb/couch_util.erl b/src/couchdb/couch_util.erl
index 0f10c904..e6d6226b 100644
--- a/src/couchdb/couch_util.erl
+++ b/src/couchdb/couch_util.erl
@@ -16,7 +16,7 @@
-export([should_flush/0, should_flush/1]).
-export([new_uuid/0, rand32/0, implode/2, collate/2, collate/3]).
-export([abs_pathname/1,abs_pathname/2, trim/1, ascii_lower/1]).
--export([encodeBase64/1, decodeBase64/1, to_hex/1]).
+-export([encodeBase64/1, decodeBase64/1, to_hex/1,parse_term/1,dict_find/3]).
-include("couch_db.hrl").
@@ -45,6 +45,13 @@ to_hex([H|T]) ->
to_digit(N) when N < 10 -> $0 + N;
to_digit(N) -> $a + N-10.
+
+
+parse_term(Bin) when is_binary(Bin) ->
+ parse_term(binary_to_list(Bin));
+parse_term(List) ->
+ {ok, Tokens, _} = erl_scan:string(List ++ "."),
+ erl_parse:parse_term(Tokens).
% returns a random integer
@@ -249,3 +256,12 @@ enc(C) ->
dec(C) ->
62*?st(C,43) + ?st(C,47) + (C-59)*?st(C,48) - 69*?st(C,65) - 6*?st(C,97).
+
+
+dict_find(Key, Dict, DefaultValue) ->
+ case dict:find(Key, Dict) of
+ {ok, Value} ->
+ Value;
+ error ->
+ DefaultValue
+ end.
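
Usage sketches for the two new helpers (bindings illustrative):

    % parse_term/1 turns a config string into an Erlang term:
    {ok, {couch_httpd_db, handle_request}} =
        couch_util:parse_term("{couch_httpd_db, handle_request}"),

    % dict_find/3 is dict:find/2 with a default; it replaces the private
    % copy removed from couch_view.erl below:
    D = dict:from_list([{a, 1}]),
    1 = couch_util:dict_find(a, D, 0),
    0 = couch_util:dict_find(b, D, 0)
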
diff --git a/src/couchdb/couch_view.erl b/src/couchdb/couch_view.erl
index a1afe49f..e3623dde 100644
--- a/src/couchdb/couch_view.erl
+++ b/src/couchdb/couch_view.erl
@@ -791,13 +791,6 @@ view_compute(#group{def_lang=DefLang, query_server=QueryServerIn}=Group, Docs) -
{Group#group{query_server=QueryServer}, Results}.
-dict_find(Key, DefaultValue, Dict) ->
- case dict:find(Key, Dict) of
- {ok, Value} ->
- Value;
- error ->
- DefaultValue
- end.
write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq) ->
#group{id_btree=IdBtree} = Group,
@@ -824,7 +817,7 @@ write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq) ->
Views2 = [
begin
- KeysToRemove = dict_find(View#view.id_num, [], KeysToRemoveByView),
+ KeysToRemove = couch_util:dict_find(View#view.id_num, KeysToRemoveByView, []),
{ok, ViewBtree2} = couch_btree:add_remove(View#view.btree, AddKeyValues, KeysToRemove),
View#view{btree = ViewBtree2}
end