summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAdam Kocoloski <adam@cloudant.com>2010-06-09 11:41:57 -0400
committerAdam Kocoloski <adam@cloudant.com>2010-06-09 11:41:57 -0400
commit8b187422a888a94b8f1d48eee52f100510a83141 (patch)
tree01556f7f26a4732a3c8bcc63a0127a195f00b61f
new app for dedicated cluster HTTP interface
-rw-r--r--ebin/chttpd.app23
-rw-r--r--include/chttpd.hrl7
-rw-r--r--src/chttpd.erl602
-rw-r--r--src/chttpd_app.erl11
-rw-r--r--src/chttpd_auth.erl481
-rw-r--r--src/chttpd_db.erl939
-rw-r--r--src/chttpd_external.erl166
-rw-r--r--src/chttpd_misc.erl232
-rw-r--r--src/chttpd_oauth.erl168
-rw-r--r--src/chttpd_server.erl3
-rw-r--r--src/chttpd_show.erl496
-rw-r--r--src/chttpd_stats.erl62
-rw-r--r--src/chttpd_sup.erl15
-rw-r--r--src/chttpd_view.erl738
14 files changed, 3943 insertions, 0 deletions
diff --git a/ebin/chttpd.app b/ebin/chttpd.app
new file mode 100644
index 00000000..445b0969
--- /dev/null
+++ b/ebin/chttpd.app
@@ -0,0 +1,23 @@
+{application, chttpd, [
+ {description, "HTTP interface for CouchDB cluster"},
+ {vsn, "1.0"},
+ {modules, [
+ chttpd,
+ chttpd_app,
+ chttpd_auth,
+ chttpd_db,
+ chttpd_external,
+ chttpd_misc,
+ chttpd_oauth,
+ chttpd_server,
+ chttpd_show,
+ chttpd_stats,
+ chttpd_sup,
+ chttpd_view,
+ cloudant_auth
+ ]},
+ {registered, []},
+ {applications, [kernel, stdlib, couch, fabric]},
+ {mod, {chttpd_app,[]}},
+ {start_phases, []}
+]}. \ No newline at end of file
diff --git a/include/chttpd.hrl b/include/chttpd.hrl
new file mode 100644
index 00000000..49c0f74a
--- /dev/null
+++ b/include/chttpd.hrl
@@ -0,0 +1,7 @@
+-define(FABRIC, true).
+
+-ifndef(COUCH).
+-include("../../couch/src/couch_db.hrl").
+-endif.
+
+-include_lib("eunit/include/eunit.hrl").
diff --git a/src/chttpd.erl b/src/chttpd.erl
new file mode 100644
index 00000000..a062f89a
--- /dev/null
+++ b/src/chttpd.erl
@@ -0,0 +1,602 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd).
+-include("chttpd.hrl").
+
+-export([start_link/0, stop/0, handle_request/5, config_change/2]).
+
+-export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2,body_length/1]).
+-export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]).
+-export([parse_form/1,json_body/1,json_body_obj/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]).
+-export([primary_header_value/2,partition/1,serve_file/3, server_header/0]).
+-export([start_chunked_response/3,send_chunk/2]).
+-export([start_response_length/4, send/2]).
+-export([start_json_response/2, start_json_response/3, end_json_response/1]).
+-export([send_response/4,send_method_not_allowed/2,send_error/4, send_redirect/2,send_chunked_error/2]).
+-export([send_json/2,send_json/3,send_json/4]).
+
%% @doc Start the mochiweb HTTP listener for the dedicated cluster
%% interface. Bind address, port and backlog are read from the "chttpd"
%% config section. A config-change callback is registered so the
%% listener is stopped (and restarted with fresh settings by its
%% supervisor) when any of those settings change.
%% Returns {ok, Pid} or throws {error, Reason}.
start_link() ->
    BindAddress = couch_config:get("chttpd", "bind_address", any),
    Port = couch_config:get("chttpd", "port", "5984"),
    Backlog = list_to_integer(couch_config:get("chttpd", "backlog", "128")),

    % Requests whose first path segment matches none of the UrlHandlers
    % fall through to the clustered database handler.
    Default = fun chttpd_db:handle_request/1,

    UrlHandlers = [
        {<<"/">>, fun chttpd_misc:handle_welcome_req/1},
        {<<"favicon.ico">>, fun chttpd_misc:handle_favicon_req/1},
        {<<"_utils">>, fun chttpd_misc:handle_utils_dir_req/1},
        {<<"_all_dbs">>, fun chttpd_misc:handle_all_dbs_req/1},
        {<<"_active_tasks">>, fun chttpd_misc:handle_task_status_req/1},
        {<<"_config">>, fun chttpd_misc:handle_config_req/1},
        {<<"_replicate">>, fun chttpd_misc:handle_replicate_req/1},
        {<<"_uuids">>, fun chttpd_misc:handle_uuids_req/1},
        {<<"_log">>, fun chttpd_misc:handle_log_req/1},
        {<<"_sleep">>, fun chttpd_misc:handle_sleep_req/1},
        {<<"_session">>, fun chttpd_auth:handle_session_req/1},
        {<<"_user">>, fun chttpd_auth:handle_user_req/1},
        {<<"_oauth">>, fun chttpd_oauth:handle_oauth_req/1},
        {<<"_stats">>, fun chttpd_stats:handle_stats_req/1},
        {<<"_restart">>, fun showroom_http:handle_restart_req/1},
        {<<"_cloudant">>, fun showroom_httpd_admin:handle_cloudant_req/1}
    ],

    DbHandlers = [
        {<<"_view_cleanup">>, fun chttpd_view:handle_view_cleanup_req/2},
        {<<"_compact">>, fun chttpd_db:handle_compact_req/2},
        {<<"_design">>, fun chttpd_db:handle_design_req/2},
        {<<"_view">>, fun chttpd_db:handle_db_view_req/2},
        {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2},
        {<<"_changes">>, fun chttpd_db:handle_changes_req/2}
    ],

    DesignHandlers = [
        {<<"_view">>, fun chttpd_view:handle_view_req/2},
        {<<"_show">>, fun chttpd_show:handle_doc_show_req/2},
        {<<"_list">>, fun chttpd_show:handle_view_list_req/2},
        {<<"_update">>, fun chttpd_show:handle_doc_update_req/2},
        {<<"_info">>, fun chttpd_db:handle_design_info_req/2}
    ],

    % Fully-qualified call so the acceptor loop picks up a code upgrade
    % of this module.
    Loop = fun(Req) -> ?MODULE:handle_request(Req, Default, UrlHandlers,
        DbHandlers, DesignHandlers) end,

    case mochiweb_http:start([
        {loop, Loop},
        {name, ?MODULE},
        {ip, BindAddress},
        {port, Port},
        {backlog, Backlog}
    ]) of
    {ok, Pid} ->
        ok = couch_config:register(fun ?MODULE:config_change/2, Pid),
        {ok, Pid};
    {error, Reason} ->
        % Reason is a term (eaddrinuse, a tuple, ...), so format with ~p;
        % the previous ~s crashed the error report for non-string reasons.
        io:format("Failure to start Mochiweb: ~p~n", [Reason]),
        throw({error, Reason})
    end.
+
%% @doc couch_config change callback: restart the listener whenever one
%% of its network settings changes. couch_config:register/2 takes no
%% section filter, so this callback appears to be invoked for every
%% config change; the catch-all clause keeps unrelated changes from
%% killing the config event handler with a function_clause error.
config_change("chttpd", "bind_address") ->
    ?MODULE:stop();
config_change("chttpd", "port") ->
    ?MODULE:stop();
config_change("chttpd", "backlog") ->
    ?MODULE:stop();
config_change(_Section, _Key) ->
    ok.

%% @doc Stop the mochiweb listener so it can be restarted with fresh
%% settings.
stop() ->
    mochiweb_http:stop(?MODULE).
+
%% @doc Main request entry point, called by mochiweb for every request.
%% Authenticates and authorizes the request, dispatches it to the
%% handler registered for its first path segment (or DefaultFun),
%% converts any thrown/raised error into an HTTP error response, and
%% records access logging and per-request statistics.
handle_request(MochiReq, DefaultFun,
        UrlHandlers, DbUrlHandlers, DesignUrlHandlers) ->
    Begin = now(),

    % Tried in order; the first handler to attach a user_ctx wins.
    AuthenticationFuns = [
        fun chttpd_auth:cookie_authentication_handler/1,
        fun chttpd_auth:default_authentication_handler/1
    ],

    % for the path, use the raw path with the query string and fragment
    % removed, but URL quoting left intact
    RawUri = MochiReq:get(raw_path),
    % Multi-tenancy: derive the customer from the X-Cloudant-User or Host
    % header and rewrite the path into that customer's namespace
    % (presumably a per-customer db prefix -- see cloudant_util to confirm).
    Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), MochiReq:get_header_value("Host")),
    Path = ?COUCH:db_path(RawUri, Customer),

    % Dispatch key is the first path segment; the bare root URL is
    % special-cased as <<"/">>.
    HandlerKey =
    case mochiweb_util:partition(Path, "/") of
    {"", "", ""} ->
        <<"/">>; % Special case the root url handler
    {FirstPart, _, _} ->
        list_to_binary(FirstPart)
    end,
    % Pre-rendered log line, used if the client closes the socket
    % mid-request (at which point the request terms may be gone).
    LogForClosedSocket = io_lib:format("mochiweb_recv_error for ~s - ~p ~s", [
        MochiReq:get(peer),
        MochiReq:get(method),
        RawUri
    ]),

    Method1 =
    case MochiReq:get(method) of
        % already an atom
        Meth when is_atom(Meth) -> Meth;

        % Non standard HTTP verbs aren't atoms (COPY, MOVE etc) so convert when
        % possible (if any module references the atom, then it's existing).
        Meth -> couch_util:to_existing_atom(Meth)
    end,
    increment_method_stats(Method1),
    % alias HEAD to GET as mochiweb takes care of stripping the body
    Method = case Method1 of
        'HEAD' -> 'GET';
        Other -> Other
    end,

    HttpReq = #httpd{
        mochi_req = MochiReq,
        method = Method,
        % path segments are stored URL-decoded, as binaries
        path_parts = [list_to_binary(chttpd:unquote(Part))
                || Part <- string:tokens(Path, "/")],
        db_url_handlers = DbUrlHandlers,
        design_url_handlers = DesignUrlHandlers
    },

    HandlerFun = couch_util:get_value(HandlerKey, UrlHandlers, DefaultFun),
    {ok, Resp} =
    try
        % clear any stale cookie-auth failure flag left in this process's
        % dictionary by a previous request
        erase(cookie_auth_failed),
        case authenticate_request(HttpReq, AuthenticationFuns) of
        #httpd{} = Req ->
            HandlerFun(cloudant_auth:authorize_request(Req));
        Response ->
            % an auth handler already produced a complete response
            Response
        end
    catch
        throw:{http_head_abort, Resp0} ->
            % headers-only reply to a HEAD request; body was suppressed
            {ok, Resp0};
        throw:{invalid_json, S} ->
            ?LOG_ERROR("attempted upload of invalid JSON ~s", [S]),
            send_error(HttpReq, {bad_request, "invalid UTF-8 JSON"});
        exit:{mochiweb_recv_error, E} ->
            % client went away; log it and end this request process quietly
            ?LOG_INFO("mochiweb_recv_error: ~p", [E]),
            showroom_log:message(notice, LogForClosedSocket, []),
            exit(normal);
        throw:Error ->
            % thrown terms are expected application errors
            % ?LOG_DEBUG("Minor error in HTTP request: ~p",[Error]),
            % ?LOG_DEBUG("Stacktrace: ~p",[erlang:get_stacktrace()]),
            send_error(HttpReq, Error);
        error:badarg ->
            ?LOG_ERROR("Badarg error in HTTP request",[]),
            ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
            send_error(HttpReq, badarg);
        error:function_clause ->
            ?LOG_ERROR("function_clause error in HTTP request",[]),
            ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
            send_error(HttpReq, function_clause);
        Tag:Error ->
            ?LOG_ERROR("Uncaught error in HTTP request: ~p",[{Tag, Error}]),
            ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
            send_error(HttpReq, Error)
    end,

    % access log and request-time statistics (milliseconds)
    RequestTime = round(timer:now_diff(now(), Begin)/1000),
    showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", [
        MochiReq:get(peer),
        MochiReq:get_header_value("Host"),
        atom_to_list(Method1),
        RawUri,
        Resp:get(code),
        RequestTime
    ]),
    couch_stats_collector:record({couchdb, request_time}, RequestTime),
    couch_stats_collector:increment({httpd, requests}),
    {ok, Resp}.
+
% Try the authentication handlers in order until one of them attaches a
% user_ctx to the request, or short-circuits with a non-request response.
authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthFuns) ->
    % a handler already authenticated this request
    Req;
authenticate_request(#httpd{} = Req, [AuthFun|Rest]) ->
    authenticate_request(AuthFun(Req), Rest);
authenticate_request(#httpd{} = Req, []) ->
    % No handler authenticated the request: apply the configured policy.
    case couch_config:get("chttpd_auth", "require_valid_user", "false") of
    "true" ->
        throw({unauthorized, <<"Authentication required.">>});
    "false" ->
        case couch_config:get("admins") of
        [] ->
            % no server admins configured: grant anonymous users full access
            Ctx = #user_ctx{roles=[<<"_reader">>, <<"_writer">>, <<"_admin">>]},
            Req#httpd{user_ctx = Ctx};
        _ ->
            % admins exist: anonymous users get an empty (role-less) context
            Req#httpd{user_ctx=#user_ctx{}}
        end
    end;
authenticate_request(Response, _AuthFuns) ->
    % a handler returned a finished response; pass it through unchanged
    Response.

% Count each request under its HTTP method.
increment_method_stats(Method) ->
    couch_stats_collector:increment({httpd_request_methods, Method}).
+
+
% Utilities

%% Split Path into {Before, Separator, After} around its first "/".
partition(Path) ->
    mochiweb_util:partition(Path, "/").

%% Value of a request header, or undefined when absent.
header_value(#httpd{mochi_req=MochiReq}, Key) ->
    MochiReq:get_header_value(Key).

%% Value of a request header, or Default when absent.
header_value(#httpd{mochi_req=MochiReq}, Key, Default) ->
    case MochiReq:get_header_value(Key) of
    undefined -> Default;
    Value -> Value
    end.

%% First (parameter-free) value of a request header.
primary_header_value(#httpd{mochi_req=MochiReq}, Key) ->
    MochiReq:get_primary_header_value(Key).

%% Serve a static file from DocumentRoot, adding the standard Server and
%% any cookie-auth headers.
serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot) ->
    {ok, MochiReq:serve_file(RelativePath, DocumentRoot,
        server_header() ++ chttpd_auth:cookie_auth_header(Req, []))}.

%% Query-string parameter value, or undefined when absent.
qs_value(Req, Key) ->
    qs_value(Req, Key, undefined).

%% Query-string parameter value, or Default when absent.
qs_value(Req, Key, Default) ->
    couch_util:get_value(Key, qs(Req), Default).

%% The parsed query string, as a proplist of strings.
qs(#httpd{mochi_req=MochiReq}) ->
    MochiReq:parse_qs().

%% The request path.
path(#httpd{mochi_req=MochiReq}) ->
    MochiReq:get(path).
+
%% @doc Build a fully-qualified URI for Path as seen by the client,
%% honoring the X-Forwarded-Host/-Ssl/-Proto headers a fronting proxy
%% may set, and stripping the per-customer path prefix so the client
%% sees the URI it actually requested.
absolute_uri(#httpd{mochi_req=MochiReq}, Path) ->
    XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"),
    Host = case MochiReq:get_header_value(XHost) of
        undefined ->
            case MochiReq:get_header_value("Host") of
            undefined ->
                % no Host header: fall back to the socket's local address
                {ok, {Address, Port}} = inet:sockname(MochiReq:get(socket)),
                inet_parse:ntoa(Address) ++ ":" ++ integer_to_list(Port);
            Value1 ->
                Value1
            end;
        Value -> Value
    end,
    XSsl = couch_config:get("httpd", "x_forwarded_ssl", "X-Forwarded-Ssl"),
    Scheme = case MochiReq:get_header_value(XSsl) of
        "on" -> "https";
        _ ->
            XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"),
            case MochiReq:get_header_value(XProto) of
            % Restrict to "https" and "http" schemes only
            "https" -> "https";
            _ -> "http"
            end
    end,
    Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"),
        Host),
    % Strip the leading "/<customer>/" prefix, where the separator may be
    % a literal slash or its %2F escape. The previous pattern used the
    % character class [/%2F]+, which wrongly matched any run of the
    % individual characters '/', '%', '2' and 'F'.
    CustomerRegex = ["^/", Customer, "(/|%2F)+"],
    % re:replace/3 returns iodata by default; ask for a flat list so the
    % result concatenates as a plain string.
    NewPath = re:replace(Path, CustomerRegex, "/", [{return, list}]),
    Scheme ++ "://" ++ Host ++ NewPath.
+
%% URL-decode a string.
unquote(UrlEncodedString) ->
    mochiweb_util:unquote(UrlEncodedString).

%% URL-encode a string (spaces become '+').
quote(UrlDecodedString) ->
    mochiweb_util:quote_plus(UrlDecodedString).

%% Parse a form-encoded request body into a proplist.
parse_form(#httpd{mochi_req=MochiReq}) ->
    mochiweb_multipart:parse_form(MochiReq).

%% Read up to Len bytes of the request body.
recv(#httpd{mochi_req=MochiReq}, Len) ->
    MochiReq:recv(Len).

%% Stream the request body through ChunkFun, threading a state value.
recv_chunked(#httpd{mochi_req=MochiReq}, MaxChunkSize, ChunkFun, InitState) ->
    % Fun is called once with each chunk
    % Fun({Length, Binary}, State)
    % called with Length == 0 on the last time.
    MochiReq:stream_body(MaxChunkSize, ChunkFun, InitState).

%% Declared body length: an integer, 'chunked', 'undefined', or
%% {unknown_transfer_encoding, Other}.
body_length(Req) ->
    case header_value(Req, "Transfer-Encoding") of
    undefined ->
        case header_value(Req, "Content-Length") of
        undefined -> undefined;
        Length -> list_to_integer(Length)
        end;
    "chunked" -> chunked;
    Unknown -> {unknown_transfer_encoding, Unknown}
    end.

%% The raw request body. Uses the copy cached in #httpd.req_body when a
%% previous caller already consumed the socket.
body(#httpd{mochi_req=MochiReq, req_body=ReqBody}) ->
    case ReqBody of
    undefined ->
        % Maximum size of document PUT request body (4GB)
        MaxSize = list_to_integer(
            couch_config:get("couchdb", "max_document_size", "4294967296")),
        MochiReq:recv_body(MaxSize);
    _Else ->
        ReqBody
    end.

%% The request body decoded as JSON.
json_body(Httpd) ->
    ?JSON_DECODE(body(Httpd)).

%% The request body decoded as JSON; throws bad_request unless it is an
%% object.
json_body_obj(Httpd) ->
    case json_body(Httpd) of
    {Props} -> {Props};
    _Else ->
        throw({bad_request, "Request body must be a JSON object"})
    end.
+
+
%% ETag for a document, derived from its current revision.
doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
    "\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"".

%% Deterministic ETag for an arbitrary term: md5 of its external term
%% format, rendered in base 36 and wrapped in quotes.
make_etag(Term) ->
    <<SigInt:128/integer>> = erlang:md5(term_to_binary(Term)),
    list_to_binary("\"" ++ lists:flatten(io_lib:format("~.36B",[SigInt])) ++ "\"").

%% True when the request's If-None-Match header names CurrentEtag.
etag_match(Req, CurrentEtag) when is_binary(CurrentEtag) ->
    etag_match(Req, binary_to_list(CurrentEtag));

etag_match(Req, CurrentEtag) ->
    EtagsToMatch = string:tokens(
        chttpd:header_value(Req, "If-None-Match", ""), ", "),
    lists:member(CurrentEtag, EtagsToMatch).

%% Reply 304 Not Modified when the client already has CurrentEtag in its
%% cache; otherwise call RespFun to produce the real response.
etag_respond(Req, CurrentEtag, RespFun) ->
    case etag_match(Req, CurrentEtag) of
    true ->
        % the client has this in their cache.
        chttpd:send_response(Req, 304, [{"Etag", CurrentEtag}], <<>>);
    false ->
        % Run the function.
        RespFun()
    end.

%% Throws unauthorized unless the request's user holds the _admin role.
verify_is_server_admin(#httpd{user_ctx=#user_ctx{roles=Roles}}) ->
    case lists:member(<<"_admin">>, Roles) of
    true -> ok;
    false -> throw({unauthorized, <<"You are not a server admin.">>})
    end.
+
%% Start a response with a known Content-Length. For HEAD requests the
%% body is suppressed by throwing {http_head_abort, Resp} once the
%% headers have been written (caught in handle_request/5).
start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) ->
    couch_stats_collector:increment({httpd_status_codes, Code}),
    Resp = MochiReq:start_response_length({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Length}),
    case MochiReq:get(method) of
    'HEAD' -> throw({http_head_abort, Resp});
    _ -> ok
    end,
    {ok, Resp}.

%% Write part of a fixed-length response body.
send(Resp, Data) ->
    Resp:send(Data),
    {ok, Resp}.

%% Start a chunked (Transfer-Encoding: chunked) response; HEAD requests
%% are aborted after the headers, as in start_response_length/4.
start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
    couch_stats_collector:increment({httpd_status_codes, Code}),
    Resp = MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), chunked}),
    case MochiReq:get(method) of
    'HEAD' -> throw({http_head_abort, Resp});
    _ -> ok
    end,
    {ok, Resp}.

%% Write one chunk; an empty chunk terminates the chunked response.
send_chunk(Resp, Data) ->
    Resp:write_chunk(Data),
    {ok, Resp}.

%% Send a complete response in one call, logging error-class bodies.
send_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) ->
    couch_stats_collector:increment({httpd_status_codes, Code}),
    if Code >= 400 ->
        ?LOG_DEBUG("httpd ~p error response:~n ~s", [Code, Body]);
    true -> ok
    end,
    {ok, MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Body})}.

%% Reply 405, advertising the permitted methods in the Allow header.
send_method_not_allowed(Req, Methods) ->
    send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, ?l2b("Only " ++ Methods ++ " allowed")).
+
%% Send Value as a JSON response with status 200.
send_json(Req, Value) ->
    send_json(Req, 200, Value).

%% Send Value as a JSON response with the given status code.
send_json(Req, Code, Value) ->
    send_json(Req, Code, [], Value).

%% Send Value as a complete JSON response, wrapping it in a JSONP
%% callback when one was requested. Caller Headers are appended after
%% the defaults.
send_json(Req, Code, Headers, Value) ->
    DefaultHeaders = [
        {"Content-Type", negotiate_content_type(Req)},
        {"Cache-Control", "must-revalidate"}
    ],
    Body = list_to_binary(
        [start_jsonp(Req), ?JSON_ENCODE(Value), end_jsonp(), $\n]
    ),
    send_response(Req, Code, DefaultHeaders ++ Headers, Body).

%% Begin a chunked JSON response with status Code.
start_json_response(Req, Code) ->
    start_json_response(Req, Code, []).

%% Begin a chunked JSON response. Any JSONP callback is validated
%% *before* the chunked response starts (so a bad callback can still
%% produce a clean error), then emitted as the first chunk.
start_json_response(Req, Code, Headers) ->
    DefaultHeaders = [
        {"Content-Type", negotiate_content_type(Req)},
        {"Cache-Control", "must-revalidate"}
    ],
    start_jsonp(Req), % Validate before starting chunked.
    %start_chunked_response(Req, Code, DefaultHeaders ++ Headers).
    {ok, Resp} = start_chunked_response(Req, Code, DefaultHeaders ++ Headers),
    case start_jsonp(Req) of
    [] -> ok;
    Start -> send_chunk(Resp, Start)
    end,
    {ok, Resp}.

%% Emit the closing JSONP suffix (if any) and the terminating chunk.
end_json_response(Resp) ->
    send_chunk(Resp, end_jsonp() ++ [$\r,$\n]),
    %send_chunk(Resp, [$\n]),
    send_chunk(Resp, []).
+
%% Return the "callback(" JSONP prefix for this request, or [] when no
%% callback query parameter is present. The callback name is cached in
%% the process dictionary so end_jsonp/0 can emit the matching suffix,
%% and is validated to keep arbitrary script out of the response.
start_jsonp(Req) ->
    case get(jsonp) of
    undefined -> put(jsonp, qs_value(Req, "callback", no_jsonp));
    _ -> ok
    end,
    case get(jsonp) of
    no_jsonp -> [];
    [] -> [];
    CallBack ->
        try
            validate_callback(CallBack),
            CallBack ++ "("
        catch
        Error ->
            % reset first so the error response itself isn't wrapped
            put(jsonp, no_jsonp),
            throw(Error)
        end
    end.

%% Return the ");" suffix matching start_jsonp/1, clearing the cached
%% callback so the next request handled by this process starts clean.
end_jsonp() ->
    Resp = case get(jsonp) of
    no_jsonp -> [];
    [] -> [];
    _ -> ");"
    end,
    put(jsonp, undefined),
    Resp.
+
%% Ensure a JSONP callback name contains only characters that are safe
%% to echo into a script context: ASCII letters, digits, and the four
%% punctuation characters '.', '_', '[' and ']'. Returns ok, or throws
%% {bad_request, invalid_callback} on the first offending character.
validate_callback(CallBack) when is_binary(CallBack) ->
    validate_callback(binary_to_list(CallBack));
validate_callback([]) ->
    ok;
validate_callback([C | Rest]) when C >= $a, C =< $z;
        C >= $A, C =< $Z;
        C >= $0, C =< $9;
        C == $.; C == $_; C == $[; C == $] ->
    validate_callback(Rest);
validate_callback([_ | _]) ->
    throw({bad_request, invalid_callback}).
+
+
%% Map an internal error term to {HttpCode, ErrorName, Reason} (both
%% binaries) for the JSON error body. Specific clauses come first; the
%% final two clauses catch any unrecognized term as a 500.
error_info({Error, Reason}) when is_list(Reason) ->
    % normalize string reasons to binaries, then re-dispatch
    error_info({Error, ?l2b(Reason)});
error_info(bad_request) ->
    {400, <<"bad_request">>, <<>>};
error_info({bad_request, Reason}) ->
    {400, <<"bad_request">>, Reason};
error_info({query_parse_error, Reason}) ->
    {400, <<"query_parse_error">>, Reason};
error_info(not_found) ->
    {404, <<"not_found">>, <<"missing">>};
error_info({not_found, Reason}) ->
    {404, <<"not_found">>, Reason};
error_info({not_acceptable, Reason}) ->
    {406, <<"not_acceptable">>, Reason};
error_info(conflict) ->
    {409, <<"conflict">>, <<"Document update conflict.">>};
error_info({forbidden, Msg}) ->
    {403, <<"forbidden">>, Msg};
error_info({credentials_expired, Msg}) ->
    {403, <<"credentials_expired">>, Msg};
error_info({unauthorized, Msg}) ->
    {401, <<"unauthorized">>, Msg};
error_info(file_exists) ->
    {412, <<"file_exists">>, <<"The database could not be "
        "created, the file already exists.">>};
error_info({r_quorum_not_met, Reason}) ->
    {412, <<"read_quorum_not_met">>, Reason};
error_info({w_quorum_not_met, Reason}) ->
    {500, <<"write_quorum_not_met">>, Reason};
error_info({bad_ctype, Reason}) ->
    {415, <<"bad_content_type">>, Reason};
error_info({error, illegal_database_name}) ->
    {400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), "
        "digits (0-9), and any of the characters _, $, (, ), +, -, and / "
        "are allowed">>};
error_info(not_implemented) ->
    {501, <<"not_implemented">>, <<"this feature is not yet implemented">>};
error_info({Error, Reason}) ->
    {500, couch_util:to_binary(Error), couch_util:to_binary(Reason)};
error_info(Error) ->
    {500, <<"unknown_error">>, couch_util:to_binary(Error)}.
+
%% The response was already (partially) sent; just hand it back.
send_error(_Req, {already_sent, Resp, _Error}) ->
    {ok, Resp};

%% Convert Error via error_info/1 and send it as a JSON error response.
%% A 401 carries a WWW-Authenticate header taken from the request (as
%% forwarded by a proxy) or from the config, when either is set.
send_error(#httpd{mochi_req=MochiReq}=Req, Error) ->
    {Code, ErrorStr, ReasonStr} = error_info(Error),
    Headers = if Code == 401 ->
        case MochiReq:get_header_value("X-CouchDB-WWW-Authenticate") of
        undefined ->
            case couch_config:get("httpd", "WWW-Authenticate", nil) of
            nil ->
                [];
            Type ->
                [{"WWW-Authenticate", Type}]
            end;
        Type ->
            [{"WWW-Authenticate", Type}]
        end;
    true ->
        []
    end,
    send_error(Req, Code, Headers, ErrorStr, ReasonStr).

%% Send a pre-resolved error without extra headers.
send_error(Req, Code, ErrorStr, ReasonStr) ->
    send_error(Req, Code, [], ErrorStr, ReasonStr).

%% Send a pre-resolved error as {"error": ..., "reason": ...}.
send_error(Req, Code, Headers, ErrorStr, ReasonStr) ->
    send_json(Req, Code, Headers,
        {[{<<"error">>, ErrorStr},
        {<<"reason">>, ReasonStr}]}).

% give the option for list functions to output html or other raw errors
send_chunked_error(Resp, {_Error, {[{<<"body">>, Reason}]}}) ->
    send_chunk(Resp, Reason),
    send_chunk(Resp, []);

%% Report an error in the middle of an already-started chunked response
%% by emitting a JSON error object and terminating the stream.
send_chunked_error(Resp, Error) ->
    {Code, ErrorStr, ReasonStr} = error_info(Error),
    JsonError = {[{<<"code">>, Code},
        {<<"error">>, ErrorStr},
        {<<"reason">>, ReasonStr}]},
    send_chunk(Resp, ?l2b([$\n,?JSON_ENCODE(JsonError),$\n])),
    send_chunk(Resp, []).

%% Reply 301 with an absolute Location URI for Path.
send_redirect(Req, Path) ->
    Headers = [{"Location", chttpd:absolute_uri(Req, Path)}],
    send_response(Req, 301, Headers, <<>>).
+
%% Determine the Content-Type for a JSON response from the request's
%% Accept header: a client that explicitly lists the proper JSON MIME
%% type gets "application/json"; every other client gets the generic
%% "text/plain" type.
negotiate_content_type(#httpd{mochi_req=MochiReq}) ->
    AcceptHeader = MochiReq:get_header_value("Accept"),
    AcceptedTypes =
        if AcceptHeader == undefined ->
            [];
        true ->
            string:tokens(AcceptHeader, ", ")
        end,
    WantsJson = lists:member("application/json", AcceptedTypes),
    if WantsJson ->
        "application/json";
    true ->
        "text/plain;charset=utf-8"
    end.
+
%% The standard Server response header, advertising the CouchDB version
%% and the OTP release it runs on.
server_header() ->
    OTP = erlang:system_info(otp_release),
    [{"Server", lists:concat(["CouchDB/0.11.0 (Erlang OTP/", OTP, ")"])}].
diff --git a/src/chttpd_app.erl b/src/chttpd_app.erl
new file mode 100644
index 00000000..4b8356fb
--- /dev/null
+++ b/src/chttpd_app.erl
@@ -0,0 +1,11 @@
%% OTP application callback module for chttpd.
-module(chttpd_app).
-behaviour(application).
-export([start/2, stop/1]).

-include("chttpd.hrl").

%% application callback: delegate startup to the chttpd supervisor.
start(_Type, StartArgs) ->
    chttpd_sup:start_link(StartArgs).

%% application callback: nothing to clean up.
stop(_State) ->
    ok.
diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl
new file mode 100644
index 00000000..3916f7cf
--- /dev/null
+++ b/src/chttpd_auth.erl
@@ -0,0 +1,481 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_auth).
+-include("chttpd.hrl").
+
+-export([special_test_authentication_handler/1, null_authentication_handler/1,
+ cookie_authentication_handler/1, default_authentication_handler/1,
+ handle_session_req/1, handle_user_req/1, cookie_auth_header/2]).
+
+% used by OAuth handler
+-export([get_user/1, ensure_users_db_exists/1]).
+
+-import(chttpd, [send_json/2, send_json/4, send_method_not_allowed/2]).
+
%% Test-only handler: authenticates via a made-up "X-Couch-Test-Auth"
%% scheme carried in the WWW-Authenticate *request* header, against a
%% fixed list of name/password pairs. Requests without the header get
%% full admin access so the test harness can restore the previous
%% authentication configuration afterwards.
special_test_authentication_handler(Req) ->
    case chttpd:header_value(Req, "WWW-Authenticate") of
    "X-Couch-Test-Auth " ++ NamePass ->
        % NamePass is a colon separated string: "joe schmoe:a password".
        [Name, Pass] = re:split(NamePass, ":", [{return, list}]),
        case {Name, Pass} of
        {"Jan Lehnardt", "apple"} -> ok;
        {"Christopher Lenz", "dog food"} -> ok;
        {"Noah Slater", "biggiesmalls endian"} -> ok;
        {"Chris Anderson", "mp3"} -> ok;
        {"Damien Katz", "pecan pie"} -> ok;
        {_, _} ->
            throw({unauthorized, <<"Name or password is incorrect.">>})
        end,
        Req#httpd{user_ctx=#user_ctx{name=?l2b(Name)}};
    _ ->
        % No X-Couch-Test-Auth credentials sent, give admin access so the
        % previous authentication can be restored after the test
        Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}
    end.
+
%% @doc No-op authentication: grant every request full read/write/admin
%% access. The writer role is <<"_writer">> to match the role names used
%% everywhere else in this app (get_user/1 and the anonymous-admin
%% context in chttpd); the original <<"writer">> was missing the
%% underscore and therefore matched no role check.
null_authentication_handler(Req) ->
    Ctx = #user_ctx{roles=[<<"_reader">>, <<"_writer">>, <<"_admin">>]},
    Req#httpd{user_ctx=Ctx}.
+
%% HTTP Basic authentication against the user store (config "admins"
%% section or the authentication database). Requests without Basic
%% credentials pass through untouched, so the anonymous-access policy in
%% chttpd:authenticate_request/2 can decide what they may do.
default_authentication_handler(Req) ->
    case basic_username_pw(Req) of
    {Username, Password} ->
        case get_user(Username) of
        nil ->
            throw({unauthorized, <<"unknown username">>});
        Props ->
            % compare the salted SHA of the supplied password with the
            % stored hash
            ExpectedHash = couch_util:get_value(<<"password_sha">>, Props),
            Salt = couch_util:get_value(<<"salt">>, Props),
            case hash_password(?l2b(Password), Salt) of
            ExpectedHash ->
                Ctx = #user_ctx{
                    name = couch_util:get_value(<<"username">>, Props),
                    roles = couch_util:get_value(<<"roles">>, Props)
                },
                Req#httpd{user_ctx=Ctx};
            _ ->
                throw({unauthorized, <<"password is incorrect">>})
            end
        end;
    nil ->
        % no credentials supplied; leave the request unauthenticated
        Req
    end.
+
%% Authenticate from the AuthSession cookie. Login requests (POST to
%% /_session) deliberately skip the cookie so a stale session cannot
%% shadow freshly supplied credentials. On a failed cookie check the
%% request is tagged (record field and process dictionary) so that
%% cookie_auth_header/2 can later send a cookie-clearing Set-Cookie.
cookie_authentication_handler(#httpd{path_parts=[<<"_session">>],
        method='POST'} = Req) ->
    % ignore any cookies sent with login request
    Req;
cookie_authentication_handler(Req) ->
    case cookie_auth_user(Req) of
    nil ->
        % no cookie present; leave the request unauthenticated
        Req;
    cookie_auth_failed ->
        put(cookie_auth_failed, true),
        Req#httpd{auth=cookie_auth_failed};
    Req2 ->
        % cookie was valid: Req2 carries the user_ctx and auth info
        Req2
    end.
+
%% Compute any Set-Cookie header the response should carry:
%%  * cookie auth failed: clear the client's stale cookie, unless a
%%    login handler already placed a fresh AuthSession in Headers;
%%  * authenticated session nearing expiry: refresh the cookie;
%%  * otherwise: no extra header.
cookie_auth_header(#httpd{auth=cookie_auth_failed}, Headers) ->
    % check for an AuthSession cookie from login handler
    CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""),
    Cookies = mochiweb_cookies:parse_cookie(CookieHeader),
    AuthSession = couch_util:get_value("AuthSession", Cookies),
    if AuthSession == undefined ->
        [generate_cookie_buster()];
    true ->
        []
    end;
cookie_auth_header(#httpd{user_ctx=#user_ctx{name=null}}, _Headers) ->
    % anonymous request: nothing to set
    [];
cookie_auth_header(#httpd{user_ctx=Ctx, auth={Secret,true}}, Headers) ->
    % Note: we only set the AuthSession cookie if:
    %  * a valid AuthSession cookie has been received
    %  * we are outside a 10% timeout window
    %  * and if an AuthSession cookie hasn't already been set e.g. by a login
    %    or logout handler.
    % The login and logout handlers set the AuthSession cookie themselves.
    CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""),
    Cookies = mochiweb_cookies:parse_cookie(CookieHeader),
    AuthSession = couch_util:get_value("AuthSession", Cookies),
    if AuthSession == undefined ->
        [generate_cookie(Ctx#user_ctx.name, Secret, timestamp())];
    true ->
        []
    end;
cookie_auth_header(Req, Headers) ->
    % fall back to the process-dictionary flag set by
    % cookie_authentication_handler/1 for requests whose #httpd record
    % was not re-tagged
    case get(cookie_auth_failed) of
    true ->
        cookie_auth_header(Req#httpd{auth=cookie_auth_failed}, Headers);
    _ ->
        []
    end.
+
%% /_session handler.
%%  POST:   log in with form credentials; on success, sets a signed
%%          AuthSession cookie. Server admins may mint a session for any
%%          user without supplying that user's password.
%%  GET:    report the current session's name and roles ("whoami");
%%          ?basic=true forces a 401 for unauthenticated callers.
%%  DELETE: log out by sending a cookie-clearing Set-Cookie.
handle_session_req(#httpd{method='POST', mochi_req=MochiReq, user_ctx=Ctx}=Req) ->
    % login
    Form = parse_form(MochiReq),
    UserName = extract_username(Form),
    case get_user(UserName) of
    nil ->
        throw({forbidden, <<"unknown username">>});
    User ->
        UserSalt = couch_util:get_value(<<"salt">>, User),
        case lists:member(<<"_admin">>, Ctx#user_ctx.roles) of
        true ->
            % admins may create a session without the password
            ok;
        false ->
            Password = extract_password(Form),
            ExpectedHash = couch_util:get_value(<<"password_sha">>, User),
            case hash_password(Password, UserSalt) of
            ExpectedHash ->
                ok;
            _Else ->
                throw({forbidden, <<"Name or password is incorrect.">>})
            end
        end,
        % the cookie is signed with the server secret plus the user's salt
        Secret = ?l2b(couch_config:get("chttpd_auth", "secret")),
        SecretAndSalt = <<Secret/binary, UserSalt/binary>>,
        Cookie = generate_cookie(UserName, SecretAndSalt, timestamp()),
        send_response(Req, [Cookie])
    end;
handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) ->
    % whoami
    #user_ctx{name = Name, roles = Roles} = UserCtx,
    ForceLogin = chttpd:qs_value(Req, "basic", "false"),
    case {Name, ForceLogin} of
    {null, "true"} ->
        throw({unauthorized, <<"Please login.">>});
    _False ->
        send_json(Req, {[{ok,true}, {name,Name}, {roles,Roles}]})
    end;
handle_session_req(#httpd{method='DELETE'}=Req) ->
    % logout
    send_response(Req, [generate_cookie_buster()]);
handle_session_req(Req) ->
    send_method_not_allowed(Req, "GET,HEAD,POST,DELETE").
+
%% @doc Dispatch /_user requests: POST creates an account, PUT updates
%% and DELETE removes the named user. Every operation runs against the
%% configured authentication database, which is opened (and created on
%% first use) per request and reliably closed again afterwards.
handle_user_req(#httpd{method='POST'}=Req) ->
    with_users_db(fun(Db) -> create_user(Req, Db) end);
handle_user_req(#httpd{method=Method, path_parts=[_]}=_Req) when
    Method == 'PUT' orelse Method == 'DELETE' ->
    throw({bad_request, <<"Username is missing">>});
handle_user_req(#httpd{method='PUT', path_parts=[_, UserName]}=Req) ->
    with_users_db(fun(Db) -> update_user(Req, Db, UserName) end);
handle_user_req(#httpd{method='DELETE', path_parts=[_, UserName]}=Req) ->
    with_users_db(fun(Db) -> delete_user(Req, Db, UserName) end);
handle_user_req(Req) ->
    send_method_not_allowed(Req, "DELETE,POST,PUT").

%% Open the authentication db, run Fun(Db), and close the handle again
%% even when Fun throws (the original open/close boilerplate was
%% duplicated in every clause and leaked the handle on errors).
with_users_db(Fun) ->
    DbName = couch_config:get("chttpd_auth", "authentication_db"),
    {ok, Db} = ensure_users_db_exists(?l2b(DbName)),
    try
        Fun(Db)
    after
        couch_db:close(Db)
    end.
+
%% Look up a user's properties. Server admins from the "admins" config
%% section take precedence; other users come from the users_cache ets
%% table when it exists, falling back to the authentication database.
%% Returns a property list, or nil when the user is unknown.
get_user(UserName) when is_list(UserName) ->
    get_user(?l2b(UserName));
get_user(UserName) ->
    case couch_config:get("admins", ?b2l(UserName)) of
    "-hashed-" ++ HashedPwdAndSalt ->
        % the admins section stores entries as "-hashed-<sha>,<salt>"
        [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
        [
            {<<"username">>, UserName},
            {<<"roles">>, [<<"_reader">>, <<"_writer">>, <<"_admin">>]},
            {<<"salt">>, ?l2b(Salt)},
            {<<"password_sha">>, ?l2b(HashedPwd)}
        ];
    _ ->
        % badarg here means the cache table doesn't exist (yet); fall
        % back to the database in that case too
        try ets:lookup(users_cache, UserName) of
        [{UserName, Props}] ->
            Props;
        [] ->
            load_user_from_db(UserName)
        catch error:badarg ->
            load_user_from_db(UserName)
        end
    end.
+
%% Fetch a user doc straight from the authentication database, returning
%% its properties, or nil when no such doc exists.
load_user_from_db(UserName) ->
    DbName = couch_config:get("chttpd_auth", "authentication_db"),
    {ok, Db} = ensure_users_db_exists(?l2b(DbName)),
    UserProps = case couch_db:open_doc(Db, UserName, []) of
    {ok, Doc} ->
        ?LOG_INFO("cache miss on username ~s", [UserName]),
        {Props} = couch_doc:to_json_obj(Doc, []),
        Props;
    _Else ->
        ?LOG_INFO("no record of user ~s", [UserName]),
        nil
    end,
    couch_db:close(Db),
    UserProps.

%% Open the users database with admin rights, creating it on first use.
%% NOTE(review): some couch versions report a missing db from
%% couch_db:open as {not_found, _} rather than {error, _} -- confirm the
%% second clause actually matches the miss case.
ensure_users_db_exists(DbName) ->
    Options = [{user_ctx, #user_ctx{roles=[<<"_admin">>]}}],
    case couch_db:open(DbName, Options) of
    {ok, Db} ->
        {ok, Db};
    {error, _} ->
        couch_db:create(DbName, Options)
    end.
+
+% internal functions
+
%% @doc Extract {Username, Password} from a Basic Authorization header,
%% or nil when the header is absent or empty. Per the Basic scheme the
%% decoded credentials are split at the *first* colon only, so passwords
%% may themselves contain colons; the previous string:tokens/2 call
%% split on every colon and rejected such passwords.
basic_username_pw(Req) ->
    case chttpd:header_value(Req, "Authorization") of
    "Basic " ++ Base64Value ->
        Decoded = ?b2l(couch_util:decodeBase64(Base64Value)),
        case string:chr(Decoded, $:) of
        0 ->
            % no colon at all: treat the whole string as a username with
            % an empty password (matches the previous behavior)
            case Decoded of
            [] -> nil;
            _ -> {Decoded, ""}
            end;
        Idx ->
            {string:substr(Decoded, 1, Idx - 1),
                string:substr(Decoded, Idx + 1)}
        end;
    _ ->
        nil
    end.
+
%% Validate the AuthSession cookie, whose base64url-decoded form is
%% "user:hex_timestamp:hmac". Returns an updated #httpd{} on success,
%% nil when no cookie is present, cookie_auth_failed on any validation
%% failure, and throws credentials_expired when the session timed out.
cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) ->
    case MochiReq:get_cookie_value("AuthSession") of
    undefined ->
        nil;
    Cookie ->
        AuthSession = couch_util:decodeBase64Url(Cookie),
        [User, TimeStr | HashParts] = string:tokens(?b2l(AuthSession), ":"),
        % Verify expiry and hash
        case couch_config:get("chttpd_auth", "secret") of
        undefined ->
            ?LOG_DEBUG("AuthSession cookie, but no secret in config!", []),
            cookie_auth_failed;
        SecretStr ->
            case get_user(User) of
            nil ->
                ?LOG_DEBUG("no record of user ~s", [User]),
                cookie_auth_failed;
            Result ->
                % the HMAC key is the server secret plus the user's salt
                Secret = ?l2b(SecretStr),
                UserSalt = couch_util:get_value(<<"salt">>, Result),
                FullSecret = <<Secret/binary, UserSalt/binary>>,
                ExpectedHash = crypto:sha_mac(FullSecret, [User, ":", TimeStr]),
                % the hash itself may contain ':', so re-join the tail parts
                case ?l2b(string:join(HashParts, ":")) of
                ExpectedHash ->
                    TimeStamp = erlang:list_to_integer(TimeStr, 16),
                    Timeout = erlang:list_to_integer(couch_config:get(
                        "chttpd_auth", "timeout", "600")),
                    CurrentTime = timestamp(),
                    if CurrentTime < TimeStamp + Timeout ->
                        % the auth tuple's boolean flags sessions past 10%
                        % of their window for refresh by cookie_auth_header/2
                        TimeLeft = TimeStamp + Timeout - CurrentTime,
                        Req#httpd{user_ctx=#user_ctx{
                            name=?l2b(User),
                            roles=couch_util:get_value(<<"roles">>, Result, [])
                        }, auth={FullSecret, TimeLeft < Timeout*0.9}};
                    true ->
                        ?LOG_DEBUG("cookie for ~s was expired", [User]),
                        put(cookie_auth_failed, true),
                        Msg = lists:concat(["Your session has expired after ",
                            Timeout div 60, " minutes of inactivity"]),
                        throw({credentials_expired, ?l2b(Msg)})
                    end;
                _Else ->
                    ?LOG_DEBUG("cookie password hash was incorrect", []),
                    cookie_auth_failed
                end
            end
        end
    end.
+
%% @doc Handle POST of a new user-account form. Fails with forbidden
%% when the username is taken; only server admins may assign roles.
create_user(#httpd{method='POST', mochi_req=MochiReq}=Req, Db) ->
    Form = parse_form(MochiReq),
    {UserName, Password} = extract_username_password(Form),
    case get_user(UserName) of
    nil ->
        Roles = [?l2b(R) || R <- proplists:get_all_values("roles", Form)],
        % only server admins may grant roles to a new account
        if Roles /= [] ->
            chttpd:verify_is_server_admin(Req);
        true -> ok end,
        Active = chttpd_view:parse_bool_param(couch_util:get_value("active",
            Form, "true")),
        UserSalt = couch_util:new_uuid(),
        UserDoc = #doc{
            id = UserName,
            body = {[
                {<<"active">>, Active},
                {<<"email">>, ?l2b(couch_util:get_value("email", Form, ""))},
                % only the salted hash is stored, never the password
                {<<"password_sha">>, hash_password(Password, UserSalt)},
                {<<"roles">>, Roles},
                {<<"salt">>, UserSalt},
                {<<"type">>, <<"user">>},
                {<<"username">>, UserName}
            ]}
        },
        {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []),
        % Never log the cleartext password (the original debug line wrote
        % it to the log, and also passed UserName twice by mistake).
        ?LOG_DEBUG("User ~s created.", [UserName]),
        send_response(Req);
    _Result ->
        ?LOG_DEBUG("Can't create ~s: already exists", [UserName]),
        throw({forbidden, <<"User already exists.">>})
    end.
+
+% Delete a user document. Allowed for server admins (_admin role) or for the
+% user deleting their own account; anyone else gets a 403.
+delete_user(#httpd{user_ctx=UserCtx}=Req, Db, UserName) ->
+ case get_user(UserName) of
+ nil ->
+ throw({not_found, <<"User doesn't exist">>});
+ User ->
+ case lists:member(<<"_admin">>,UserCtx#user_ctx.roles) of
+ true ->
+ ok;
+ false when UserCtx#user_ctx.name == UserName ->
+ ok;
+ false ->
+ throw({forbidden, <<"You aren't allowed to delete the user">>})
+ end,
+ % deletion is a doc update with deleted=true at the current revision
+ {Pos,Rev} = couch_doc:parse_rev(couch_util:get_value(<<"_rev">>,User)),
+ UserDoc = #doc{
+ id = UserName,
+ revs = {Pos, [Rev]},
+ deleted = true
+ },
+ {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []),
+ send_response(Req)
+ end.
+
+% Read the "username" form field as a binary. When the field is missing,
+% get_value returns undefined and ?l2b raises badarg, converted to a 400.
+extract_username(Form) ->
+ try ?l2b(couch_util:get_value("username", Form))
+ catch error:badarg ->
+ throw({bad_request, <<"user accounts must have a username">>})
+ end.
+
+% Read the "password" form field as a binary; a missing field becomes a 400
+% (same badarg-on-undefined trick as extract_username/1).
+extract_password(Form) ->
+ try ?l2b(couch_util:get_value("password", Form))
+ catch error:badarg ->
+ throw({bad_request, <<"user accounts must have a password">>})
+ end.
+
+% Read both "username" and "password" form fields as binaries; a missing
+% field raises badarg inside ?l2b and is reported as one combined 400.
+extract_username_password(Form) ->
+ try
+ {?l2b(couch_util:get_value("username", Form)),
+ ?l2b(couch_util:get_value("password", Form))}
+ catch error:badarg ->
+ Msg = <<"user accounts must have a username and password">>,
+ throw({bad_request, Msg})
+ end.
+
+% Build an AuthSession cookie that clears the session: empty value, max_age 0,
+% and local_time pinned to a far-past date ({0,86400,0} = 1970-01-02).
+generate_cookie_buster() ->
+ T0 = calendar:now_to_datetime({0,86400,0}),
+ Opts = [{max_age,0}, {path,"/"}, {local_time,T0}],
+ mochiweb_cookies:cookie("AuthSession", "", Opts).
+
+% Build the "AuthSession" cookie: base64url(User ":" HexTime ":" RawHmac).
+% The layout must stay in sync with the parsing in cookie_auth_user/1.
+generate_cookie(User, Secret, TimeStamp) ->
+ SessionData = ?b2l(User) ++ ":" ++ erlang:integer_to_list(TimeStamp, 16),
+ Hash = crypto:sha_mac(Secret, SessionData),
+ Cookie = couch_util:encodeBase64Url(SessionData ++ ":" ++ ?b2l(Hash)),
+ % MaxAge = erlang:list_to_integer(couch_config:get("chttpd_auth",
+ % "timeout", "600")),
+ % TODO add {secure, true} to options when SSL is detected
+ mochiweb_cookies:cookie("AuthSession", Cookie, [{path, "/"}]).
+ % {max_age, MaxAge}]).
+
+% Hex-encoded SHA-1 of Password ++ Salt.
+% NOTE(review): a single salted SHA-1 iteration has no key stretching and is
+% weak by modern password-hashing standards.
+hash_password(Password, Salt) ->
+ ?l2b(couch_util:to_hex(crypto:sha(<<Password/binary, Salt/binary>>))).
+
+% Parse an application/x-www-form-urlencoded request body into a proplist;
+% any other content type is rejected with a 400.
+parse_form(MochiReq) ->
+ case MochiReq:get_primary_header_value("content-type") of
+ "application/x-www-form-urlencoded" ++ _ ->
+ ReqBody = MochiReq:recv_body(),
+ mochiweb_util:parse_qs(ReqBody);
+ _ ->
+ throw({bad_request, <<"you must specify "
+ "application/x-www-form-urlencoded as the primary content-type">>})
+ end.
+
+% Convenience wrapper: success response with no extra headers.
+send_response(Req) ->
+ send_response(Req, []).
+
+% Send {"ok":true}; when a "next" query-string parameter is present, respond
+% 302 with a Location header pointing at that (absolutized) URL instead of 200.
+send_response(Req, ExtraHeaders) ->
+ {Code, Headers} = case chttpd:qs_value(Req, "next", nil) of
+ nil -> {200, []};
+ Redirect ->
+ {302, [{"Location", chttpd:absolute_uri(Req, Redirect)}]}
+ end,
+ send_json(Req, Code, Headers ++ ExtraHeaders, {[{ok, true}]}).
+
+% Wall-clock seconds since the epoch (sub-second part discarded).
+% NOTE(review): erlang:now/0 is deprecated in modern OTP; os:timestamp/0 or
+% erlang:system_time(second) would be the current equivalents.
+timestamp() ->
+ {MegaSeconds, Seconds, _} = erlang:now(),
+ MegaSeconds * 1000000 + Seconds.
+
+% Update an existing user document from a form body. Admins may change any
+% password and assign roles; a user may change only their own password and
+% must supply the correct old_password. Everyone else gets a 403.
+update_user(#httpd{mochi_req=MochiReq, user_ctx=UserCtx}=Req, Db, UserName) ->
+ case get_user(UserName) of
+ nil ->
+ throw({not_found, <<"User doesn't exist">>});
+ User ->
+ Form = parse_form(MochiReq),
+ NewPassword = ?l2b(couch_util:get_value("password", Form, "")),
+ OldPassword = ?l2b(couch_util:get_value("old_password", Form, "")),
+
+ UserSalt = couch_util:get_value(<<"salt">>, User),
+ CurrentPasswordHash = couch_util:get_value(<<"password_sha">>, User),
+
+ Roles = [?l2b(R) || R <- proplists:get_all_values("roles", Form)],
+ if Roles /= [] ->
+ chttpd:verify_is_server_admin(Req);
+ true -> ok end,
+
+ % empty "password" field means: keep the existing hash unchanged
+ PasswordHash = case NewPassword of
+ <<>> ->
+ CurrentPasswordHash;
+ _Else ->
+ case lists:member(<<"_admin">>,UserCtx#user_ctx.roles) of
+ true ->
+ hash_password(NewPassword, UserSalt);
+ false when UserCtx#user_ctx.name == UserName ->
+ %% for user we test old password before allowing change
+ case hash_password(OldPassword, UserSalt) of
+ CurrentPasswordHash ->
+ hash_password(NewPassword, UserSalt);
+ _ ->
+ throw({forbidden, <<"Old password is incorrect.">>})
+ end;
+ _ ->
+ Msg = <<"You aren't allowed to change this password.">>,
+ throw({forbidden, Msg})
+ end
+ end,
+
+ % NOTE(review): the doc body is rebuilt from the form, so fields not
+ % resubmitted (e.g. roles, email) are reset to the form defaults.
+ Active = chttpd_view:parse_bool_param(couch_util:get_value("active",
+ Form, "true")),
+ {Pos,Rev} = couch_doc:parse_rev(couch_util:get_value(<<"_rev">>,User)),
+ UserDoc = #doc{
+ id = UserName,
+ revs = {Pos,[Rev]},
+ body = {[
+ {<<"active">>, Active},
+ {<<"email">>, ?l2b(couch_util:get_value("email", Form, ""))},
+ {<<"password_sha">>, PasswordHash},
+ {<<"roles">>, Roles},
+ {<<"salt">>, UserSalt},
+ {<<"type">>, <<"user">>},
+ {<<"username">>, UserName}
+ ]}
+ },
+ {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []),
+ ?LOG_DEBUG("User ~s updated.", [UserName]),
+ send_response(Req)
+ end.
diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl
new file mode 100644
index 00000000..94a43e20
--- /dev/null
+++ b/src/chttpd_db.erl
@@ -0,0 +1,939 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_db).
+-include("chttpd.hrl").
+
+-export([handle_request/1, handle_compact_req/2, handle_design_req/2,
+ db_req/2, couch_doc_open/4,handle_changes_req/2,
+ update_doc_result_to_json/1, update_doc_result_to_json/2,
+ handle_design_info_req/2, handle_view_cleanup_req/2]).
+
+-import(chttpd,
+ [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
+ start_json_response/2,send_chunk/2,end_json_response/1,
+ start_chunked_response/3, absolute_uri/2, send/2,
+ start_response_length/4]).
+
+-record(doc_query_args, {
+ options = [],
+ rev = nil,
+ open_revs = [],
+ show = nil
+}).
+
+% Database request handlers
+% Top-level router for /DbName/... requests: PUT and DELETE on the bare db
+% create/delete it; anything deeper dispatches through DbUrlHandlers, falling
+% back to db_req/2.
+handle_request(#httpd{path_parts=[DbName|RestParts],method=Method,
+ db_url_handlers=DbUrlHandlers}=Req)->
+ case {Method, RestParts} of
+ {'PUT', []} ->
+ create_db_req(Req, DbName);
+ {'DELETE', []} ->
+ delete_db_req(Req, DbName);
+ {_, []} ->
+ do_db_req(Req, fun db_req/2);
+ {_, [SecondPart|_]} ->
+ Handler = couch_util:get_value(SecondPart, DbUrlHandlers, fun db_req/2),
+ do_db_req(Req, Handler)
+ end.
+
+% GET /db/_changes. Builds a feed-aware callback: "continuous" streams one
+% JSON object per line, other feeds wrap rows in a {"results":[...]} envelope.
+% "normal" feeds get an ETag from the db info; longpoll/continuous stream
+% without one. Non-GET methods get a 405 via the second clause.
+handle_changes_req(#httpd{method='GET'}=Req, Db) ->
+ MakeCallback = fun(Resp) ->
+ fun({change, Change, _}, "continuous") ->
+ send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]);
+ ({change, Change, Prepend}, _) ->
+ send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]);
+ (start, "continuous") ->
+ ok;
+ (start, _) ->
+ send_chunk(Resp, "{\"results\":[\n");
+ ({stop, EndSeq}, "continuous") ->
+ send_chunk(
+ Resp,
+ [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"]
+ ),
+ end_json_response(Resp);
+ ({stop, EndSeq}, _) ->
+ send_chunk(
+ Resp,
+ io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq])
+ ),
+ end_json_response(Resp);
+ (timeout, _) ->
+ % heartbeat: a bare newline keeps the connection alive
+ send_chunk(Resp, "\n")
+ end
+ end,
+ ChangesArgs = parse_changes_query(Req),
+ ChangesFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
+ case ChangesArgs#changes_args.feed of
+ "normal" ->
+ {ok, Info} = couch_db:get_db_info(Db),
+ CurrentEtag = chttpd:make_etag(Info),
+ chttpd:etag_respond(
+ Req,
+ CurrentEtag,
+ fun() ->
+ {ok, Resp} = chttpd:start_json_response(
+ Req, 200, [{"Etag", CurrentEtag}]
+ ),
+ ChangesFun(MakeCallback(Resp))
+ end
+ );
+ _ ->
+ % "longpoll" or "continuous"
+ {ok, Resp} = chttpd:start_json_response(Req, 200),
+ ChangesFun(MakeCallback(Resp))
+ end;
+
+handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "GET,HEAD").
+
+% POST /db/_compact/DesignId compacts one view group; POST /db/_compact
+% compacts the whole db (optional start_seq query param). Both reply 202
+% (accepted, work continues in the background); other methods get 405.
+handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) ->
+ ok = ?COUCH:compact_view_group(DbName, Id),
+ send_json(Req, 202, {[{ok, true}]});
+
+handle_compact_req(#httpd{method='POST'}=Req, Db) ->
+ StartSeq = chttpd:qs_value(Req, "start_seq", "0"),
+ ok = ?COUCH:compact_db(Db, list_to_integer(StartSeq)),
+ send_json(Req, 202, {[{ok, true}]});
+
+handle_compact_req(Req, _Db) ->
+ send_method_not_allowed(Req, "POST").
+
+% POST /db/_view_cleanup.
+% NOTE(review): the actual cleanup call is commented out, so this replies 202
+% without doing any work.
+handle_view_cleanup_req(#httpd{method='POST'}=Req, _Db) ->
+ % delete unreferenced index files
+ % ok = ?COUCH:cleanup_view_index_files(Db),
+ send_json(Req, 202, {[{ok, true}]});
+
+handle_view_cleanup_req(Req, _Db) ->
+ send_method_not_allowed(Req, "POST").
+
+
+% Route /db/_design/Name/_action/... through the configured design URL
+% handlers; anything that is not an underscore-prefixed action falls back to
+% plain document handling via db_req/2.
+handle_design_req(#httpd{
+ path_parts=[_DbName,_Design,_DesName, <<"_",_/binary>> = Action | _Rest],
+ design_url_handlers = DesignUrlHandlers
+ }=Req, Db) ->
+ Handler = couch_util:get_value(Action, DesignUrlHandlers, fun db_req/2),
+ Handler(Req, Db);
+
+handle_design_req(Req, Db) ->
+ db_req(Req, Db).
+
+% GET /db/_design/Name/_info — returns the view group info for the design
+% document; other methods get a 405.
+handle_design_info_req(#httpd{
+ method='GET',
+ path_parts=[_DbName, _Design, DesignName, _]
+ }=Req, Db) ->
+ DesignId = <<"_design/", DesignName/binary>>,
+ {ok, GroupInfoList} = ?COUCH:get_view_group_info(Db, DesignId),
+ send_json(Req, 200, {[
+ {name, DesignName},
+ {view_index, {GroupInfoList}}
+ ]});
+
+handle_design_info_req(Req, _Db) ->
+ send_method_not_allowed(Req, "GET").
+
+% PUT /db — create a database. The cluster parameters n (replicas) and q
+% (shards) come from the query string; qs_value yields a string or undefined,
+% passed straight through to the create options.
+create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
+ N = chttpd:qs_value(Req, "n"),
+ Q = chttpd:qs_value(Req, "q"),
+ case ?COUCH:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of
+ ok ->
+ DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)),
+ send_json(Req, 201, [{"Location", DocUrl}], {[{ok, true}]});
+ Error ->
+ throw(Error)
+ end.
+
+% DELETE /db — delete a database; any backend error is rethrown so the
+% generic error handler renders it.
+delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
+ case ?COUCH:delete_db(DbName, [{user_ctx, UserCtx}]) of
+ ok ->
+ send_json(Req, 200, {[{ok, true}]});
+ Error ->
+ throw(Error)
+ end.
+
+% Invoke the handler with a lightweight #db stub carrying only the name — no
+% local open happens here; presumably the ?COUCH calls resolve the db
+% cluster-wide. TODO confirm no per-request auth check is needed at this layer.
+do_db_req(#httpd{path_parts=[DbName|_]}=Req, Fun) ->
+ Fun(Req, #db{name=DbName}).
+
+% GET /db — database info, scoped to the Cloudant customer derived from the
+% X-Cloudant-User and Host headers.
+db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) ->
+ Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"),
+ chttpd:header_value(Req, "Host")),
+ {ok, DbInfo} = ?COUCH:get_db_info(DbName, Customer),
+ send_json(Req, {DbInfo});
+
+% POST /db — create a document with a server-assigned uuid when the body has
+% no _id. ?batch=ok defers the write (202); otherwise the doc is written
+% synchronously and a Location header points at the new doc (201).
+db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) ->
+ Doc = couch_doc:from_json_obj(chttpd:json_body(Req)),
+ Doc2 = case Doc#doc.id of
+ <<"">> ->
+ Doc#doc{id=couch_util:new_uuid(), revs={0, []}};
+ _ ->
+ Doc
+ end,
+ DocId = Doc2#doc.id,
+ case chttpd:qs_value(Req, "batch") of
+ "ok" ->
+ % batch
+ ok = couch_batch_save:eventually_save_doc(
+ Db#db.name, Doc2, Db#db.user_ctx),
+ send_json(Req, 202, [], {[
+ {ok, true},
+ {id, DocId}
+ ]});
+ _Normal ->
+ % normal
+ {ok, NewRev} = ?COUCH:update_doc(Db, Doc2, []),
+ DocUrl = absolute_uri(
+ Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)),
+ send_json(Req, 201, [{"Location", DocUrl}], {[
+ {ok, true},
+ {id, DocId},
+ {rev, couch_doc:rev_to_str(NewRev)}
+ ]})
+ end;
+
+
+% Any other method on the bare /db path is rejected.
+db_req(#httpd{path_parts=[_DbName]}=Req, _Db) ->
+ send_method_not_allowed(Req, "DELETE,GET,HEAD,POST");
+
+% POST /db/_ensure_full_commit. With no ?seq, pending batches are committed
+% first; with ?seq, the commit is validated against the current update_seq.
+% NOTE(review): the RequiredSeq > CommittedSeq and the final true branches are
+% currently identical — see the inline hack comment.
+db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) ->
+ UpdateSeq = ?COUCH:get_update_seq(Db),
+ CommittedSeq = ?COUCH:get_committed_update_seq(Db),
+ {ok, StartTime} =
+ case chttpd:qs_value(Req, "seq") of
+ undefined ->
+ committed = couch_batch_save:commit_now(Db#db.name, Db#db.user_ctx),
+ ?COUCH:ensure_full_commit(Db);
+ RequiredStr ->
+ RequiredSeq = list_to_integer(RequiredStr),
+ if RequiredSeq > UpdateSeq ->
+ throw({bad_request,
+ "can't do a full commit ahead of current update_seq"});
+ RequiredSeq > CommittedSeq ->
+ % user asked for an explicit sequence, don't commit any batches
+ ?COUCH:ensure_full_commit(Db);
+ true ->
+ %% hack to make sure we always get cluster max time - APK
+ ?COUCH:ensure_full_commit(Db)
+ % {ok, Db#db.instance_start_time}
+ end
+ end,
+ send_json(Req, 201, {[
+ {ok, true},
+ {instance_start_time, StartTime}
+ ]});
+
+db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "POST");
+
+% POST /db/_bulk_docs. new_edits=true (default) assigns uuids to id-less docs
+% and parses each doc's _rev; new_edits=false replays replicated revisions
+% verbatim. all_or_nothing=true makes the update transactional (417 on abort).
+% Note: Options is bound inside the case branches and used afterwards —
+% legal but unidiomatic "exported binding" style.
+db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
+ couch_stats_collector:increment({httpd, bulk_requests}),
+ {JsonProps} = chttpd:json_body_obj(Req),
+ DocsArray = couch_util:get_value(<<"docs">>, JsonProps),
+ case chttpd:header_value(Req, "X-Couch-Full-Commit") of
+ "true" ->
+ Options = [full_commit];
+ "false" ->
+ Options = [delay_commit];
+ _ ->
+ Options = []
+ end,
+ case couch_util:get_value(<<"new_edits">>, JsonProps, true) of
+ true ->
+ Docs = lists:map(
+ fun({ObjProps} = JsonObj) ->
+ Doc = couch_doc:from_json_obj(JsonObj),
+ validate_attachment_names(Doc),
+ Id = case Doc#doc.id of
+ <<>> -> couch_util:new_uuid();
+ Id0 -> Id0
+ end,
+ case couch_util:get_value(<<"_rev">>, ObjProps) of
+ undefined ->
+ Revs = {0, []};
+ Rev ->
+ {Pos, RevId} = couch_doc:parse_rev(Rev),
+ Revs = {Pos, [RevId]}
+ end,
+ Doc#doc{id=Id,revs=Revs}
+ end,
+ DocsArray),
+ Options2 =
+ case couch_util:get_value(<<"all_or_nothing">>, JsonProps) of
+ true -> [all_or_nothing|Options];
+ _ -> Options
+ end,
+ case ?COUCH:update_docs(Db, Docs, Options2) of
+ {ok, Results} ->
+ % output the results
+ DocResults = lists:zipwith(fun update_doc_result_to_json/2,
+ Docs, Results),
+ send_json(Req, 201, DocResults);
+ {aborted, Errors} ->
+ ErrorsJson =
+ lists:map(fun update_doc_result_to_json/1, Errors),
+ send_json(Req, 417, ErrorsJson)
+ end;
+ false ->
+ Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
+ {ok, Errors} = ?COUCH:update_docs(Db, Docs, Options, replicated_changes),
+ ErrorsJson =
+ lists:map(fun update_doc_result_to_json/1, Errors),
+ send_json(Req, 201, ErrorsJson)
+ end;
+db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "POST");
+
+% POST /db/_purge — permanently remove the listed {DocId: [Revs]} pairs and
+% report the resulting purge_seq and what was actually purged.
+db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
+ {IdsRevs} = chttpd:json_body_obj(Req),
+ IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs],
+
+ case ?COUCH:purge_docs(Db, IdsRevs2) of
+ {ok, PurgeSeq, PurgedIdsRevs} ->
+ PurgedIdsRevs2 = [{Id, couch_doc:rev_to_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs],
+ send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]});
+ Error ->
+ throw(Error)
+ end;
+
+db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "POST");
+
+% GET/POST /db/_all_docs. POST accepts an optional "keys" array to fetch a
+% specific set of docs. (Minor: the runtime error string reads "a array" —
+% left untouched here since it is user-visible text.)
+db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
+ all_docs_view(Req, Db, nil);
+
+db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
+ {Fields} = chttpd:json_body_obj(Req),
+ Keys = couch_util:get_value(<<"keys">>, Fields, nil),
+ case Keys of
+ Keys when is_list(Keys) -> ok;
+ nil -> ?LOG_DEBUG("POST to _all_docs with no keys member.", []);
+ _ -> throw({bad_request, "`keys` member must be a array."})
+ end,
+ all_docs_view(Req, Db, Keys);
+
+db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "GET,HEAD,POST");
+
+% GET /db/_all_docs_by_seq.
+% NOTE(review): throw(not_implemented) on entry makes everything below it in
+% this clause dead code (kept, presumably, as a porting reference).
+db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) ->
+ throw(not_implemented),
+ #view_query_args{
+ start_key = StartKey,
+ limit = Limit,
+ skip = SkipCount,
+ direction = Dir
+ } = QueryArgs = chttpd_view:parse_view_params(Req, nil, map),
+
+ Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"),
+ chttpd:header_value(Req, "Host")),
+ {ok, Info} = ?COUCH:get_db_info(Db, Customer),
+ CurrentEtag = chttpd:make_etag(Info),
+ chttpd:etag_respond(Req, CurrentEtag, fun() ->
+ TotalRowCount = couch_util:get_value(doc_count, Info),
+ FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db,
+ TotalRowCount, #view_fold_helper_funs{
+ reduce_count = fun ?COUCH:enum_docs_since_reduce_to_count/1
+ }),
+ StartKey2 = case StartKey of
+ nil -> 0;
+ <<>> -> 100000000000;
+ {} -> 100000000000;
+ StartKey when is_integer(StartKey) -> StartKey
+ end,
+ {ok, FoldResult} = ?COUCH:enum_docs_since(Db, StartKey2, Dir,
+ fun(DocInfo, Offset, Acc) ->
+ #doc_info{
+ id=Id,
+ high_seq=Seq,
+ revs=[#rev_info{rev=Rev,deleted=Deleted} | RestInfo]
+ } = DocInfo,
+ ConflictRevs = couch_doc:rev_to_strs(
+ [Rev1 || #rev_info{deleted=false, rev=Rev1} <- RestInfo]),
+ DelConflictRevs = couch_doc:rev_to_strs(
+ [Rev1 || #rev_info{deleted=true, rev=Rev1} <- RestInfo]),
+ Json = {
+ [{<<"rev">>, couch_doc:rev_to_str(Rev)}] ++
+ case ConflictRevs of
+ [] -> [];
+ _ -> [{<<"conflicts">>, ConflictRevs}]
+ end ++
+ case DelConflictRevs of
+ [] -> [];
+ _ -> [{<<"deleted_conflicts">>, DelConflictRevs}]
+ end ++
+ case Deleted of
+ true -> [{<<"deleted">>, true}];
+ false -> []
+ end
+ },
+ FoldlFun({{Seq, Id}, Json}, Offset, Acc)
+ end, {Limit, SkipCount, undefined, [], nil}),
+ chttpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult})
+ end);
+
+db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "GET,HEAD");
+
+% POST /db/_missing_revs — given {DocId: [RevStrs]}, report which of those
+% revisions this db does not have (used by the replicator).
+db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) ->
+ {JsonDocIdRevs} = chttpd:json_body_obj(Req),
+ JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs],
+ {ok, Results} = ?COUCH:get_missing_revs(Db, JsonDocIdRevs2),
+ Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results],
+ send_json(Req, {[
+ {missing_revs, {Results2}}
+ ]});
+
+db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "POST");
+
+% PUT/GET /db/_admins — set or read the db admins list.
+db_req(#httpd{method='PUT',path_parts=[_,<<"_admins">>]}=Req,
+ Db) ->
+ Admins = chttpd:json_body(Req),
+ ok = ?COUCH:set_admins(Db, Admins),
+ send_json(Req, {[{<<"ok">>, true}]});
+
+db_req(#httpd{method='GET',path_parts=[_,<<"_admins">>]}=Req, Db) ->
+ send_json(Req, ?COUCH:get_admins(Db));
+
+db_req(#httpd{path_parts=[_,<<"_admins">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "PUT,GET");
+
+% PUT/GET /db/_revs_limit — set or read the revision history depth limit.
+db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req,
+ Db) ->
+ Limit = chttpd:json_body(Req),
+ ok = ?COUCH:set_revs_limit(Db, Limit),
+ send_json(Req, {[{<<"ok">>, true}]});
+
+db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) ->
+ send_json(Req, ?COUCH:get_revs_limit(Db));
+
+db_req(#httpd{path_parts=[_,<<"_revs_limit">>]}=Req, _Db) ->
+ send_method_not_allowed(Req, "PUT,GET");
+
+% Special case to enable using an unencoded slash in the URL of design docs,
+% as slashes in document IDs must otherwise be URL encoded.
+% Two path parts after _design → the design doc itself; more → an attachment.
+db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name]}=Req, Db) ->
+ db_doc_req(Req, Db, <<"_design/",Name/binary>>);
+
+db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name|FileNameParts]}=Req, Db) ->
+ db_attachment_req(Req, Db, <<"_design/",Name/binary>>, FileNameParts);
+
+
+% Special case to allow for accessing local documents without %2F
+% encoding the docid. Throws out requests that don't have the second
+% path part or that specify an attachment name.
+db_req(#httpd{path_parts=[_DbName, <<"_local">>, Name]}=Req, Db) ->
+ db_doc_req(Req, Db, <<"_local/", Name/binary>>);
+
+db_req(#httpd{path_parts=[_DbName, <<"_local">>]}, _Db) ->
+ throw({bad_request, <<"Missing _local document id.">>});
+
+db_req(#httpd{path_parts=[_DbName, <<"_local/">>]}, _Db) ->
+ throw({bad_request, <<"Missing _local document id.">>});
+
+db_req(#httpd{path_parts=[_DbName, <<"_local">> | _Rest]}, _Db) ->
+ throw({bad_request, <<"_local documents do not accept attachments.">>});
+
+db_req(#httpd{path_parts=[_DbName, <<"_local/", _/binary>>, _ | _]}, _Db) ->
+ throw({bad_request, <<"_local documents do not accept attachments.">>});
+
+% Fallthrough: /db/DocId is a document request, anything deeper is an
+% attachment request.
+db_req(#httpd{path_parts=[_, DocId]}=Req, Db) ->
+ db_doc_req(Req, Db, DocId);
+
+db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
+ db_attachment_req(Req, Db, DocId, FileNameParts).
+
+% Stream the _all_docs view; the backend writes the rows and this caps the
+% response with the closing chunk.
+% NOTE(review): the Etag is a fresh uuid on every request, so etag_respond can
+% never match a client's If-None-Match — caching is effectively disabled here.
+all_docs_view(Req, Db, Keys) ->
+ Etag = couch_util:new_uuid(),
+ QueryArgs = chttpd_view:parse_view_params(Req, nil, map),
+ chttpd:etag_respond(Req, Etag, fun() ->
+ {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]),
+ {ok, Total, Result} = ?COUCH:all_docs_view(Resp, Db, Keys, QueryArgs),
+ send_chunk(Resp, all_docs_final_chunk(Total, Result)),
+ end_json_response(Resp)
+ end).
+
+% Pick the closing chunk for the _all_docs response based on the fold state:
+% no rows emitted yet (3rd element undefined) → emit a complete empty result
+% object; rows already streamed → just close the array/object. Any non-tuple
+% Result is treated as an error and rethrown.
+all_docs_final_chunk(Total, {_, _, undefined, _, nil}) ->
+ ?JSON_ENCODE({[{total_rows, Total}, {offset, Total}, {rows, []}]});
+all_docs_final_chunk(Total, {_, _, undefined, _, Offset}) ->
+ ?JSON_ENCODE({[{total_rows, Total}, {offset, Offset}, {rows, []}]});
+all_docs_final_chunk(_, {_, _, _, _, _}) ->
+ "\r\n]}";
+all_docs_final_chunk(_, Error) ->
+ throw(Error).
+
+% DELETE /db/DocId — implemented as an update with _deleted=true; the doc is
+% opened first purely so a missing doc produces a 404.
+db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
+ % check for the existence of the doc to handle the 404 case.
+ couch_doc_open(Db, DocId, nil, []),
+ case chttpd:qs_value(Req, "rev") of
+ undefined ->
+ update_doc(Req, Db, DocId, {[{<<"_deleted">>,true}]});
+ Rev ->
+ update_doc(Req, Db, DocId, {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]})
+ end;
+
+% GET /db/DocId. Without ?show: a single rev is returned directly (with an
+% ETag only when the doc carries no meta), while ?open_revs streams a JSON
+% array of {"ok":Doc} / {"missing":Rev} entries. With ?show=design/name the
+% request is delegated to the show handler.
+db_doc_req(#httpd{method='GET'}=Req, Db, DocId) ->
+ #doc_query_args{
+ show = Format,
+ rev = Rev,
+ open_revs = Revs,
+ options = Options
+ } = parse_doc_query(Req),
+ case Format of
+ nil ->
+ case Revs of
+ [] ->
+ Doc = couch_doc_open(Db, DocId, Rev, Options),
+ DiskEtag = chttpd:doc_etag(Doc),
+ case Doc#doc.meta of
+ [] ->
+ % output etag only when we have no meta
+ chttpd:etag_respond(Req, DiskEtag, fun() ->
+ send_json(Req, 200, [{"Etag", DiskEtag}], couch_doc:to_json_obj(Doc, Options))
+ end);
+ _ ->
+ send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options))
+ end;
+ _ ->
+ {ok, Results} = ?COUCH:open_revs(Db, DocId, Revs, Options),
+ {ok, Resp} = start_json_response(Req, 200),
+ send_chunk(Resp, "["),
+ % We loop through the docs. The first time through the separator
+ % is whitespace, then a comma on subsequent iterations.
+ lists:foldl(
+ fun(Result, AccSeparator) ->
+ case Result of
+ {ok, Doc} ->
+ JsonDoc = couch_doc:to_json_obj(Doc, Options),
+ Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
+ send_chunk(Resp, AccSeparator ++ Json);
+ {{not_found, missing}, RevId} ->
+ Json = ?JSON_ENCODE({[{"missing", RevId}]}),
+ send_chunk(Resp, AccSeparator ++ Json)
+ end,
+ "," % AccSeparator now has a comma
+ end,
+ "", Results),
+ send_chunk(Resp, "]"),
+ end_json_response(Resp)
+ end;
+ _ ->
+ {DesignName, ShowName} = Format,
+ chttpd_show:handle_doc_show(Req, DesignName, ShowName, DocId, Db)
+ end;
+
+% POST /db/DocId — multipart/form-data attachment upload (the HTML form API).
+% Attachments named in the form replace same-named existing attachments; all
+% others on the doc are preserved.
+db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
+ couch_doc:validate_docid(DocId),
+ case chttpd:header_value(Req, "content-type") of
+ "multipart/form-data" ++ _Rest ->
+ ok;
+ _Else ->
+ throw({bad_ctype, <<"Invalid Content-Type header for form upload">>})
+ end,
+ Form = chttpd:parse_form(Req),
+ Rev = couch_doc:parse_rev(list_to_binary(couch_util:get_value("_rev", Form))),
+ {ok, [{ok, Doc}]} = ?COUCH:open_revs(Db, DocId, [Rev], []),
+
+ UpdatedAtts = [
+ #att{name=validate_attachment_name(Name),
+ type=list_to_binary(ContentType),
+ data=Content} ||
+ {Name, {ContentType, _}, Content} <-
+ proplists:get_all_values("_attachments", Form)
+ ],
+ #doc{atts=OldAtts} = Doc,
+ OldAtts2 = lists:flatmap(
+ fun(#att{name=OldName}=Att) ->
+ case [1 || A <- UpdatedAtts, A#att.name == OldName] of
+ [] -> [Att]; % the attachment wasn't in the UpdatedAtts, return it
+ _ -> [] % the attachment was in the UpdatedAtts, drop it
+ end
+ end, OldAtts),
+ NewDoc = Doc#doc{
+ atts = UpdatedAtts ++ OldAtts2
+ },
+ {ok, NewRev} = ?COUCH:update_doc(Db, NewDoc, []),
+
+ send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewRev)) ++ "\""}], {[
+ {ok, true},
+ {id, DocId},
+ {rev, couch_doc:rev_to_str(NewRev)}
+ ]});
+
+% PUT /db/DocId — write a document. ?batch=ok defers the write and replies
+% 202 with no rev; otherwise the write is synchronous with a Location header.
+db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
+ couch_doc:validate_docid(DocId),
+ Json = chttpd:json_body(Req),
+ case chttpd:qs_value(Req, "batch") of
+ "ok" ->
+ % batch
+ Doc = couch_doc_from_req(Req, DocId, Json),
+ ok = couch_batch_save:eventually_save_doc(Db#db.name, Doc, Db#db.user_ctx),
+ send_json(Req, 202, [], {[
+ {ok, true},
+ {id, DocId}
+ ]});
+ _Normal ->
+ % normal
+ DbName = couch_db:name(Db),
+ Location = absolute_uri(Req, <<"/", DbName/binary, "/", DocId/binary>>),
+ update_doc(Req, Db, DocId, Json, [{"Location", Location}])
+ end;
+
+% COPY /db/SourceDocId — copy a document (at ?rev or latest) to the doc id /
+% rev named in the Destination header, within the same database.
+db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) ->
+ SourceRev =
+ case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of
+ missing_rev -> nil;
+ Rev -> Rev
+ end,
+ {TargetDocId, TargetRevs} = parse_copy_destination_header(Req),
+ % open old doc
+ Doc = couch_doc_open(Db, SourceDocId, SourceRev, []),
+ % save new doc
+ {ok, NewTargetRev} = ?COUCH:update_doc(Db,
+ Doc#doc{id=TargetDocId, revs=TargetRevs}, []),
+ % respond
+ send_json(Req, 201,
+ [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}],
+ update_doc_result_to_json(TargetDocId, {ok, NewTargetRev}));
+
+db_doc_req(Req, _Db, _DocId) ->
+ send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY").
+
+
+% Render a per-doc update failure ({DocIdRev, Error}) as a JSON object with
+% id, rev, error and reason fields.
+update_doc_result_to_json({{Id, Rev}, Error}) ->
+ {_Code, Err, Msg} = chttpd:error_info(Error),
+ {[{id, Id}, {rev, couch_doc:rev_to_str(Rev)},
+ {error, Err}, {reason, Msg}]}.
+
+% Render a per-doc update result as JSON: {ok, Rev} → {id, rev};
+% anything else → {id, error, reason}. A #doc first argument is unwrapped to
+% its id.
+update_doc_result_to_json(#doc{id=DocId}, Result) ->
+ update_doc_result_to_json(DocId, Result);
+update_doc_result_to_json(DocId, {ok, NewRev}) ->
+ {[{id, DocId}, {rev, couch_doc:rev_to_str(NewRev)}]};
+update_doc_result_to_json(DocId, Error) ->
+ {_Code, ErrorStr, Reason} = chttpd:error_info(Error),
+ {[{id, DocId}, {error, ErrorStr}, {reason, Reason}]}.
+
+
+% Convenience wrapper: update with no extra response headers.
+update_doc(Req, Db, DocId, Json) ->
+ update_doc(Req, Db, DocId, Json, []).
+
+% Write a document and send the JSON result. X-Couch-Full-Commit selects the
+% commit option; {ok,_} → 201, {accepted,_} → 202, but a deletion always
+% responds 200 regardless. The new rev is echoed as a quoted Etag header.
+update_doc(Req, Db, DocId, Json, Headers) ->
+ #doc{deleted=Deleted} = Doc = couch_doc_from_req(Req, DocId, Json),
+
+ case chttpd:header_value(Req, "X-Couch-Full-Commit") of
+ "true" ->
+ Options = [full_commit];
+ "false" ->
+ Options = [delay_commit];
+ _ ->
+ Options = []
+ end,
+ {Status, NewRev} = case ?COUCH:update_doc(Db, Doc, Options) of
+ {ok, NewRev1} -> {201, NewRev1};
+ {accepted, NewRev1} -> {202, NewRev1}
+ end,
+ NewRevStr = couch_doc:rev_to_str(NewRev),
+ ResponseHeaders = [{"Etag", <<"\"", NewRevStr/binary, "\"">>}] ++ Headers,
+ send_json(Req, if Deleted -> 200; true -> Status end,
+ ResponseHeaders, {[
+ {ok, true},
+ {id, DocId},
+ {rev, NewRevStr}]}).
+
+% Build a #doc from a request body, validating attachment names and
+% reconciling the body's _rev with the If-Match header / ?rev query param
+% (extract_header_rev throws on a conflict between the two).
+couch_doc_from_req(Req, DocId, Json) ->
+ Doc = couch_doc:from_json_obj(Json),
+ validate_attachment_names(Doc),
+ ExplicitDocRev =
+ case Doc#doc.revs of
+ {Start,[RevId|_]} -> {Start, RevId};
+ _ -> undefined
+ end,
+ case extract_header_rev(Req, ExplicitDocRev) of
+ missing_rev ->
+ Revs = {0, []};
+ {Pos, Rev} ->
+ Revs = {Pos, [Rev]}
+ end,
+ Doc#doc{id=DocId, revs=Revs}.
+
+
+% Useful for debugging
+% couch_doc_open(Db, DocId) ->
+% couch_doc_open(Db, DocId, nil, []).
+
+% Open a document, either at its latest revision (Rev = nil) or at a specific
+% one; any open failure (e.g. not_found) is rethrown for the error handler.
+couch_doc_open(Db, DocId, Rev, Options) ->
+ case Rev of
+ nil -> % open most recent rev
+ case ?COUCH:open_doc(Db, DocId, Options) of
+ {ok, Doc} ->
+ Doc;
+ Error ->
+ throw(Error)
+ end;
+ _ -> % open a specific rev (deletions come back as stubs)
+ case ?COUCH:open_revs(Db, DocId, [Rev], Options) of
+ {ok, [{ok, Doc}]} ->
+ Doc;
+ {ok, [Else]} ->
+ throw(Else)
+ end
+ end.
+
+% Attachment request handlers
+
+% GET /db/DocId/AttName... — stream an attachment. Remaining path parts are
+% rejoined with "/" so attachment names may contain slashes. Responds with the
+% doc's etag, Content-Type and Content-Length, streaming segments via att_foldl.
+db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
+ FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")),
+ #doc_query_args{
+ rev=Rev,
+ options=Options
+ } = parse_doc_query(Req),
+ #doc{
+ atts=Atts
+ } = Doc = couch_doc_open(Db, DocId, Rev, Options),
+ case [A || A <- Atts, A#att.name == FileName] of
+ [] ->
+ throw({not_found, "Document is missing attachment"});
+ [#att{type=Type, len=Len}=Att] ->
+ Etag = chttpd:doc_etag(Doc),
+ chttpd:etag_respond(Req, Etag, fun() ->
+ {ok, Resp} = start_response_length(Req, 200, [
+ {"ETag", Etag},
+ {"Cache-Control", "must-revalidate"},
+ {"Content-Type", binary_to_list(Type)}
+ ], integer_to_list(Len)),
+ couch_doc:att_foldl(Att, fun(BinSegment, _) ->
+ send(Resp, BinSegment)
+ end, {ok, Resp})
+ end)
+ end;
+
+
+% PUT/DELETE /db/DocId/AttName... — add/replace or remove one attachment.
+% PUT builds a single #att from the request body (defaulting the content type
+% to application/octet-stream); DELETE contributes no new attachment. In both
+% cases the same-named attachment is dropped from the existing doc and the doc
+% is rewritten. Missing rev means a brand-new doc is created.
+db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts)
+ when (Method == 'PUT') or (Method == 'DELETE') ->
+ FileName = validate_attachment_name(
+ mochiweb_util:join(
+ lists:map(fun binary_to_list/1,
+ FileNameParts),"/")),
+
+ NewAtt = case Method of
+ 'DELETE' ->
+ [];
+ _ ->
+ [#att{
+ name=FileName,
+ type = case chttpd:header_value(Req,"Content-Type") of
+ undefined ->
+ % We could throw an error here or guess by the FileName.
+ % Currently, just giving it a default.
+ <<"application/octet-stream">>;
+ CType ->
+ list_to_binary(CType)
+ end,
+ data = ?COUCH:att_receiver(Req, chttpd:body_length(Req)),
+ len = case chttpd:header_value(Req,"Content-Length") of
+ undefined ->
+ undefined;
+ Length ->
+ list_to_integer(Length)
+ end
+ }]
+ end,
+
+ Doc = case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of
+ missing_rev -> % make the new doc
+ couch_doc:validate_docid(DocId),
+ #doc{id=DocId};
+ Rev ->
+ case ?COUCH:open_revs(Db, DocId, [Rev], []) of
+ {ok, [{ok, Doc0}]} -> Doc0;
+ {ok, [Error]} -> throw(Error)
+ end
+ end,
+
+ #doc{atts=Atts} = Doc,
+ DocEdited = Doc#doc{
+ atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName]
+ },
+ {ok, UpdatedRev} = ?COUCH:update_doc(Db, DocEdited, []),
+ DbName = couch_db:name(Db),
+
+ {Status, Headers} = case Method of
+ 'DELETE' ->
+ {200, []};
+ _ ->
+ {201, [{"Location", absolute_uri(Req, "/" ++
+ binary_to_list(DbName) ++ "/" ++
+ binary_to_list(DocId) ++ "/" ++
+ binary_to_list(FileName)
+ )}]}
+ end,
+ send_json(Req,Status, Headers, {[
+ {ok, true},
+ {id, DocId},
+ {rev, couch_doc:rev_to_str(UpdatedRev)}
+ ]});
+
+db_attachment_req(Req, _Db, _DocId, _FileNameParts) ->
+ send_method_not_allowed(Req, "DELETE,GET,HEAD,PUT").
+
+% Split a "DesignName/ShowName" format string into a binary pair; any string
+% without a "/" (or any non-string input) is a 400.
+parse_doc_format(FormatStr) when is_binary(FormatStr) ->
+ parse_doc_format(?b2l(FormatStr));
+parse_doc_format(FormatStr) when is_list(FormatStr) ->
+ SplitFormat = lists:splitwith(fun($/) -> false; (_) -> true end, FormatStr),
+ case SplitFormat of
+ {DesignName, [$/ | ShowName]} -> {?l2b(DesignName), ?l2b(ShowName)};
+ _Else -> throw({bad_request, <<"Invalid doc format">>})
+ end;
+parse_doc_format(_BadFormatStr) ->
+ throw({bad_request, <<"Invalid doc format">>}).
+
+% Fold the request's query string into a #doc_query_args record. Boolean
+% flags accumulate open_doc options; rev/open_revs/show set dedicated fields;
+% r and w carry cluster read/write quorums; unknown keys are ignored.
+parse_doc_query(Req) ->
+ lists:foldl(fun({Key,Value}, Args) ->
+ case {Key, Value} of
+ {"attachments", "true"} ->
+ Options = [attachments | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"meta", "true"} ->
+ Options = [revs_info, conflicts, deleted_conflicts | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"revs", "true"} ->
+ Options = [revs | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"local_seq", "true"} ->
+ Options = [local_seq | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"revs_info", "true"} ->
+ Options = [revs_info | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"conflicts", "true"} ->
+ Options = [conflicts | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"deleted_conflicts", "true"} ->
+ Options = [deleted_conflicts | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"rev", Rev} ->
+ Args#doc_query_args{rev=couch_doc:parse_rev(Rev)};
+ {"open_revs", "all"} ->
+ Args#doc_query_args{open_revs=all};
+ {"open_revs", RevsJsonStr} ->
+ JsonArray = ?JSON_DECODE(RevsJsonStr),
+ Args#doc_query_args{open_revs=[couch_doc:parse_rev(Rev) || Rev <- JsonArray]};
+ {"show", FormatStr} ->
+ Args#doc_query_args{show=parse_doc_format(FormatStr)};
+ {"r", R} ->
+ Options = [{r,R} | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ {"w", W} ->
+ Options = [{w,W} | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
+ _Else -> % unknown key value pair, ignore.
+ Args
+ end
+ end, #doc_query_args{}, chttpd:qs(Req)).
+
+% Fold the request's query string into a #changes_args record. Note that
+% heartbeat accepts either the literal "true" or a millisecond integer, and
+% style values go through list_to_existing_atom so arbitrary input cannot
+% mint new atoms. Unknown keys are ignored.
+parse_changes_query(Req) ->
+ lists:foldl(fun({Key, Value}, Args) ->
+ case {Key, Value} of
+ {"feed", _} ->
+ Args#changes_args{feed=Value};
+ {"descending", "true"} ->
+ Args#changes_args{dir=rev};
+ {"since", _} ->
+ Args#changes_args{since=list_to_integer(Value)};
+ {"limit", _} ->
+ Args#changes_args{limit=list_to_integer(Value)};
+ {"style", _} ->
+ Args#changes_args{style=list_to_existing_atom(Value)};
+ {"heartbeat", "true"} ->
+ Args#changes_args{heartbeat=true};
+ {"heartbeat", _} ->
+ Args#changes_args{heartbeat=list_to_integer(Value)};
+ {"timeout", _} ->
+ Args#changes_args{timeout=list_to_integer(Value)};
+ {"include_docs", "true"} ->
+ Args#changes_args{include_docs=true};
+ {"filter", _} ->
+ Args#changes_args{filter=Value};
+ _Else -> % unknown key value pair, ignore.
+ Args
+ end
+ end, #changes_args{}, chttpd:qs(Req)).
+
+% Reconcile an explicitly supplied rev (query param or doc body) with the
+% If-Match header. Returns missing_rev when neither is given, the available
+% one when only one is, and throws a 400 when both are given but disagree.
+extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)->
+ extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev));
+extract_header_rev(Req, ExplicitRev) ->
+ Etag = case chttpd:header_value(Req, "If-Match") of
+ undefined -> undefined;
+ Value -> couch_doc:parse_rev(string:strip(Value, both, $"))
+ end,
+ case {ExplicitRev, Etag} of
+ {undefined, undefined} -> missing_rev;
+ {_, undefined} -> ExplicitRev;
+ {undefined, _} -> Etag;
+ _ when ExplicitRev == Etag -> Etag;
+ _ ->
+ throw({bad_request, "Document rev and etag have different values"})
+ end.
+
+
+% Parse the COPY Destination header: either a bare doc id (new doc, rev
+% {0,[]}) or "DocId?rev=RevStr" targeting an existing revision.
+parse_copy_destination_header(Req) ->
+ Destination = chttpd:header_value(Req, "Destination"),
+ case re:run(Destination, "\\?", [{capture, none}]) of
+ nomatch ->
+ {list_to_binary(Destination), {0, []}};
+ match ->
+ [DocId, RevQs] = re:split(Destination, "\\?", [{return, list}]),
+ [_RevQueryKey, Rev] = re:split(RevQs, "=", [{return, list}]),
+ {Pos, RevId} = couch_doc:parse_rev(Rev),
+ {list_to_binary(DocId), {Pos, [RevId]}}
+ end.
+
+% Validate every attachment name on a doc; throws bad_request on the first
+% invalid one.
+validate_attachment_names(Doc) ->
+ lists:foreach(fun(#att{name=Name}) ->
+ validate_attachment_name(Name)
+ end, Doc#doc.atts).
+
+% Validate one attachment name: no leading underscore, and it must pass the
+% is_valid_utf8/1 byte check. Returns the name (as a binary) on success.
+validate_attachment_name(Name) when is_list(Name) ->
+ validate_attachment_name(list_to_binary(Name));
+validate_attachment_name(<<"_",_/binary>>) ->
+ throw({bad_request, <<"Attachment name can't start with '_'">>});
+validate_attachment_name(Name) ->
+ case is_valid_utf8(Name) of
+ true -> Name;
+ false -> throw({bad_request, <<"Attachment name is not UTF-8 encoded">>})
+ end.
+
+%% borrowed from mochijson2:json_bin_is_safe()
+% NOTE(review): despite the name, this accepts only printable ASCII without
+% JSON-special characters — every byte >= 16#7f returns false (bytes are
+% 0..255, so the =< 16#10FFFF bound is moot), meaning genuinely valid
+% multi-byte UTF-8 names are rejected.
+is_valid_utf8(<<>>) ->
+ true;
+is_valid_utf8(<<C, Rest/binary>>) ->
+ case C of
+ $\" ->
+ false;
+ $\\ ->
+ false;
+ $\b ->
+ false;
+ $\f ->
+ false;
+ $\n ->
+ false;
+ $\r ->
+ false;
+ $\t ->
+ false;
+ C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ false;
+ C when C < 16#7f ->
+ is_valid_utf8(Rest);
+ _ ->
+ false
+ end.
diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl
new file mode 100644
index 00000000..d096bff9
--- /dev/null
+++ b/src/chttpd_external.erl
@@ -0,0 +1,166 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_external).
+
+-export([handle_external_req/2, handle_external_req/3]).
+-export([send_external_response/2, json_req_obj/2]).
+-export([default_or_content_type/2, parse_external_response/1]).
+
+-import(chttpd,[send_error/4]).
+
+-include("chttpd.hrl").
+
+% handle_external_req/2
+% for the old type of config usage:
+% _external = {chttpd_external, handle_external_req}
+% with urls like
+% /db/_external/action/design/name
+% Dispatches on path shape: with a server name present, runs the external
+% process; with only /db/_external, replies 404 (no server name); anything
+% else is a broken routing assumption, also 404. send_error/4 is imported
+% from chttpd.
+handle_external_req(#httpd{
+                        path_parts=[_DbName, _External, UrlName | _Path]
+                    }=HttpReq, Db) ->
+    process_external_req(HttpReq, Db, UrlName);
+handle_external_req(#httpd{path_parts=[_, _]}=Req, _Db) ->
+    send_error(Req, 404, <<"external_server_error">>, <<"No server name specified.">>);
+handle_external_req(Req, _) ->
+    send_error(Req, 404, <<"external_server_error">>, <<"Broken assumption">>).
+
+% handle_external_req/3
+% for this type of config usage:
+% _action = {chttpd_external, handle_external_req, <<"action">>}
+% with urls like
+% /db/_action/design/name
+% The external server name comes from configuration rather than the URL.
+handle_external_req(HttpReq, Db, Name) ->
+    process_external_req(HttpReq, Db, Name).
+
+% Build the JSON request object and hand it to the named external server via
+% couch_external_manager; unknown server names become a 404, any other
+% response is relayed to the client by send_external_response/2.
+process_external_req(HttpReq, Db, Name) ->
+
+    Response = couch_external_manager:execute(binary_to_list(Name),
+        json_req_obj(HttpReq, Db)),
+
+    case Response of
+    {unknown_external_server, Msg} ->
+        send_error(HttpReq, 404, <<"external_server_error">>, Msg);
+    _ ->
+        send_external_response(HttpReq, Response)
+    end.
+
+% Build the EJSON request object passed to an external process: db info,
+% HTTP verb, path, query params, headers, body, parsed form, cookies and
+% user context. Reads the request body from the socket only if it was not
+% already buffered in #httpd.req_body.
+% NOTE(review): Customer comes from cloudant_util:customer_name/2 using the
+% X-Cloudant-User/Host headers — presumably a multi-tenant prefix; verify
+% against that module.
+json_req_obj(#httpd{mochi_req=Req,
+              method=Verb,
+              path_parts=Path,
+              req_body=ReqBody
+            }, Db) ->
+    Body = case ReqBody of
+        undefined -> Req:recv_body();
+        Else -> Else
+    end,
+    % form fields are only parsed for urlencoded bodies
+    ParsedForm = case Req:get_primary_header_value("content-type") of
+        "application/x-www-form-urlencoded" ++ _ ->
+            mochiweb_util:parse_qs(Body);
+        _ ->
+            []
+    end,
+    Headers = Req:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Customer = cloudant_util:customer_name(
+        Req:get_header_value("X-Cloudant-User"), Req:get_header_value("Host")),
+    {ok, Info} = ?COUCH:get_db_info(Db, Customer),
+
+    % send correct path to customer - BugzID 6849
+    % strip a leading "Customer/" prefix from the db name if present
+    CustomerBin = list_to_binary(Customer),
+    Len = byte_size(CustomerBin),
+    FixedPath = case Path of
+    [<<CustomerBin:Len/binary, "/", DbName/binary>> | Rest] ->
+        [DbName | Rest];
+    NoCustomer ->
+        NoCustomer
+    end,
+
+    % add headers...
+    {[{<<"info">>, {Info}},
+        {<<"verb">>, Verb},
+        {<<"path">>, FixedPath},
+        {<<"query">>, to_json_terms(Req:parse_qs())},
+        {<<"headers">>, to_json_terms(Hlist)},
+        {<<"body">>, Body},
+        {<<"form">>, to_json_terms(ParsedForm)},
+        {<<"cookie">>, to_json_terms(Req:parse_cookie())},
+        {<<"userCtx">>, couch_util:json_user_ctx(Db)}]}.
+
+% Convert a proplist with atom or string keys and string values into an
+% EJSON object ({[{BinKey, BinValue}, ...]}). Values must be lists
+% (list_to_binary/1 crashes otherwise).
+to_json_terms(Data) ->
+    to_json_terms(Data, []).
+to_json_terms([], Acc) ->
+    {lists:reverse(Acc)};
+to_json_terms([{Key, Value} | Rest], Acc) when is_atom(Key) ->
+    to_json_terms(Rest, [{list_to_binary(atom_to_list(Key)), list_to_binary(Value)} | Acc]);
+to_json_terms([{Key, Value} | Rest], Acc) ->
+    to_json_terms(Rest, [{list_to_binary(Key), list_to_binary(Value)} | Acc]).
+
+
+% Translate an external process's EJSON response into an HTTP response:
+% parse it into #extern_resp_args{} and respond with the code, data and
+% headers (plus the server header, with a defaulted Content-Type).
+send_external_response(#httpd{mochi_req=MochiReq}, Response) ->
+    #extern_resp_args{
+        code = Code,
+        data = Data,
+        ctype = CType,
+        headers = Headers
+    } = parse_external_response(Response),
+    Resp = MochiReq:respond({Code,
+        default_or_content_type(CType, Headers ++ chttpd:server_header()), Data}),
+    {ok, Resp}.
+
+% Fold the fields of an external process's EJSON response object into an
+% #extern_resp_args{} record. Recognized keys: code, stop, json (encoded and
+% served as application/json), body (text/html), base64 (decoded binary),
+% headers (converted to {list, list} pairs). Empty-string keys are ignored;
+% any other key throws {external_response_error, _}.
+% Later keys overwrite earlier ones (e.g. both json and body set data/ctype).
+parse_external_response({Response}) ->
+    lists:foldl(fun({Key,Value}, Args) ->
+        case {Key, Value} of
+            {"", _} ->
+                Args;
+            {<<"code">>, Value} ->
+                Args#extern_resp_args{code=Value};
+            {<<"stop">>, true} ->
+                Args#extern_resp_args{stop=true};
+            {<<"json">>, Value} ->
+                Args#extern_resp_args{
+                    data=?JSON_ENCODE(Value),
+                    ctype="application/json"};
+            {<<"body">>, Value} ->
+                Args#extern_resp_args{data=Value, ctype="text/html; charset=utf-8"};
+            {<<"base64">>, Value} ->
+                Args#extern_resp_args{
+                    data=couch_util:decodeBase64(Value),
+                    ctype="application/binary"
+                };
+            {<<"headers">>, {Headers}} ->
+                NewHeaders = lists:map(fun({Header, HVal}) ->
+                    {binary_to_list(Header), binary_to_list(HVal)}
+                end, Headers),
+                Args#extern_resp_args{headers=NewHeaders};
+            _ -> % unknown key
+                Msg = lists:flatten(io_lib:format("Invalid data from external server: ~p", [{Key, Value}])),
+                throw({external_response_error, Msg})
+        end
+    end, #extern_resp_args{}, Response).
+
+% Ensure the header list carries exactly one Content-Type, appending the
+% default when none was supplied by the external response.
+% NOTE(review): the partition matches the exact string "Content-Type" only —
+% a differently-cased header name would not be recognized and a second
+% Content-Type would be appended (see the XXX below about multiples).
+default_or_content_type(DefaultContentType, Headers) ->
+    {ContentType, OtherHeaders} = lists:partition(
+        fun({HeaderName, _}) ->
+            HeaderName == "Content-Type"
+        end, Headers),
+
+    % XXX: What happens if we were passed multiple content types? We add another?
+    case ContentType of
+    [{"Content-Type", SetContentType}] ->
+        TrueContentType = SetContentType;
+    _Else ->
+        TrueContentType = DefaultContentType
+    end,
+
+    HeadersWithContentType = lists:append(OtherHeaders, [{"Content-Type", TrueContentType}]),
+    HeadersWithContentType.
diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl
new file mode 100644
index 00000000..8867dfbe
--- /dev/null
+++ b/src/chttpd_misc.erl
@@ -0,0 +1,232 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_misc).
+
+-export([handle_welcome_req/2,handle_favicon_req/2,handle_utils_dir_req/2,
+ handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1,
+ handle_uuids_req/1,handle_config_req/1,handle_log_req/1,
+ handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1,
+ handle_utils_dir_req/1]).
+
+-export([increment_update_seq_req/2]).
+
+
+-include("chttpd.hrl").
+
+-import(chttpd,
+ [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
+ start_json_response/2,send_chunk/2,end_json_response/1,
+ start_chunked_response/3, send_error/4]).
+
+% httpd global handlers
+
+% GET / — the welcome message plus a version string and the cluster build
+% (from get_version/0). The /1 form supplies the default message; non-GET
+% methods get 405.
+handle_welcome_req(Req) ->
+    handle_welcome_req(Req, <<"Welcome">>).
+
+handle_welcome_req(#httpd{method='GET'}=Req, WelcomeMessage) ->
+    send_json(Req, {[
+        {couchdb, WelcomeMessage},
+        {version, <<"0.10.1+">>},
+        {cloudant_build, get_version()}
+    ]});
+handle_welcome_req(Req, _) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+% Read the "dbcore" release version from release_handler:which_releases/0,
+% preferring the current release, then the permanent one, and falling back
+% to "dev" (e.g. when running from source). Returns a binary.
+get_version() ->
+    Releases = release_handler:which_releases(),
+    Version = case [V || {"dbcore", V, _, current} <- Releases] of
+    [] ->
+        case [V || {"dbcore", V, _, permanent} <- Releases] of
+        [] ->
+            "dev";
+        [Permanent] ->
+            Permanent
+        end;
+    [Current] ->
+        Current
+    end,
+    list_to_binary(Version).
+
+% GET /favicon.ico — serve the icon file from DocumentRoot; 405 otherwise.
+handle_favicon_req(#httpd{method='GET'}=Req, DocumentRoot) ->
+    chttpd:serve_file(Req, "favicon.ico", DocumentRoot);
+handle_favicon_req(Req, _) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+% Serve the Futon static files under /_utils from the configured docroot
+% ([chttpd] docroot). A bare /_utils (no trailing slash) is redirected to
+% /_utils/ so relative links resolve.
+handle_utils_dir_req(Req) ->
+    handle_utils_dir_req(Req, couch_config:get("chttpd", "docroot")).
+
+handle_utils_dir_req(#httpd{method='GET'}=Req, DocumentRoot) ->
+    "/" ++ UrlPath = chttpd:path(Req),
+    case chttpd:partition(UrlPath) of
+    {_ActionKey, "/", RelativePath} ->
+        % GET /_utils/path or GET /_utils/
+        chttpd:serve_file(Req, RelativePath, DocumentRoot);
+    {_ActionKey, "", _RelativePath} ->
+        % GET /_utils
+        RedirectPath = chttpd:path(Req) ++ "/",
+        chttpd:send_redirect(Req, RedirectPath)
+    end;
+handle_utils_dir_req(Req, _) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+% GET /_sleep?time=N — testing endpoint that blocks the request process for
+% N milliseconds, then replies {"ok": true}. The receive on the atom
+% 'snicklefart' never matches; it exists only for the 'after' timeout.
+handle_sleep_req(#httpd{method='GET'}=Req) ->
+    Time = list_to_integer(chttpd:qs_value(Req, "time")),
+    receive snicklefart -> ok after Time -> ok end,
+    send_json(Req, {[{ok, true}]});
+handle_sleep_req(Req) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+% GET /_all_dbs — list the cluster's databases via fabric:all_dbs/1, scoped
+% to the customer derived from the X-Cloudant-User / Host headers.
+handle_all_dbs_req(#httpd{method='GET'}=Req) ->
+    Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"),
+        chttpd:header_value(Req, "Host")),
+    {ok, DbNames} = fabric:all_dbs(Customer),
+    send_json(Req, DbNames);
+handle_all_dbs_req(Req) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+
+% GET /_active_tasks — report couch_task_status:all() as a JSON array.
+handle_task_status_req(#httpd{method='GET'}=Req) ->
+    % convert the list of prop lists to a list of json objects
+    send_json(Req, [{Props} || Props <- couch_task_status:all()]);
+handle_task_status_req(Req) ->
+    send_method_not_allowed(Req, "GET,HEAD").
+
+% POST /_replicate — start a replication described by the request body.
+% Continuous replications reply 202 with a _local_id; one-shot replications
+% reply with the replication results. db_not_found/node_not_connected throws
+% map to 404; other errors to 500.
+% NOTE(review): the body is read from the process dictionary (get(post_body))
+% — presumably stashed by the chttpd request machinery; verify the key is set
+% on this code path.
+handle_replicate_req(#httpd{method='POST'}=Req) ->
+    PostBody = get(post_body),
+    try ?COUCH:replicate_db(PostBody, Req#httpd.user_ctx) of
+    {ok, {continuous, RepId}} ->
+        send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]});
+    {ok, {JsonResults}} ->
+        send_json(Req, {[{ok, true} | JsonResults]});
+    {error, {Type, Details}} ->
+        send_json(Req, 500, {[{error, Type}, {reason, Details}]});
+    {error, Reason} ->
+        send_json(Req, 500, {[{error, Reason}]})
+    catch
+    throw:{db_not_found, Msg} ->
+        send_json(Req, 404, {[{error, db_not_found}, {reason, Msg}]});
+    throw:{node_not_connected, Msg} ->
+        send_json(Req, 404, {[{error, node_not_connected}, {reason, Msg}]})
+    end;
+handle_replicate_req(Req) ->
+    send_method_not_allowed(Req, "POST").
+
+
+% POST /_restart — restart the core couch server on this node.
+handle_restart_req(#httpd{method='POST'}=Req) ->
+    couch_server_sup:restart_core_server(),
+    send_json(Req, 200, {[{ok, true}]});
+handle_restart_req(Req) ->
+    send_method_not_allowed(Req, "POST").
+
+
+% GET /_uuids?count=N — return N (default 1) fresh uuids. The response is
+% wrapped in etag_respond but also carries aggressive cache-busting headers
+% so proxies never serve stale uuids.
+handle_uuids_req(#httpd{method='GET'}=Req) ->
+    Count = list_to_integer(chttpd:qs_value(Req, "count", "1")),
+    % generate the uuids
+    UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)],
+    % send a JSON response
+    Etag = chttpd:make_etag(UUIDs),
+    chttpd:etag_respond(Req, Etag, fun() ->
+        CacheBustingHeaders = [
+            {"Date", httpd_util:rfc1123_date()},
+            {"Cache-Control", "no-cache"},
+            % Past date, ON PURPOSE!
+            {"Expires", "Fri, 01 Jan 1990 00:00:00 GMT"},
+            {"Pragma", "no-cache"},
+            {"ETag", Etag}
+        ],
+        send_json(Req, 200, CacheBustingHeaders, {[{<<"uuids">>, UUIDs}]})
+    end);
+handle_uuids_req(Req) ->
+    send_method_not_allowed(Req, "GET").
+
+
+% Config request handler
+
+
+% GET /_config/
+% GET /_config
+% The _config endpoint, dispatching on method and path depth:
+%   GET /_config            -> all sections as one JSON object
+%   GET /_config/S          -> one section
+%   GET /_config/S/K        -> one value (404 if absent)
+%   PUT /_config/S/K        -> set value (body is a JSON string), reply old one
+%   DELETE /_config/S/K     -> remove value, reply old one
+% PUT/DELETE honor X-Couch-Persist: "false" to skip writing the ini file.
+% Note this reads/writes THIS node's couch_config only.
+handle_config_req(#httpd{method='GET', path_parts=[_]}=Req) ->
+    % group the flat {{Section, Key}, Value} list into per-section objects
+    Grouped = lists:foldl(fun({{Section, Key}, Value}, Acc) ->
+        case dict:is_key(Section, Acc) of
+        true ->
+            dict:append(Section, {list_to_binary(Key), list_to_binary(Value)}, Acc);
+        false ->
+            dict:store(Section, [{list_to_binary(Key), list_to_binary(Value)}], Acc)
+        end
+    end, dict:new(), couch_config:all()),
+    KVs = dict:fold(fun(Section, Values, Acc) ->
+        [{list_to_binary(Section), {Values}} | Acc]
+    end, [], Grouped),
+    send_json(Req, 200, {KVs});
+% GET /_config/Section
+handle_config_req(#httpd{method='GET', path_parts=[_,Section]}=Req) ->
+    KVs = [{list_to_binary(Key), list_to_binary(Value)}
+            || {Key, Value} <- couch_config:get(Section)],
+    send_json(Req, 200, {KVs});
+% PUT /_config/Section/Key
+% "value"
+handle_config_req(#httpd{method='PUT', path_parts=[_, Section, Key]}=Req) ->
+    Value = chttpd:json_body(Req),
+    Persist = chttpd:header_value(Req, "X-Couch-Persist") /= "false",
+    OldValue = couch_config:get(Section, Key, ""),
+    ok = couch_config:set(Section, Key, ?b2l(Value), Persist),
+    send_json(Req, 200, list_to_binary(OldValue));
+% GET /_config/Section/Key
+handle_config_req(#httpd{method='GET', path_parts=[_, Section, Key]}=Req) ->
+    case couch_config:get(Section, Key, null) of
+    null ->
+        throw({not_found, unknown_config_value});
+    Value ->
+        send_json(Req, 200, list_to_binary(Value))
+    end;
+% DELETE /_config/Section/Key
+handle_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req) ->
+    Persist = chttpd:header_value(Req, "X-Couch-Persist") /= "false",
+    case couch_config:get(Section, Key, null) of
+    null ->
+        throw({not_found, unknown_config_value});
+    OldValue ->
+        couch_config:delete(Section, Key, Persist),
+        send_json(Req, 200, list_to_binary(OldValue))
+    end;
+handle_config_req(Req) ->
+    send_method_not_allowed(Req, "GET,PUT,DELETE").
+
+
+% httpd db handlers
+
+% POST /db/_increment_update_seq — bump the db's update sequence and return
+% the new value.
+increment_update_seq_req(#httpd{method='POST'}=Req, Db) ->
+    {ok, NewSeq} = ?COUCH:increment_update_seq(Db),
+    send_json(Req, {[{ok, true},
+        {update_seq, NewSeq}
+    ]});
+increment_update_seq_req(Req, _Db) ->
+    send_method_not_allowed(Req, "POST").
+
+% httpd log handlers
+
+% GET /_log?bytes=N&offset=M — return the last N bytes of the node's log
+% (default 1000 bytes, offset 0) as plain text.
+% NOTE(review): a Content-Length header is set on a *chunked* response, and
+% length(Chunk) assumes Chunk is a flat list — confirm couch_log:read/2
+% returns one; an iolist or binary would make this wrong/crash.
+handle_log_req(#httpd{method='GET'}=Req) ->
+    Bytes = list_to_integer(chttpd:qs_value(Req, "bytes", "1000")),
+    Offset = list_to_integer(chttpd:qs_value(Req, "offset", "0")),
+    Chunk = couch_log:read(Bytes, Offset),
+    {ok, Resp} = start_chunked_response(Req, 200, [
+        % send a plaintext response
+        {"Content-Type", "text/plain; charset=utf-8"},
+        {"Content-Length", integer_to_list(length(Chunk))}
+    ]),
+    send_chunk(Resp, Chunk),
+    % empty chunk terminates the chunked response
+    send_chunk(Resp, "");
+handle_log_req(Req) ->
+    send_method_not_allowed(Req, "GET").
+
+
diff --git a/src/chttpd_oauth.erl b/src/chttpd_oauth.erl
new file mode 100644
index 00000000..58fafbb8
--- /dev/null
+++ b/src/chttpd_oauth.erl
@@ -0,0 +1,168 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_oauth).
+-include("chttpd.hrl").
+
+-export([oauth_authentication_handler/1, handle_oauth_req/1, consumer_lookup/2]).
+
+% OAuth auth handler using per-node user db
+% Verify the request's OAuth signature against the token secret stored in
+% the [oauth_token_secrets] config section; on success attach a user_ctx via
+% set_user_ctx/2, on failure (or when no OAuth params are present — the
+% 'true' FailSilently flag) pass the request through unauthenticated so the
+% next handler can try.
+oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) ->
+    serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
+        AccessToken = couch_util:get_value("oauth_token", Params),
+        TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
+        case oauth:verify(Signature, atom_to_list(MochiReq:get(method)), URL, Params, Consumer, TokenSecret) of
+        true ->
+            set_user_ctx(Req, AccessToken);
+        false ->
+            Req
+        end
+    end, true).
+
+% Look up the consumer key and get the roles to give the consumer
+% Map the access token to a user name ([oauth_token_users] config), load the
+% user record from the auth db, and install name+roles into #httpd.user_ctx.
+% An unknown user leaves the request unchanged.
+set_user_ctx(Req, AccessToken) ->
+    DbName = couch_config:get("chttpd_auth", "authentication_db"),
+    {ok, _Db} = chttpd_auth:ensure_users_db_exists(?l2b(DbName)),
+    Name = ?l2b(couch_config:get("oauth_token_users", AccessToken)),
+    case chttpd_auth:get_user(Name) of
+    nil -> Req;
+    User ->
+        Roles = couch_util:get_value(<<"roles">>, User, []),
+        Req#httpd{user_ctx=#user_ctx{name=Name, roles=Roles}}
+    end.
+
+% OAuth request_token
+% The three OAuth endpoints: request_token, authorize and access_token.
+% NOTE(review): the token values returned ("requestkey"/"requestsecret",
+% "accesskey"/"accesssecret") are hard-coded placeholders — the token
+% exchange is not actually implemented, only signature verification is.
+handle_oauth_req(#httpd{path_parts=[_OAuth, <<"request_token">>], method=Method}=Req) ->
+    serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
+        AccessToken = couch_util:get_value("oauth_token", Params),
+        TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
+        case oauth:verify(Signature, atom_to_list(Method), URL, Params, Consumer, TokenSecret) of
+        true ->
+            ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
+        false ->
+            invalid_signature(Req)
+        end
+    end, false);
+handle_oauth_req(#httpd{path_parts=[_OAuth, <<"authorize">>]}=Req) ->
+    {ok, serve_oauth_authorize(Req)};
+handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>], method='GET'}=Req) ->
+    % only a request token ("requestkey") may be exchanged for an access token
+    serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
+        case oauth:token(Params) of
+        "requestkey" ->
+            case oauth:verify(Signature, "GET", URL, Params, Consumer, "requestsecret") of
+            true ->
+                ok(Req, <<"oauth_token=accesskey&oauth_token_secret=accesssecret">>);
+            false ->
+                invalid_signature(Req)
+            end;
+        _ ->
+            chttpd:send_error(Req, 400, <<"invalid_token">>, <<"Invalid OAuth token.">>)
+        end
+    end, false);
+handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>]}=Req) ->
+    chttpd:send_method_not_allowed(Req, "GET").
+
+% Shared 400 response for a failed OAuth signature check.
+invalid_signature(Req) ->
+    chttpd:send_error(Req, 400, <<"invalid_signature">>, <<"Invalid signature value.">>).
+
+% This needs to be protected i.e. force user to login using HTTP Basic Auth or form-based login.
+% GET confirms the consumer with the user; POST would redirect back to the
+% consumer with a verification code. Both currently just verify the
+% signature and return placeholder tokens (see NOTE on handle_oauth_req/1).
+serve_oauth_authorize(#httpd{method=Method}=Req) ->
+    case Method of
+    'GET' ->
+        % Confirm with the User that they want to authenticate the Consumer
+        serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
+            AccessToken = couch_util:get_value("oauth_token", Params),
+            TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
+            case oauth:verify(Signature, "GET", URL, Params, Consumer, TokenSecret) of
+            true ->
+                ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
+            false ->
+                invalid_signature(Req)
+            end
+        end, false);
+    'POST' ->
+        % If the User has confirmed, we direct the User back to the Consumer with a verification code
+        serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
+            AccessToken = couch_util:get_value("oauth_token", Params),
+            TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
+            case oauth:verify(Signature, "POST", URL, Params, Consumer, TokenSecret) of
+            true ->
+                %redirect(oauth_callback, oauth_token, oauth_verifier),
+                ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
+            false ->
+                invalid_signature(Req)
+            end
+        end, false);
+    _ ->
+        chttpd:send_method_not_allowed(Req, "GET,POST")
+    end.
+
+% Extract the OAuth parameters from the request and, if a consumer can be
+% resolved, call Fun(URL, ParamsWithoutSignature, Consumer, Signature).
+% FailSilently=true returns the request untouched when no consumer key is
+% present (so other auth handlers can run); false replies 400 instead.
+% NOTE(review): of the three parameter sources listed below, only (1) the
+% Authorization header and (3) the query string are read here — a urlencoded
+% POST body (2) is not consulted.
+serve_oauth(#httpd{mochi_req=MochiReq}=Req, Fun, FailSilently) ->
+    % 1. In the HTTP Authorization header as defined in OAuth HTTP Authorization Scheme.
+    % 2. As the HTTP POST request body with a content-type of application/x-www-form-urlencoded.
+    % 3. Added to the URLs in the query part (as defined by [RFC3986] section 3).
+    AuthHeader = case MochiReq:get_header_value("authorization") of
+    undefined ->
+        "";
+    Else ->
+        [Head | Tail] = re:split(Else, "\\s", [{parts, 2}, {return, list}]),
+        case [string:to_lower(Head) | Tail] of
+        ["oauth", Rest] -> Rest;
+        _ -> ""
+        end
+    end,
+    HeaderParams = oauth_uri:params_from_header_string(AuthHeader),
+    %Realm = couch_util:get_value("realm", HeaderParams),
+    Params = proplists:delete("realm", HeaderParams) ++ MochiReq:parse_qs(),
+    ?LOG_DEBUG("OAuth Params: ~p", [Params]),
+    case couch_util:get_value("oauth_version", Params, "1.0") of
+    "1.0" ->
+        case couch_util:get_value("oauth_consumer_key", Params, undefined) of
+        undefined ->
+            case FailSilently of
+            true -> Req;
+            false -> chttpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer.">>)
+            end;
+        ConsumerKey ->
+            SigMethod = couch_util:get_value("oauth_signature_method", Params),
+            case consumer_lookup(ConsumerKey, SigMethod) of
+            none ->
+                chttpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer (key or signature method).">>);
+            Consumer ->
+                Signature = couch_util:get_value("oauth_signature", Params),
+                URL = chttpd:absolute_uri(Req, MochiReq:get(raw_path)),
+                % signature param is removed before verification, per OAuth
+                Fun(URL, proplists:delete("oauth_signature", Params),
+                    Consumer, Signature)
+            end
+        end;
+    _ ->
+        chttpd:send_error(Req, 400, <<"invalid_oauth_version">>, <<"Invalid OAuth version.">>)
+    end.
+
+% Resolve a consumer key + signature method string into the {Key, Secret,
+% Method} triple the oauth library expects, or 'none' when the method is
+% unsupported (only PLAINTEXT and HMAC-SHA1) or no secret is configured
+% under [oauth_consumer_secrets].
+consumer_lookup(Key, MethodStr) ->
+    SignatureMethod = case MethodStr of
+    "PLAINTEXT" -> plaintext;
+    "HMAC-SHA1" -> hmac_sha1;
+    %"RSA-SHA1" -> rsa_sha1;
+    _Else -> undefined
+    end,
+    case SignatureMethod of
+    undefined -> none;
+    _SupportedMethod ->
+        case couch_config:get("oauth_consumer_secrets", Key, undefined) of
+        undefined -> none;
+        Secret -> {Key, Secret, SignatureMethod}
+        end
+    end.
+
+% Plain 200 response with the given body and no extra headers.
+ok(#httpd{mochi_req=MochiReq}, Body) ->
+    {ok, MochiReq:respond({200, [], Body})}.
diff --git a/src/chttpd_server.erl b/src/chttpd_server.erl
new file mode 100644
index 00000000..77fb5410
--- /dev/null
+++ b/src/chttpd_server.erl
@@ -0,0 +1,3 @@
+-module(chttpd_server).
+
+-include("chttpd.hrl"). \ No newline at end of file
diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl
new file mode 100644
index 00000000..bba05ec1
--- /dev/null
+++ b/src/chttpd_show.erl
@@ -0,0 +1,496 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_show).
+
+-export([handle_doc_show_req/2, handle_doc_update_req/2, handle_view_list_req/2,
+ handle_doc_show/5, handle_view_list/7, start_list_resp/5,
+ send_list_row/6]).
+
+-include("chttpd.hrl").
+
+-import(chttpd,
+ [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
+ start_json_response/2,send_chunk/2,send_chunked_error/2,
+ start_chunked_response/3, send_error/4]).
+
+% GET /db/_design/D/_show/S[/DocId] — run show function S from design doc D
+% against DocId (or nil when no doc id is given). Wrong path depth is a 404;
+% other methods are 405.
+handle_doc_show_req(#httpd{
+        method='GET',
+        path_parts=[_DbName, _Design, DesignName, _Show, ShowName, DocId]
+    }=Req, Db) ->
+    handle_doc_show(Req, DesignName, ShowName, DocId, Db);
+
+handle_doc_show_req(#httpd{
+        path_parts=[_DbName, _Design, DesignName, _Show, ShowName]
+    }=Req, Db) ->
+    handle_doc_show(Req, DesignName, ShowName, nil, Db);
+
+handle_doc_show_req(#httpd{method='GET'}=Req, _Db) ->
+    send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>);
+
+handle_doc_show_req(Req, _Db) ->
+    send_method_not_allowed(Req, "GET,POST,HEAD").
+
+% _update handler functions:
+%   PUT  /db/_design/D/_update/U/DocId — run update U against DocId (a doc
+%        that can't be opened is passed as nil);
+%   POST /db/_design/D/_update/U       — run update U with no document.
+% Wrong method for a given path depth is 405; anything else 404.
+% NOTE(review): the bare `catch _ -> nil` clause catches only throws and
+% silently maps any open failure to nil — deliberate best-effort, but it
+% masks the failure reason.
+handle_doc_update_req(#httpd{
+        method = 'PUT',
+        path_parts=[_DbName, _Design, DesignName, _Update, UpdateName, DocId]
+    }=Req, Db) ->
+    DesignId = <<"_design/", DesignName/binary>>,
+    #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []),
+    Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+    UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]),
+    Doc = try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of
+        FoundDoc -> FoundDoc
+    catch
+        _ -> nil
+    end,
+    send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db);
+
+handle_doc_update_req(#httpd{
+        method = 'POST',
+        path_parts=[_DbName, _Design, DesignName, _Update, UpdateName]
+    }=Req, Db) ->
+    DesignId = <<"_design/", DesignName/binary>>,
+    #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []),
+    Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+    UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]),
+    send_doc_update_response(Lang, UpdateSrc, nil, nil, Req, Db);
+
+handle_doc_update_req(#httpd{
+        path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName, _DocId]
+    }=Req, _Db) ->
+    send_method_not_allowed(Req, "PUT");
+
+handle_doc_update_req(#httpd{
+        path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName]
+    }=Req, _Db) ->
+    send_method_not_allowed(Req, "POST");
+
+handle_doc_update_req(Req, _Db) ->
+    send_error(Req, 404, <<"update_error">>, <<"Invalid path.">>).
+
+
+
+
+% Load the design doc, pull out the named show function's source and run it
+% against DocId (nil means "no document"; a doc that fails to open is also
+% passed as nil — same best-effort catch as handle_doc_update_req/2).
+handle_doc_show(Req, DesignName, ShowName, DocId, Db) ->
+    DesignId = <<"_design/", DesignName/binary>>,
+    #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []),
+    Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+    ShowSrc = couch_util:get_nested_json_value({Props}, [<<"shows">>, ShowName]),
+    Doc = case DocId of
+    nil -> nil;
+    _ ->
+        try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of
+            FoundDoc -> FoundDoc
+        catch
+            _ -> nil
+        end
+    end,
+    send_doc_show_response(Lang, ShowSrc, DocId, Doc, Req, Db).
+
+% view-list request with view and list from same design doc.
+% Route _list requests: GET with 6 path parts uses the same design doc for
+% list and view; 7 parts names a separate view design doc; POST additionally
+% extracts "keys" from the JSON body and buffers the body in #httpd.req_body.
+handle_view_list_req(#httpd{method='GET',
+        path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) ->
+    handle_view_list(Req, DesignName, ListName, DesignName, ViewName, Db, nil);
+
+% view-list request with view and list from different design docs.
+handle_view_list_req(#httpd{method='GET',
+        path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewDesignName, ViewName]}=Req, Db) ->
+    handle_view_list(Req, DesignName, ListName, ViewDesignName, ViewName, Db, nil);
+
+handle_view_list_req(#httpd{method='GET'}=Req, _Db) ->
+    send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>);
+
+handle_view_list_req(#httpd{method='POST',
+        path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) ->
+    ReqBody = chttpd:body(Req),
+    {Props2} = ?JSON_DECODE(ReqBody),
+    Keys = couch_util:get_value(<<"keys">>, Props2, nil),
+    handle_view_list(Req#httpd{req_body=ReqBody}, DesignName, ListName, DesignName, ViewName, Db, Keys);
+
+handle_view_list_req(Req, _Db) ->
+    send_method_not_allowed(Req, "GET,POST,HEAD").
+
+% Open the list design doc (and the view design doc, when different), pull
+% out the list source and the view group, then delegate to
+% send_view_list_response/8. Keys is nil for GET, or the POSTed keys list.
+handle_view_list(Req, ListDesignName, ListName, ViewDesignName, ViewName, Db, Keys) ->
+    ListDesignId = <<"_design/", ListDesignName/binary>>,
+    #doc{body={ListProps}} = chttpd_db:couch_doc_open(Db, ListDesignId, nil, []),
+    if
+    ViewDesignName == ListDesignName ->
+        % same design doc: reuse the already-opened properties
+        ViewProps = ListProps,
+        ViewDesignId = ListDesignId;
+    true ->
+        ViewDesignId = <<"_design/", ViewDesignName/binary>>,
+        #doc{body={ViewProps}} = chttpd_db:couch_doc_open(Db, ViewDesignId, nil, [])
+    end,
+
+    ViewLang = couch_util:get_value(<<"language">>, ViewProps, <<"javascript">>),
+    ListSrc = couch_util:get_nested_json_value({ListProps}, [<<"lists">>, ListName]),
+    Group = couch_view_group:design_doc_to_view_group(Db, #doc{id=ViewDesignId,
+        body={ViewProps}}),
+    send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db,
+        Group, Keys).
+    % send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db, Keys).
+
+
+% Cluster path: start the list query server, run the view through
+% ?COUCH:list_view/7 and finish the chunked list response.
+% NOTE(review): the etag is a fresh uuid per request, so etag_respond can
+% never match a client's If-None-Match — list responses are effectively
+% uncacheable here; confirm that is intentional.
+send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Group, Keys) ->
+    IsReduce = chttpd_view:get_reduce_type(Req),
+    ViewType = chttpd_view:extract_view_type(ViewName, Group#group.views,
+        IsReduce),
+    QueryArgs = chttpd_view:parse_view_params(Req, Keys, ViewType),
+    {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
+    StartListRespFun = make_map_start_resp_fun(QueryServer, Db),
+    Etag = couch_util:new_uuid(),
+    chttpd:etag_respond(Req, Etag, fun() ->
+        {ok, Total, Result} = ?COUCH:list_view(Req, Db, DesignId, ViewName,
+            Keys, QueryArgs, QueryServer),
+        finish_list(Req, QueryServer, Etag, Result, StartListRespFun, Total)
+    end).
+
+% Pre-cluster path: resolve the named view as a map view, or fall back to a
+% reduce view (optionally unwrapped to its map half when reduce=false), and
+% stream it through output_map_list/output_reduce_list.
+% NOTE(review): the only call site for this arity is commented out in
+% handle_view_list/7 above, so this clause appears to be dead code kept for
+% reference — expect an unused-function warning if it is never exported.
+send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Keys) ->
+    Stale = chttpd_view:get_stale_type(Req),
+    Reduce = chttpd_view:get_reduce_type(Req),
+    case ?COUCH:get_map_view(Db, DesignId, ViewName, Stale) of
+    {ok, View, Group} ->
+        QueryArgs = chttpd_view:parse_view_params(Req, Keys, map),
+        output_map_list(Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys);
+    {not_found, _Reason} ->
+        case ?COUCH:get_reduce_view(Db, DesignId, ViewName, Stale) of
+        {ok, ReduceView, Group} ->
+            case Reduce of
+            false ->
+                QueryArgs = chttpd_view:parse_view_params(
+                    Req, Keys, map_red
+                ),
+                MapView = ?COUCH:extract_map_view(ReduceView),
+                output_map_list(Req, Lang, ListSrc, MapView, Group, Db, QueryArgs, Keys);
+            _ ->
+                QueryArgs = chttpd_view:parse_view_params(
+                    Req, Keys, reduce
+                ),
+                output_reduce_list(Req, Lang, ListSrc, ReduceView, Group, Db, QueryArgs, Keys)
+            end;
+        {not_found, Reason} ->
+            throw({not_found, Reason})
+        end
+    end.
+
+
+% Stream a map view through a list function. First clause: no keys (plain
+% range fold from start_key). Second clause: an explicit Keys list — one
+% fold per key with start_key = end_key = Key, threading the fold
+% accumulator across keys. The etag covers the view group state plus the
+% list source, Accept header and user context.
+output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) ->
+    #view_query_args{
+        limit = Limit,
+        direction = Dir,
+        skip = SkipCount,
+        start_key = StartKey,
+        start_docid = StartDocId
+    } = QueryArgs,
+    {ok, RowCount} = ?COUCH:get_row_count(View),
+    Start = {StartKey, StartDocId},
+    Headers = MReq:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Accept = couch_util:get_value('Accept', Hlist),
+    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}),
+    chttpd:etag_respond(Req, CurrentEtag, fun() ->
+        % get the os process here
+        % pass it into the view fold with closures
+        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
+
+        StartListRespFun = make_map_start_resp_fun(QueryServer, Db),
+        SendListRowFun = make_map_send_row_fun(QueryServer),
+
+        FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount,
+            #view_fold_helper_funs{
+                reduce_count = fun ?COUCH:reduce_to_count/1,
+                start_response = StartListRespFun,
+                send_row = SendListRowFun
+            }),
+        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
+        {ok, FoldResult} = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit),
+        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount)
+    end);
+
+output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) ->
+    #view_query_args{
+        limit = Limit,
+        direction = Dir,
+        skip = SkipCount,
+        start_docid = StartDocId
+    } = QueryArgs,
+    {ok, RowCount} = ?COUCH:get_row_count(View),
+    Headers = MReq:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Accept = couch_util:get_value('Accept', Hlist),
+    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}),
+    chttpd:etag_respond(Req, CurrentEtag, fun() ->
+        % get the os process here
+        % pass it into the view fold with closures
+        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
+
+        StartListRespFun = make_map_start_resp_fun(QueryServer, Db),
+        SendListRowFun = make_map_send_row_fun(QueryServer),
+
+        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
+        {ok, FoldResult} = lists:foldl(
+            fun(Key, {ok, FoldAcc}) ->
+                % fold each requested key as a single-key range
+                FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs#view_query_args{
+                        start_key = Key,
+                        end_key = Key
+                    }, CurrentEtag, Db, RowCount,
+                    #view_fold_helper_funs{
+                        reduce_count = fun ?COUCH:reduce_to_count/1,
+                        start_response = StartListRespFun,
+                        send_row = SendListRowFun
+                    }),
+                ?COUCH:view_fold(View, {Key, StartDocId}, Dir, FoldlFun, FoldAcc)
+            end, {ok, FoldAccInit}, Keys),
+        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount)
+    end).
+
+% Closure used as the start_response helper for map-view lists: sends the
+% list head with total_rows/offset to the query server.
+make_map_start_resp_fun(QueryServer, Db) ->
+    fun(Req, Etag, TotalRows, Offset, _Acc) ->
+        Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}]},
+        start_list_resp(QueryServer, Req, Db, Head, Etag)
+    end.
+
+% Closure used as the start_response helper for reduce-view lists: reduce
+% output has no total_rows/offset, so the head is an empty object.
+make_reduce_start_resp_fun(QueryServer, _Req, Db, _CurrentEtag) ->
+    fun(Req2, Etag, _Acc) ->
+        start_list_resp(QueryServer, Req2, Db, {[]}, Etag)
+    end.
+
+% Ask the query server to render the list head, apply the etag to its JSON
+% response, translate it through the external-response machinery into HTTP
+% code/headers, and open the chunked response. Returns {ok, Resp, Chunks}
+% where Chunks is the head output flattened to a list.
+start_list_resp(QueryServer, Req, Db, Head, Etag) ->
+    [<<"start">>,Chunks,JsonResp] = couch_query_servers:render_list_head(QueryServer,
+        Req, Db, Head),
+    JsonResp2 = apply_etag(JsonResp, Etag),
+    #extern_resp_args{
+        code = Code,
+        ctype = CType,
+        headers = ExtHeaders
+    } = chttpd_external:parse_external_response(JsonResp2),
+    JsonHeaders = chttpd_external:default_or_content_type(CType, ExtHeaders),
+    {ok, Resp} = start_chunked_response(Req, Code, JsonHeaders),
+    {ok, Resp, ?b2l(?l2b(Chunks))}.
+
+% Closure used as the send_row helper for map-view lists; forwards each row
+% (with its IncludeDocs flag) to send_list_row/6.
+make_map_send_row_fun(QueryServer) ->
+    fun(Resp, Db, Row, IncludeDocs, RowFront) ->
+        send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDocs)
+    end.
+
+% Closure used as the send_row helper for reduce-view lists; reduce rows
+% never include docs, hence the hard-coded false.
+make_reduce_send_row_fun(QueryServer, Db) ->
+    fun(Resp, Row, RowFront) ->
+        send_list_row(Resp, QueryServer, Db, Row, RowFront, false)
+    end.
+
+% Render one view row through the list function and stream the result.
+% Returns {ok, ""} to continue (query server said <<"chunks">>) or
+% {stop, stop} when the list function ended early (<<"end">>). A throw from
+% rendering is sent to the client as a chunked error, then re-thrown as
+% {already_sent, _, _} so callers don't try to respond again.
+send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDoc) ->
+    try
+        [Go,Chunks] = couch_query_servers:render_list_row(QueryServer, Db, Row, IncludeDoc),
+        Chunk = RowFront ++ ?b2l(?l2b(Chunks)),
+        send_non_empty_chunk(Resp, Chunk),
+        case Go of
+        <<"chunks">> ->
+            {ok, ""};
+        <<"end">> ->
+            {stop, stop}
+        end
+    catch
+    throw:Error ->
+        send_chunked_error(Resp, Error),
+        throw({already_sent, Resp, Error})
+    end.
+
%% Send Chunk unless it is empty: a zero-length chunk would terminate
%% the HTTP chunked transfer encoding prematurely.
send_non_empty_chunk(_Resp, []) ->
    ok;
send_non_empty_chunk(Resp, Chunk) ->
    send_chunk(Resp, Chunk).
+
%% Stream a reduce view through a _list function.
%% First clause: no `keys` filter — one fold over the requested
%% start/end range. The Accept header is folded into the etag because
%% the list function may vary its output on it.
output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) ->
    #view_query_args{
        limit = Limit,
        direction = Dir,
        skip = SkipCount,
        start_key = StartKey,
        start_docid = StartDocId,
        end_key = EndKey,
        end_docid = EndDocId,
        group_level = GroupLevel
    } = QueryArgs,
    Headers = MReq:get(headers),
    Hlist = mochiweb_headers:to_list(Headers),
    Accept = couch_util:get_value('Accept', Hlist),
    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        % get the os process here
        % pass it into the view fold with closures
        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
        StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag),
        SendListRowFun = make_reduce_send_row_fun(QueryServer, Db),

        {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req,
            GroupLevel, QueryArgs, CurrentEtag,
            #reduce_fold_helper_funs{
                start_response = StartListRespFun,
                send_row = SendListRowFun
            }),
        FoldAccInit = {Limit, SkipCount, undefined, []},
        {ok, FoldResult} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId},
            {EndKey, EndDocId}, GroupRowsFun, RespFun,
            FoldAccInit),
        % TotalRows is null: reduce listings have no total_rows metadata
        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null)
    end);

%% Second clause: explicit `keys` list — the fold runs once per key,
%% threading the accumulator through so limit/skip span the whole
%% request. The Keys term is part of the etag input.
output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) ->
    #view_query_args{
        limit = Limit,
        direction = Dir,
        skip = SkipCount,
        start_docid = StartDocId,
        end_docid = EndDocId,
        group_level = GroupLevel
    } = QueryArgs,
    Headers = MReq:get(headers),
    Hlist = mochiweb_headers:to_list(Headers),
    Accept = couch_util:get_value('Accept', Hlist),
    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx, Keys}),

    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        % get the os process here
        % pass it into the view fold with closures
        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
        StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag),
        SendListRowFun = make_reduce_send_row_fun(QueryServer, Db),

        {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req,
            GroupLevel, QueryArgs, CurrentEtag,
            #reduce_fold_helper_funs{
                start_response = StartListRespFun,
                send_row = SendListRowFun
            }),
        FoldAccInit = {Limit, SkipCount, undefined, []},
        {ok, FoldResult} = lists:foldl(
            fun(Key, {ok, FoldAcc}) ->
                % each key becomes its own start/end bound
                ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId},
                    {Key, EndDocId}, GroupRowsFun, RespFun, FoldAcc)
            end, {ok, FoldAccInit}, Keys),
        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null)
    end).
+
%% Emit whatever remains of a _list response once the fold is done:
%% the head (if no row was ever sent), the tail from the list function,
%% and the terminating zero-length chunk.
finish_list(Req, QueryServer, Etag, FoldResult, StartFun, TotalRows) ->
    % map folds carry a 5-tuple accumulator, reduce folds a 4-tuple;
    % normalize to 5 elements so one case below covers both
    FoldResult2 = case FoldResult of
    {Limit, SkipCount, Response, RowAcc} ->
        {Limit, SkipCount, Response, RowAcc, nil};
    Else ->
        Else
    end,
    % Resp is bound in every branch of this case and used afterwards
    case FoldResult2 of
    {_, _, undefined, _, _} ->
        % no row was ever rendered: emit head + tail together
        {ok, Resp, BeginBody} =
            render_head_for_empty_list(StartFun, Req, Etag, TotalRows),
        [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer),
        Chunk = BeginBody ++ ?b2l(?l2b(Chunks)),
        send_non_empty_chunk(Resp, Chunk);
    {_, _, Resp, stop, _} ->
        % the list function already terminated the body itself
        ok;
    {_, _, Resp, _, _} ->
        % rows were sent; append the list function's tail output
        [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer),
        send_non_empty_chunk(Resp, ?b2l(?l2b(Chunks)))
    end,
    couch_query_servers:stop_doc_map(QueryServer),
    % final empty chunk closes the chunked transfer encoding
    send_chunk(Resp, []).
+
+
%% Emit the response head for a _list that produced zero rows.
%% Reduce listings (TotalRows =:= null) use the 3-arity start fun;
%% map listings use the 5-arity form with a null offset.
render_head_for_empty_list(StartFun, Req, Etag, null) ->
    %% reduce list: no row-count metadata
    StartFun(Req, Etag, []);
render_head_for_empty_list(StartFun, Req, Etag, TotalRows) ->
    %% map list: total_rows known, offset unknowable (no rows matched)
    StartFun(Req, Etag, TotalRows, null, []).
+
%% Run a _show function and send its (external-style) response.
%% First clause: the document was not found (Doc =:= nil) — the show
%% function still runs, with null as its doc argument.
send_doc_show_response(Lang, ShowSrc, DocId, nil, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) ->
    % compute etag with no doc
    Headers = MReq:get(headers),
    Hlist = mochiweb_headers:to_list(Headers),
    Accept = couch_util:get_value('Accept', Hlist),
    CurrentEtag = chttpd:make_etag({Lang, ShowSrc, nil, Accept, UserCtx}),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc,
            DocId, nil, Req, Db),
        % force our Etag/Vary headers into whatever the show fun returned
        JsonResp = apply_etag(ExternalResp, CurrentEtag),
        chttpd_external:send_external_response(Req, JsonResp)
    end);

%% Second clause: the document exists — its revision list feeds the
%% etag so a new revision invalidates cached responses.
send_doc_show_response(Lang, ShowSrc, DocId, #doc{revs=Revs}=Doc, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) ->
    % calculate the etag
    Headers = MReq:get(headers),
    Hlist = mochiweb_headers:to_list(Headers),
    Accept = couch_util:get_value('Accept', Hlist),
    CurrentEtag = chttpd:make_etag({Lang, ShowSrc, Revs, Accept, UserCtx}),
    % We know our etag now
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc,
            DocId, Doc, Req, Db),
        JsonResp = apply_etag(ExternalResp, CurrentEtag),
        chttpd_external:send_external_response(Req, JsonResp)
    end).
+
%% Run an _update function. If it returns a new document body, write
%% it (honoring X-Couch-Full-Commit) and answer 201; otherwise answer
%% 200 with the response the update function produced.
%% Code and JsonResp are bound inside every case branch and exported
%% for use after the case — intentional, if unusual, Erlang.
%% NOTE(review): the new revision from update_doc is discarded, and the
%% Location header TODO below is still open.
send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db) ->
    case couch_query_servers:render_doc_update(Lang, UpdateSrc,
        DocId, Doc, Req, Db) of
    [<<"up">>, {NewJsonDoc}, JsonResp] ->
        % update fun produced a doc to save
        Options = case chttpd:header_value(Req, "X-Couch-Full-Commit", "false") of
        "true" ->
            [full_commit];
        _ ->
            []
        end,
        NewDoc = couch_doc:from_json_obj({NewJsonDoc}),
        Code = 201,
        % todo set location field
        {ok, _NewRev} = ?COUCH:update_doc(Db, NewDoc, Options);
    [<<"up">>, _Other, JsonResp] ->
        % no doc to save; just relay the rendered response
        Code = 200,
        ok
    end,
    % stamp the HTTP status chosen above into the external response
    JsonResp2 = json_apply_field({<<"code">>, Code}, JsonResp),
    chttpd_external:send_external_response(Req, JsonResp2).
+
%% Replace (or insert) a single field in an EJSON object, dropping any
%% existing pairs with the same key. Note the resulting order: the new
%% pair comes first, followed by the surviving pairs in reverse of
%% their original order.
%% todo move to couch_util
json_apply_field(Pair, {Props}) ->
    json_apply_field(Pair, Props, []).

json_apply_field({Key, _} = Pair, [{Key, _Replaced} | Rest], Seen) ->
    %% same key: discard the old value
    json_apply_field(Pair, Rest, Seen);
json_apply_field(Pair, [Other | Rest], Seen) ->
    %% unrelated field: keep it
    json_apply_field(Pair, Rest, [Other | Seen]);
json_apply_field({Key, NewValue}, [], Seen) ->
    %% input exhausted: prepend our pair to the accumulated fields
    {[{Key, NewValue} | Seen]}.
+
%% Force our Etag and Vary headers into an external-style JSON
%% response, creating the headers object if the show/list function did
%% not supply one.
apply_etag({ExternalResponse}, CurrentEtag) ->
    % Here we embark on the delicate task of replacing or creating the
    % headers on the JsonResponse object. We need to control the Etag and
    % Vary headers. If the external function controls the Etag, we'd have to
    % run it to check for a match, which sort of defeats the purpose.
    case couch_util:get_value(<<"headers">>, ExternalResponse, nil) of
    nil ->
        % no JSON headers
        % add our Etag and Vary headers to the response
        {[{<<"headers">>, {[{<<"Etag">>, CurrentEtag}, {<<"Vary">>, <<"Accept">>}]}} | ExternalResponse]};
    JsonHeaders ->
        % JsonHeaders is already bound, so the first pattern in the case
        % below is an equality match that singles out the headers field
        {[case Field of
        {<<"headers">>, JsonHeaders} -> % add our headers
            JsonHeadersEtagged = json_apply_field({<<"Etag">>, CurrentEtag}, JsonHeaders),
            JsonHeadersVaried = json_apply_field({<<"Vary">>, <<"Accept">>}, JsonHeadersEtagged),
            {<<"headers">>, JsonHeadersVaried};
        _ -> % skip non-header fields
            Field
        end || Field <- ExternalResponse]}
    end.
+
diff --git a/src/chttpd_stats.erl b/src/chttpd_stats.erl
new file mode 100644
index 00000000..513a80f3
--- /dev/null
+++ b/src/chttpd_stats.erl
@@ -0,0 +1,62 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_stats).
+-include("chttpd.hrl").
+-include("../../couch/src/couch_stats.hrl").
+
+-export([handle_stats_req/1]).
+-import(chttpd,
+ [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
+ start_json_response/2,send_chunk/2,end_json_response/1,
+ start_chunked_response/3, send_error/4]).
+
+-define(b2a(V), list_to_atom(binary_to_list(V))).
+
+-record(stats_query_args, {
+ range='0',
+ flush=false
+}).
+
%% GET /_stats — dump every aggregated statistic as one JSON object.
handle_stats_req(#httpd{method='GET', path_parts=[_]}=Req) ->
    send_json(Req, couch_stats_aggregator:all());

%% GET /_stats/Module/Key — a single statistic, optionally for a given
%% sampling range, optionally flushing the aggregator first.
handle_stats_req(#httpd{method='GET', path_parts=[_Stats, Module, Key]}=Req) ->
    #stats_query_args{
        range=Range,
        flush=Flush
    } = parse_stats_query(Req),

    % flush=true folds the newest samples in before reading, so the
    % response reflects up-to-the-moment data
    case Flush of
    true ->
        couch_stats_aggregator:time_passed();
    _ -> ok
    end,

    % NOTE(review): ?b2a runs list_to_atom on URL path segments; atoms
    % are never garbage collected, so requests with unknown stat names
    % grow the atom table. Consider binary_to_existing_atom — confirm
    % all valid stat names are pre-registered before changing.
    Stats = couch_stats_aggregator:get_json({?b2a(Module), ?b2a(Key)}, Range),
    Response = {[{Module, {[{Key, Stats}]}}]},
    send_json(Req, Response);

%% Any other method or path shape is rejected.
handle_stats_req(Req) ->
    send_method_not_allowed(Req, "GET").
+
%% Fold the request's query string into a #stats_query_args record.
%% `range` selects the sampling window, `flush=true` requests an
%% aggregation pass before reading; unknown parameters are ignored.
parse_stats_query(Req) ->
    lists:foldl(fun({Key,Value}, Args) ->
        case {Key, Value} of
        {"range", Range} ->
            % NOTE(review): list_to_atom on an untrusted query value can
            % exhaust the atom table; list_to_existing_atom would be
            % safer if all valid ranges are pre-registered — confirm.
            Args#stats_query_args{range=list_to_atom(Range)};
        {"flush", "true"} ->
            Args#stats_query_args{flush=true};
        _Else -> % unknown key value pair, ignore.
            Args
        end
    end, #stats_query_args{}, chttpd:qs(Req)).
diff --git a/src/chttpd_sup.erl b/src/chttpd_sup.erl
new file mode 100644
index 00000000..c710ec37
--- /dev/null
+++ b/src/chttpd_sup.erl
@@ -0,0 +1,15 @@
+-module(chttpd_sup).
+-behaviour(supervisor).
+-export([init/1]).
+
+-export([start_link/1]).
+
+-include("chttpd.hrl").
+
%% Start the chttpd supervisor, registered locally as chttpd_sup.
%% Args is passed through to init/1 (expected to be []).
start_link(Args) ->
    supervisor:start_link({local,?MODULE}, ?MODULE, Args).
+
%% supervisor callback: one permanent chttpd worker under a
%% one_for_one strategy, restarted at most 3 times in 10 seconds,
%% with a 100 ms shutdown grace period.
init([]) ->
    ChildSpec = {chttpd, {chttpd, start_link, []}, permanent, 100, worker, [chttpd]},
    {ok, {{one_for_one, 3, 10}, [ChildSpec]}}.
diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl
new file mode 100644
index 00000000..c9fb1c9f
--- /dev/null
+++ b/src/chttpd_view.erl
@@ -0,0 +1,738 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_view).
+-include("chttpd.hrl").
+
+-export([handle_view_req/2,handle_temp_view_req/2,handle_db_view_req/2]).
+
+-export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]).
+-export([make_view_fold_fun/6, finish_view_fold/3, view_row_obj/3]).
+-export([view_group_etag/2, view_group_etag/3, make_reduce_fold_funs/5]).
+-export([design_doc_view/5, parse_bool_param/1, extract_view_type/3]).
+
+-import(chttpd,
+ [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,send_chunk/2,
+ start_json_response/2, start_json_response/3, end_json_response/1,
+ send_chunked_error/2]).
+
%% Execute a view from a design document and stream the result.
%% Opens the design doc to determine the view type (map/reduce) and
%% parse/validate the query parameters, then delegates the actual fold
%% to the clustered backend (?COUCH:design_view/6), which either
%% streams rows itself or hands back a total/result pair to finish.
design_doc_view(Req, Db, Id, ViewName, Keys) ->
    DesignId = <<"_design/", Id/binary>>,
    {_ViewGroup, QueryArgs} = case ?COUCH:open_doc(Db, DesignId, []) of
    {ok, Doc} ->
        Group = couch_view_group:design_doc_to_view_group(Db, Doc),
        IsReduce = get_reduce_type(Req),
        ViewType = extract_view_type(ViewName, Group#group.views, IsReduce),
        {Group, parse_view_params(Req, Keys, ViewType)};
    {not_found, Reason} ->
        throw({not_found, Reason})
    end,
    % this etag is wrong as current_seq == 0 right now, so no caching allowed
    % Etag = view_group_etag(ViewGroup, Db, Keys),
    % A fresh UUID per request makes every etag unique, deliberately
    % disabling client caching until the seq problem above is fixed.
    Etag = couch_util:new_uuid(),
    couch_stats_collector:increment({httpd, view_reads}),
    chttpd:etag_respond(Req, Etag, fun() ->
        {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]),
        case ?COUCH:design_view(Resp, Db, DesignId, ViewName, Keys, QueryArgs) of
        {ok, Total, Result} ->
            % backend returned a summary; close the JSON body ourselves
            send_chunk(Resp, final_chunk(Total, Result)),
            end_json_response(Resp);
        {ok, Resp} ->
            % backend streamed and finished the response itself
            {ok, Resp}
        end
    end).
+
%% Determine whether ViewName names a reduce or a map view within the
%% design doc's view list. A view with a reduce function is `reduce`
%% when reduce output was requested and `red_map` (use its map half)
%% otherwise. Throws {not_found, missing_named_view} when no view in
%% the group carries the name.
extract_view_type(_ViewName, [], _IsReduce) ->
    throw({not_found, missing_named_view});
extract_view_type(ViewName, [View | Rest], IsReduce) ->
    ReduceNames = [Name || {Name, _RedSrc} <- View#view.reduce_funs],
    HasReduce = lists:member(ViewName, ReduceNames),
    HasMap = lists:member(ViewName, View#view.map_names),
    if
        HasReduce andalso IsReduce =:= true -> reduce;
        HasReduce -> red_map;
        HasMap -> map;
        true -> extract_view_type(ViewName, Rest, IsReduce)
    end.
+
%% Produce the closing bytes for a streamed view response.
%% A 5-tuple accumulator with an undefined response slot means no row
%% was ever written: emit a complete empty result (a nil offset falls
%% back to Total, matching an exhausted fold). Otherwise just close
%% the rows array. Anything else is an error term to re-throw.
final_chunk(Total, {_, _, undefined, _, Offset}) ->
    ActualOffset = case Offset of
        nil -> Total;
        _ -> Offset
    end,
    ?JSON_ENCODE({[{total_rows, Total}, {offset, ActualOffset}, {rows, []}]});
final_chunk(_Total, {_, _, _Resp, _, _}) ->
    "\r\n]}";
final_chunk(_Total, Error) ->
    throw(Error).
+
%% GET /db/_design/D/_view/V — run the view with query-string params.
handle_view_req(#httpd{method='GET',
        path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) ->
    design_doc_view(Req, Db, DName, ViewName, nil);

%% POST /db/_design/D/_view/V — like GET, but the body may carry a
%% `keys` array to restrict the result to specific keys.
handle_view_req(#httpd{method='POST',
        path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) ->
    {Fields} = chttpd:json_body_obj(Req),
    case couch_util:get_value(<<"keys">>, Fields, nil) of
    nil ->
        Fmt = "POST to view ~p/~p in database ~p with no keys member.",
        ?LOG_DEBUG(Fmt, [DName, ViewName, Db]),
        design_doc_view(Req, Db, DName, ViewName, nil);
    Keys when is_list(Keys) ->
        design_doc_view(Req, Db, DName, ViewName, Keys);
    _ ->
        % `keys` present but not a list
        throw({bad_request, "`keys` member must be an array."})
    end;

%% Any other method is rejected.
handle_view_req(Req, _Db) ->
    send_method_not_allowed(Req, "GET,POST,HEAD").
+
%% GET /db/_view/D/V — run the view; if a `list` query parameter names
%% a _list function, route through the list machinery instead.
handle_db_view_req(#httpd{method='GET',
        path_parts=[_Db, _View, DName, ViewName]}=Req, Db) ->
    QueryArgs = chttpd_view:parse_view_params(Req, nil, nil),
    #view_query_args{
        list = ListName
    } = QueryArgs,
    ?LOG_DEBUG("view list parameter: ~p", [ListName]),
    case ListName of
    nil -> chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil);
    _ ->
        chttpd_show:handle_view_list(Req, DName, ListName, DName, ViewName, Db, nil)
    end;

%% POST /db/_view/D/V — as GET, with an optional `keys` array in the
%% body; a list= parameter again diverts to the _list machinery.
handle_db_view_req(#httpd{method='POST',
        path_parts=[_Db, _View, DName, ViewName]}=Req, Db) ->
    QueryArgs = chttpd_view:parse_view_params(Req, nil, nil),
    #view_query_args{
        list = ListName
    } = QueryArgs,
    case ListName of
    nil ->
        {Fields} = chttpd:json_body_obj(Req),
        case couch_util:get_value(<<"keys">>, Fields, nil) of
        nil ->
            Fmt = "POST to view ~p/~p in database ~p with no keys member.",
            ?LOG_DEBUG(Fmt, [DName, ViewName, Db]),
            chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil);
        Keys when is_list(Keys) ->
            chttpd_view:design_doc_view(Req, Db, DName, ViewName, Keys);
        _ ->
            throw({bad_request, "`keys` member must be an array."})
        end;
    _ ->
        % the body was consumed to read `keys`; stash it back on the
        % request so the list function can still see it
        ReqBody = chttpd:body(Req),
        {Props2} = ?JSON_DECODE(ReqBody),
        Keys = couch_util:get_value(<<"keys">>, Props2, nil),
        chttpd_show:handle_view_list(Req#httpd{req_body=ReqBody},
            DName, ListName, DName, ViewName, Db, Keys)
    end;

%% Any other method is rejected.
handle_db_view_req(Req, _Db) ->
    send_method_not_allowed(Req, "GET,POST,HEAD").
+
%% POST /db/_temp_view — temporary views are not supported by the
%% clustered interface. The throw below aborts every request; all of
%% the code after it is UNREACHABLE and kept only as a template for a
%% future implementation (the compiler will warn about it).
handle_temp_view_req(#httpd{method='POST'}=Req, Db) ->
    throw(not_implemented),
    couch_stats_collector:increment({httpd, temporary_view_reads}),
    {Props} = chttpd:json_body_obj(Req),
    Language = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
    {DesignOptions} = couch_util:get_value(<<"options">>, Props, {[]}),
    MapSrc = couch_util:get_value(<<"map">>, Props),
    Keys = couch_util:get_value(<<"keys">>, Props, nil),
    case couch_util:get_value(<<"reduce">>, Props, null) of
    null ->
        QueryArgs = parse_view_params(Req, Keys, map),
        {ok, View, Group} = ?COUCH:get_temp_map_view(Db, Language,
            DesignOptions, MapSrc),
        output_map_view(Req, View, Group, Db, QueryArgs, Keys);
    RedSrc ->
        QueryArgs = parse_view_params(Req, Keys, reduce),
        {ok, View, Group} = ?COUCH:get_temp_reduce_view(Db, Language,
            DesignOptions, MapSrc, RedSrc),
        output_reduce_view(Req, Db, View, Group, QueryArgs, Keys)
    end;

%% Any other method is rejected.
handle_temp_view_req(Req, _Db) ->
    send_method_not_allowed(Req, "POST").
+
%% Fold a map view and stream plain-JSON rows.
%% First clause: no `keys` filter — one fold over the start/end range.
output_map_view(Req, View, Group, Db, QueryArgs, nil) ->
    #view_query_args{
        limit = Limit,
        direction = Dir,
        skip = SkipCount,
        start_key = StartKey,
        start_docid = StartDocId
    } = QueryArgs,
    CurrentEtag = view_group_etag(Group, Db),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        {ok, RowCount} = ?COUCH:get_row_count(View),
        Start = {StartKey, StartDocId},
        % default helper funs: rows rendered as plain JSON
        FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, #view_fold_helper_funs{reduce_count=fun ?COUCH:reduce_to_count/1}),
        % {Limit, Skip, Resp, RowAcc, OffsetAcc}
        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
        FoldResult = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit),
        finish_view_fold(Req, RowCount, FoldResult)
    end);

%% Second clause: explicit `keys` — the fold is re-run once per key
%% (each key becomes both start_key and end_key), threading the
%% accumulator so limit/skip apply across the whole request. Keys is
%% part of the etag input here.
output_map_view(Req, View, Group, Db, QueryArgs, Keys) ->
    #view_query_args{
        limit = Limit,
        direction = Dir,
        skip = SkipCount,
        start_docid = StartDocId
    } = QueryArgs,
    CurrentEtag = view_group_etag(Group, Db, Keys),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        {ok, RowCount} = ?COUCH:get_row_count(View),
        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
        FoldResult = lists:foldl(
            fun(Key, {ok, FoldAcc}) ->
                Start = {Key, StartDocId},
                FoldlFun = make_view_fold_fun(Req,
                    QueryArgs#view_query_args{
                        start_key = Key,
                        end_key = Key
                    }, CurrentEtag, Db, RowCount,
                    #view_fold_helper_funs{
                        reduce_count = fun ?COUCH:reduce_to_count/1
                    }),
                ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAcc)
            end, {ok, FoldAccInit}, Keys),
        finish_view_fold(Req, RowCount, FoldResult)
    end).
+
%% Fold a reduce view and stream plain-JSON rows.
%% First clause: no `keys` filter — one fold over the start/end range.
output_reduce_view(Req, Db, View, Group, QueryArgs, nil) ->
    #view_query_args{
        start_key = StartKey,
        end_key = EndKey,
        limit = Limit,
        skip = Skip,
        direction = Dir,
        start_docid = StartDocId,
        end_docid = EndDocId,
        group_level = GroupLevel
    } = QueryArgs,
    CurrentEtag = view_group_etag(Group, Db),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}),
        FoldAccInit = {Limit, Skip, undefined, []},
        {ok, {_, _, Resp, _}} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId},
            {EndKey, EndDocId}, GroupRowsFun, RespFun, FoldAccInit),
        finish_reduce_fold(Req, Resp)
    end);

%% Second clause: explicit `keys` — the reduce fold runs once per key,
%% with limit/skip reapplied for each key. Keys must be part of the
%% etag input (as in output_map_view); otherwise two requests with
%% different key sets would share an etag and wrongly get 304s.
output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) ->
    #view_query_args{
        limit = Limit,
        skip = Skip,
        direction = Dir,
        start_docid = StartDocId,
        end_docid = EndDocId,
        group_level = GroupLevel
    } = QueryArgs,
    CurrentEtag = view_group_etag(Group, Db, Keys),
    chttpd:etag_respond(Req, CurrentEtag, fun() ->
        {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}),
        {Resp, _RedAcc3} = lists:foldl(
            fun(Key, {Resp, RedAcc}) ->
                % run the reduce once for each key in keys, with limit etc reapplied for each key
                FoldAccInit = {Limit, Skip, Resp, RedAcc},
                {_, {_, _, Resp2, RedAcc2}} = ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId},
                    {Key, EndDocId}, GroupRowsFun, RespFun, FoldAccInit),
                % Switch to comma
                {Resp2, RedAcc2}
            end,
            {undefined, []}, Keys), % Start with no comma
        finish_reduce_fold(Req, Resp)
    end).
+
%% Swap the open-range defaults when a view is folded in descending
%% order: nil (open start) and {} (collates after every real key)
%% trade places, while explicit keys pass through unchanged.
reverse_key_default(Key) ->
    case Key of
        nil -> {};
        {} -> nil;
        _ -> Key
    end.
+
%% Read the `stale` query parameter as an atom; defaults to 'nil'.
%% NOTE(review): list_to_atom on raw query input can grow the atom
%% table without bound (atoms are never GC'd); list_to_existing_atom
%% would be safer if callers only ever act on 'ok'/'nil' — confirm.
get_stale_type(Req) ->
    list_to_atom(chttpd:qs_value(Req, "stale", "nil")).
+
%% Read the `reduce` query parameter as an atom; defaults to 'true'.
%% NOTE(review): same atom-table concern as get_stale_type/1 —
%% list_to_atom runs on untrusted query input.
get_reduce_type(Req) ->
    list_to_atom(chttpd:qs_value(Req, "reduce", "true")).
+
%% Parse and validate all view query-string parameters into a
%% #view_query_args record. Keys is the optional multi-get key list
%% (nil when absent); ViewType is map | red_map | reduce | nil.
%% Throws {query_parse_error, Msg} on invalid combinations.
parse_view_params(Req, Keys, ViewType) ->
    QueryParams = lists:foldl(fun({K, V}, Acc) ->
        parse_view_param(K, V) ++ Acc
    end, [], chttpd:qs(Req)),
    IsMultiGet = (Keys =/= nil),
    Args = #view_query_args{
        view_type=ViewType,
        multi_get=IsMultiGet
    },
    QueryArgs = lists:foldl(fun({K, V}, Args2) ->
        validate_view_query(K, V, Args2)
    end, Args, lists:reverse(QueryParams)), % Reverse to match QS order.

    % multi-key reduce requests are only meaningful with group=true
    % (group_level =:= exact); reject anything else
    GroupLevel = QueryArgs#view_query_args.group_level,
    case {ViewType, GroupLevel, IsMultiGet} of
    {reduce, GL, true} when GL =/= exact ->
        Msg = <<"Multi-key fetches for reduce "
                "view must include `group=true`">>,
        throw({query_parse_error, Msg});
    _ ->
        QueryArgs
    end.
+
%% Translate one raw query-string pair into zero or more tagged
%% {Field, Value} options. Clause order matters: specific values
%% (e.g. stale=ok) must precede their catch-all rejection clauses.
%% Unrecognized parameters fall through to the {extra, _} clause.
parse_view_param("", _) ->
    [];
parse_view_param("key", Value) ->
    % key=K is shorthand for startkey=K&endkey=K
    JsonKey = ?JSON_DECODE(Value),
    [{start_key, JsonKey}, {end_key, JsonKey}];
parse_view_param("startkey_docid", Value) ->
    [{start_docid, ?l2b(Value)}];
parse_view_param("endkey_docid", Value) ->
    [{end_docid, ?l2b(Value)}];
parse_view_param("startkey", Value) ->
    [{start_key, ?JSON_DECODE(Value)}];
parse_view_param("endkey", Value) ->
    [{end_key, ?JSON_DECODE(Value)}];
parse_view_param("limit", Value) ->
    [{limit, parse_positive_int_param(Value)}];
parse_view_param("count", _Value) ->
    % pre-0.9 parameter name, rejected with a pointer to `limit`
    throw({query_parse_error, <<"Query parameter 'count' is now 'limit'.">>});
parse_view_param("stale", "ok") ->
    [{stale, ok}];
parse_view_param("stale", _Value) ->
    throw({query_parse_error, <<"stale only available as stale=ok">>});
parse_view_param("update", _Value) ->
    % legacy parameter, rejected with its replacement
    throw({query_parse_error, <<"update=false is now stale=ok">>});
parse_view_param("descending", Value) ->
    [{descending, parse_bool_param(Value)}];
parse_view_param("skip", Value) ->
    [{skip, parse_int_param(Value)}];
parse_view_param("group", Value) ->
    % group=true means "group at exact key granularity"
    case parse_bool_param(Value) of
    true -> [{group_level, exact}];
    false -> [{group_level, 0}]
    end;
parse_view_param("group_level", Value) ->
    [{group_level, parse_positive_int_param(Value)}];
parse_view_param("inclusive_end", Value) ->
    [{inclusive_end, parse_bool_param(Value)}];
parse_view_param("reduce", Value) ->
    [{reduce, parse_bool_param(Value)}];
parse_view_param("include_docs", Value) ->
    [{include_docs, parse_bool_param(Value)}];
parse_view_param("list", Value) ->
    [{list, ?l2b(Value)}];
parse_view_param("callback", _) ->
    []; % Verified in the JSON response functions
parse_view_param("show_total_rows", Value) ->
    [{show_total_rows, parse_bool_param(Value)}];
parse_view_param(Key, Value) ->
    [{extra, {Key, Value}}].
+
%% Fold one parsed query option into the #view_query_args record,
%% rejecting combinations that are invalid for the view type or for
%% multi-get (keys-list) requests. Throws {query_parse_error, Msg}.
validate_view_query(start_key, Value, Args) ->
    case Args#view_query_args.multi_get of
    true ->
        Msg = <<"Query parameter `start_key` is "
                "not compatible with multi-get">>,
        throw({query_parse_error, Msg});
    _ ->
        Args#view_query_args{start_key=Value}
    end;
validate_view_query(start_docid, Value, Args) ->
    Args#view_query_args{start_docid=Value};
validate_view_query(end_key, Value, Args) ->
    case Args#view_query_args.multi_get of
    true ->
        Msg = <<"Query parameter `end_key` is "
                "not compatible with multi-get">>,
        throw({query_parse_error, Msg});
    _ ->
        Args#view_query_args{end_key=Value}
    end;
validate_view_query(end_docid, Value, Args) ->
    Args#view_query_args{end_docid=Value};
validate_view_query(limit, Value, Args) ->
    Args#view_query_args{limit=Value};
validate_view_query(list, Value, Args) ->
    Args#view_query_args{list=Value};
validate_view_query(stale, Value, Args) ->
    Args#view_query_args{stale=Value};
validate_view_query(descending, true, Args) ->
    % flipping direction also swaps the open-range default keys so an
    % unbounded descending fold still covers the whole index
    case Args#view_query_args.direction of
    rev -> Args; % Already reversed
    fwd ->
        Args#view_query_args{
            direction = rev,
            start_key =
                reverse_key_default(Args#view_query_args.start_key),
            start_docid =
                reverse_key_default(Args#view_query_args.start_docid),
            end_key =
                reverse_key_default(Args#view_query_args.end_key),
            end_docid =
                reverse_key_default(Args#view_query_args.end_docid)
        }
    end;
validate_view_query(descending, false, Args) ->
    Args; % Ignore default condition
validate_view_query(skip, Value, Args) ->
    Args#view_query_args{skip=Value};
validate_view_query(group_level, Value, Args) ->
    case Args#view_query_args.view_type of
    reduce ->
        Args#view_query_args{group_level=Value};
    _ ->
        Msg = <<"Invalid URL parameter 'group' or "
                "'group_level' for non-reduce view.">>,
        throw({query_parse_error, Msg})
    end;
validate_view_query(inclusive_end, Value, Args) ->
    Args#view_query_args{inclusive_end=Value};
validate_view_query(reduce, _, Args) ->
    case Args#view_query_args.view_type of
    map ->
        Msg = <<"Invalid URL parameter `reduce` for map view.">>,
        throw({query_parse_error, Msg});
    _ ->
        Args
    end;
validate_view_query(include_docs, true, Args) ->
    case Args#view_query_args.view_type of
    reduce ->
        Msg = <<"Query parameter `include_docs` "
                "is invalid for reduce views.">>,
        throw({query_parse_error, Msg});
    _ ->
        Args#view_query_args{include_docs=true}
    end;
validate_view_query(include_docs, _Value, Args) ->
    Args;
validate_view_query(show_total_rows, false, Args) ->
    Args#view_query_args{show_total_rows=false};
validate_view_query(show_total_rows, _Value, Args) ->
    Args;
validate_view_query(extra, _Value, Args) ->
    Args.
+
%% Build the fold fun passed to the map-view fold. The accumulator is
%% {Limit, Skip, Resp, RowFunAcc, OffsetAcc}: Resp stays `undefined`
%% until the first row is rendered (which is when the HTTP response is
%% started and the offset computed), RowFunAcc is the send_row
%% callback's state (e.g. the "," separator), OffsetAcc remembers the
%% offset for finish_view_fold.
make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
    #view_query_args{
        end_key = EndKey,
        end_docid = EndDocId,
        inclusive_end = InclusiveEnd,
        direction = Dir
    } = QueryArgs,

    #view_fold_helper_funs{
        passed_end = PassedEndFun,
        start_response = StartRespFun,
        send_row = SendRowFun,
        reduce_count = ReduceCountFun
    } = apply_default_helper_funs(HelperFuns,
        {Dir, EndKey, EndDocId, InclusiveEnd}),

    #view_query_args{
        include_docs = IncludeDocs
    } = QueryArgs,

    fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc,
                                            OffsetAcc}) ->
        PassedEnd = PassedEndFun(Key, DocId),
        case {PassedEnd, AccLimit, AccSkip, Resp} of
        {true, _, _, _} ->
            % The stop key has been passed, stop looping.
            % We may need offset so calcluate it here.
            % Checking Resp is an optimization that tells
            % us its already been calculated (and sent).
            NewOffset = case Resp of
            undefined -> ReduceCountFun(OffsetReds);
            _ -> nil
            end,
            {stop, {AccLimit, AccSkip, Resp, RowFunAcc, NewOffset}};
        {_, 0, _, _} ->
            % we've done "limit" rows, stop foldling
            {stop, {0, 0, Resp, RowFunAcc, OffsetAcc}};
        {_, _, AccSkip, _} when AccSkip > 0 ->
            % just keep skipping
            {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc, OffsetAcc}};
        {_, _, _, undefined} ->
            % rendering the first row, first we start the response
            Offset = ReduceCountFun(OffsetReds),
            {ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag,
                TotalViewCount, Offset, RowFunAcc),
            {Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value},
                IncludeDocs, RowFunAcc0),
            {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2, Offset}};
        {_, AccLimit, _, Resp} when (AccLimit > 0) ->
            % rendering all other rows
            {Go, RowFunAcc2} = SendRowFun(Resp, Db, {{Key, DocId}, Value},
                IncludeDocs, RowFunAcc),
            {Go, {AccLimit - 1, 0, Resp, RowFunAcc2, OffsetAcc}}
        end
    end.
+
%% Build the two callbacks for a reduce-view fold:
%%  - GroupRowsFun decides whether two keys belong to the same group
%%    (everything groups together at level 0; lists are compared on
%%    their GroupLevel-length prefix; otherwise exact equality), and
%%  - RespFun renders one reduced row per group, with the accumulator
%%    {Limit, Skip, Resp, RowAcc} where Resp stays `undefined` until
%%    the first row starts the HTTP response.
make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
    #reduce_fold_helper_funs{
        start_response = StartRespFun,
        send_row = SendRowFun
    } = apply_default_helper_funs(HelperFuns),

    GroupRowsFun =
        fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
            true;
        ({Key1,_}, {Key2,_})
                when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
            lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
        ({Key1,_}, {Key2,_}) ->
            Key1 == Key2
        end,

    RespFun = fun
    (_Key, _Red, {AccLimit, AccSkip, Resp, RowAcc}) when AccSkip > 0 ->
        % keep skipping
        {ok, {AccLimit, AccSkip - 1, Resp, RowAcc}};
    (_Key, _Red, {0, _AccSkip, Resp, RowAcc}) ->
        % we've exhausted limit rows, stop
        {stop, {0, _AccSkip, Resp, RowAcc}};

    (_Key, Red, {AccLimit, 0, undefined, RowAcc0}) when GroupLevel == 0 ->
        % we haven't started responding yet and group=false
        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
        {Go, RowAcc2} = SendRowFun(Resp2, {null, Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
    (_Key, Red, {AccLimit, 0, Resp, RowAcc}) when GroupLevel == 0 ->
        % group=false but we've already started the response
        {Go, RowAcc2} = SendRowFun(Resp, {null, Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp, RowAcc2}};

    (Key, Red, {AccLimit, 0, undefined, RowAcc0})
            when is_integer(GroupLevel), is_list(Key) ->
        % group_level and we haven't responded yet
        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
        {Go, RowAcc2} = SendRowFun(Resp2, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
    (Key, Red, {AccLimit, 0, Resp, RowAcc})
            when is_integer(GroupLevel), is_list(Key) ->
        % group_level and we've already started the response
        {Go, RowAcc2} = SendRowFun(Resp, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp, RowAcc2}};

    (Key, Red, {AccLimit, 0, undefined, RowAcc0}) ->
        % group=true and we haven't responded yet
        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
        {Go, RowAcc2} = SendRowFun(Resp2, {Key, Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
    (Key, Red, {AccLimit, 0, Resp, RowAcc}) ->
        % group=true and we've already started the response
        {Go, RowAcc2} = SendRowFun(Resp, {Key, Red}, RowAcc),
        {Go, {AccLimit - 1, 0, Resp, RowAcc2}}
    end,
    {ok, GroupRowsFun, RespFun}.
+
%% Fill in any map-view helper callbacks the caller left undefined
%% with the plain-JSON defaults used for ordinary view responses.
apply_default_helper_funs(#view_fold_helper_funs{
    passed_end = PassedEnd,
    start_response = StartResp,
    send_row = SendRow
} = Helpers, {Dir, EndKey, EndDocId, InclusiveEnd}) ->
    Helpers#view_fold_helper_funs{
        passed_end = case PassedEnd of
            undefined -> make_passed_end_fun(Dir, EndKey, EndDocId, InclusiveEnd);
            _ -> PassedEnd
        end,
        start_response = case StartResp of
            undefined -> fun json_view_start_resp/5;
            _ -> StartResp
        end,
        send_row = case SendRow of
            undefined -> fun send_json_view_row/5;
            _ -> SendRow
        end
    }.
+
%% Fill in any reduce-view helper callbacks the caller left undefined
%% with the plain-JSON defaults.
apply_default_helper_funs(#reduce_fold_helper_funs{
    start_response = StartResp,
    send_row = SendRow
} = Helpers) ->
    Helpers#reduce_fold_helper_funs{
        start_response = case StartResp of
            undefined -> fun json_reduce_start_resp/3;
            _ -> StartResp
        end,
        send_row = case SendRow of
            undefined -> fun send_json_reduce_row/3;
            _ -> SendRow
        end
    }.
+
%% Build the predicate that tells the view fold when iteration has
%% moved past the requested end of the range. Keys are compared as
%% [Key, DocId] pairs under view collation (couch_view:less_json).
make_passed_end_fun(fwd, EndKey, EndDocId, InclusiveEnd) ->
    case InclusiveEnd of
    true ->
        % inclusive: out of range only once strictly past the end
        fun(ViewKey, ViewId) ->
            couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId])
        end;
    false ->
        % exclusive: the end key itself is already out of range
        fun
            (ViewKey, _ViewId) when ViewKey == EndKey ->
                true;
            (ViewKey, ViewId) ->
                couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId])
        end
    end;

make_passed_end_fun(rev, EndKey, EndDocId, InclusiveEnd) ->
    % descending folds compare in the opposite direction
    case InclusiveEnd of
    true ->
        fun(ViewKey, ViewId) ->
            couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId])
        end;
    false->
        fun
            (ViewKey, _ViewId) when ViewKey == EndKey ->
                true;
            (ViewKey, ViewId) ->
                couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId])
        end
    end.
+
%% Default start_response callback for map views: open a 200 chunked
%% JSON response tagged with the view Etag and hand back the body
%% preamble carrying total_rows and offset.
json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc) ->
    {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
    Preamble = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
        [TotalViewCount, Offset]),
    {ok, Resp, Preamble}.
+
%% Default send_row callback for map views: render the row as a JSON
%% object (optionally with its document) and stream it, returning the
%% separator to prefix the next row with.
send_json_view_row(Resp, Db, Row, IncludeDocs, RowFront) ->
    JsonObj = view_row_obj(Db, Row, IncludeDocs),
    send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonObj)),
    {ok, ",\r\n"}.
+
%% Default start_response callback for reduce views: open a 200
%% chunked JSON response and return the body preamble (reduce results
%% carry no total_rows/offset metadata).
json_reduce_start_resp(Req, Etag, _Acc0) ->
    {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
    {ok, Resp, "{\"rows\":[\r\n"}.
+
%% Default send_row callback for reduce views: stream one key/value
%% row and return the separator for the next row.
send_json_reduce_row(Resp, {Key, Value}, RowFront) ->
    RowJson = ?JSON_ENCODE({[{key, Key}, {value, Value}]}),
    send_chunk(Resp, RowFront ++ RowJson),
    {ok, ",\r\n"}.
+
%% Etag for a view group response with no extra request-specific data.
view_group_etag(Group, Db) ->
    view_group_etag(Group, Db, nil).
+
%% Etag derived from the view group's signature and update seq, plus
%% any Extra request-specific term (e.g. keys list, Accept header).
view_group_etag(#group{sig=Sig,current_seq=CurrentSeq}, _Db, Extra) ->
    % This is not as granular as it could be.
    % If there are updates to the db that do not affect the view index,
    % they will change the Etag. For more granular Etags we'd need to keep
    % track of the last Db seq that caused an index change.
    chttpd:make_etag({Sig, CurrentSeq, Extra}).
+
%% Render one view row as an EJSON object. Clause order matters:
%% 1) rows whose doc id slot holds the atom 'error' carry an error reason in
%%    Value instead of a doc id;
%% 2-3) include_docs=true — when the emitted value is an object, an embedded
%%    "_rev" field selects which revision of the doc to fetch;
%% 4) the normal id/key/value row.
view_row_obj(_Db, {{Key, error}, Reason}, _IncludeDocs) ->
    {[{key, Key}, {error, Reason}]};
view_row_obj(Db, {{Key, DocId}, {Props}}, true) ->
    Rev = case couch_util:get_value(<<"_rev">>, Props) of
        undefined -> nil;
        RevStr -> couch_doc:parse_rev(RevStr)
    end,
    view_row_with_doc(Db, {{Key, DocId}, {Props}}, Rev);
view_row_obj(Db, {{Key, DocId}, Value}, true) ->
    view_row_with_doc(Db, {{Key, DocId}, Value}, nil);
view_row_obj(_Db, {{Key, DocId}, Value}, _IncludeDocs) ->
    {[{id, DocId}, {key, Key}, {value, Value}]}.
+
%% Render a view row with its document inlined under "doc".
%% The old-style `catch` intercepts both thrown and returned not_found
%% shapes from couch_doc_open; a missing doc is reported inline as an error
%% member, a deleted doc yields the plain row with no doc member.
view_row_with_doc(Db, {{Key, DocId}, Value}, Rev) ->
    ?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]),
    BaseRow = [{id, DocId}, {key, Key}, {value, Value}],
    case (catch chttpd_db:couch_doc_open(Db, DocId, Rev, [])) of
    {{not_found, missing}, _RevId} ->
        {BaseRow ++ [{error, missing}]};
    {not_found, missing} ->
        {BaseRow ++ [{error, missing}]};
    {not_found, deleted} ->
        {BaseRow};
    Doc ->
        {BaseRow ++ [{doc, couch_doc:to_json_obj(Doc, [])}]}
    end.
+
%% Terminate a map-view fold. If no row was ever sent (Resp slot in the
%% accumulator is still undefined) emit a complete empty result in one
%% response, defaulting a nil offset to TotalRows; otherwise close the open
%% rows array and end the chunked response. Any other fold result is a
%% failure and is re-thrown.
finish_view_fold(Req, TotalRows, FoldResult) ->
    case FoldResult of
    {ok, {_Limit, _Skip, undefined, _, Offset}} ->
        EmptyOffset = if
            Offset =:= nil -> TotalRows;
            true -> Offset
        end,
        send_json(Req, 200, {[
            {total_rows, TotalRows},
            {offset, EmptyOffset},
            {rows, []}
        ]});
    {ok, {_, _, Resp, _, _}} ->
        send_chunk(Resp, "\r\n]}"),
        end_json_response(Resp);
    Error ->
        throw(Error)
    end.
+
%% Terminate a reduce-view fold. An undefined Resp means no row was ever
%% streamed, so send a complete empty result; otherwise close the rows
%% array and end the chunked response.
finish_reduce_fold(Req, undefined) ->
    send_json(Req, 200, {[
        {rows, []}
    ]});
finish_reduce_fold(_Req, Resp) ->
    send_chunk(Resp, "\r\n]}"),
    end_json_response(Resp).
+
%% Parse an HTTP query parameter that must be exactly "true" or "false".
%% Any other value throws {query_parse_error, Msg} for a 400 response.
parse_bool_param("true") -> true;
parse_bool_param("false") -> false;
parse_bool_param(Val) ->
    % fix: client-visible error message had a typo ("paramter")
    Msg = io_lib:format("Invalid value for boolean parameter: ~p", [Val]),
    throw({query_parse_error, ?l2b(Msg)}).
+
%% Parse an HTTP query parameter expected to be a decimal integer.
%% Returns the integer, or throws {query_parse_error, Msg} so the caller
%% can surface a 400 response.
parse_int_param(Val) ->
    % try/catch instead of old-style `catch`: only the error:badarg that
    % list_to_integer/1 raises for a malformed string becomes a
    % query_parse_error; anything else propagates with its stacktrace.
    try
        list_to_integer(Val)
    catch
        error:badarg ->
            Msg = io_lib:format("Invalid value for integer parameter: ~p",
                [Val]),
            throw({query_parse_error, ?l2b(Msg)})
    end.
+
%% Parse a query parameter that must be a non-negative integer.
%% NOTE(review): despite the name, 0 is accepted (guard is >= 0) — callers
%% presumably rely on this for skip/limit-style parameters; confirm before
%% tightening.
parse_positive_int_param(Val) ->
    IntVal = parse_int_param(Val),
    if
        IntVal >= 0 ->
            IntVal;
        true ->
            Fmt = "Invalid value for positive integer parameter: ~p",
            Msg = io_lib:format(Fmt, [Val]),
            throw({query_parse_error, ?l2b(Msg)})
    end.