From 8b187422a888a94b8f1d48eee52f100510a83141 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 11:41:57 -0400 Subject: new app for dedicated cluster HTTP interface --- ebin/chttpd.app | 23 ++ include/chttpd.hrl | 7 + src/chttpd.erl | 602 +++++++++++++++++++++++++++++++ src/chttpd_app.erl | 11 + src/chttpd_auth.erl | 481 +++++++++++++++++++++++++ src/chttpd_db.erl | 939 ++++++++++++++++++++++++++++++++++++++++++++++++ src/chttpd_external.erl | 166 +++++++++ src/chttpd_misc.erl | 232 ++++++++++++ src/chttpd_oauth.erl | 168 +++++++++ src/chttpd_server.erl | 3 + src/chttpd_show.erl | 496 +++++++++++++++++++++++++ src/chttpd_stats.erl | 62 ++++ src/chttpd_sup.erl | 15 + src/chttpd_view.erl | 738 +++++++++++++++++++++++++++++++++++++ 14 files changed, 3943 insertions(+) create mode 100644 ebin/chttpd.app create mode 100644 include/chttpd.hrl create mode 100644 src/chttpd.erl create mode 100644 src/chttpd_app.erl create mode 100644 src/chttpd_auth.erl create mode 100644 src/chttpd_db.erl create mode 100644 src/chttpd_external.erl create mode 100644 src/chttpd_misc.erl create mode 100644 src/chttpd_oauth.erl create mode 100644 src/chttpd_server.erl create mode 100644 src/chttpd_show.erl create mode 100644 src/chttpd_stats.erl create mode 100644 src/chttpd_sup.erl create mode 100644 src/chttpd_view.erl diff --git a/ebin/chttpd.app b/ebin/chttpd.app new file mode 100644 index 00000000..445b0969 --- /dev/null +++ b/ebin/chttpd.app @@ -0,0 +1,23 @@ +{application, chttpd, [ + {description, "HTTP interface for CouchDB cluster"}, + {vsn, "1.0"}, + {modules, [ + chttpd, + chttpd_app, + chttpd_auth, + chttpd_db, + chttpd_external, + chttpd_misc, + chttpd_oauth, + chttpd_server, + chttpd_show, + chttpd_stats, + chttpd_sup, + chttpd_view, + cloudant_auth + ]}, + {registered, []}, + {applications, [kernel, stdlib, couch, fabric]}, + {mod, {chttpd_app,[]}}, + {start_phases, []} +]}. \ No newline at end of file diff --git a/include/chttpd.hrl b/include/chttpd.hrl new file mode 100644 index 00000000..49c0f74a --- /dev/null +++ b/include/chttpd.hrl @@ -0,0 +1,7 @@ +-define(FABRIC, true). + +-ifndef(COUCH). +-include("../../couch/src/couch_db.hrl"). +-endif. + +-include_lib("eunit/include/eunit.hrl"). diff --git a/src/chttpd.erl b/src/chttpd.erl new file mode 100644 index 00000000..a062f89a --- /dev/null +++ b/src/chttpd.erl @@ -0,0 +1,602 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd). +-include("chttpd.hrl"). + +-export([start_link/0, stop/0, handle_request/5, config_change/2]). + +-export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2,body_length/1]). +-export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]). +-export([parse_form/1,json_body/1,json_body_obj/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]). +-export([primary_header_value/2,partition/1,serve_file/3, server_header/0]). +-export([start_chunked_response/3,send_chunk/2]). +-export([start_response_length/4, send/2]). 
+-export([start_json_response/2, start_json_response/3, end_json_response/1]). +-export([send_response/4,send_method_not_allowed/2,send_error/4, send_redirect/2,send_chunked_error/2]). +-export([send_json/2,send_json/3,send_json/4]). + +start_link() -> + BindAddress = couch_config:get("chttpd", "bind_address", any), + Port = couch_config:get("chttpd", "port", "5984"), + Backlog = list_to_integer(couch_config:get("chttpd", "backlog", "128")), + + Default = fun chttpd_db:handle_request/1, + + UrlHandlers = [ + {<<"/">>, fun chttpd_misc:handle_welcome_req/1}, + {<<"favicon.ico">>, fun chttpd_misc:handle_favicon_req/1}, + {<<"_utils">>, fun chttpd_misc:handle_utils_dir_req/1}, + {<<"_all_dbs">>, fun chttpd_misc:handle_all_dbs_req/1}, + {<<"_active_tasks">>, fun chttpd_misc:handle_task_status_req/1}, + {<<"_config">>, fun chttpd_misc:handle_config_req/1}, + {<<"_replicate">>, fun chttpd_misc:handle_replicate_req/1}, + {<<"_uuids">>, fun chttpd_misc:handle_uuids_req/1}, + {<<"_log">>, fun chttpd_misc:handle_log_req/1}, + {<<"_sleep">>, fun chttpd_misc:handle_sleep_req/1}, + {<<"_session">>, fun chttpd_auth:handle_session_req/1}, + {<<"_user">>, fun chttpd_auth:handle_user_req/1}, + {<<"_oauth">>, fun chttpd_oauth:handle_oauth_req/1}, + {<<"_stats">>, fun chttpd_stats:handle_stats_req/1}, + {<<"_restart">>, fun showroom_http:handle_restart_req/1}, + {<<"_cloudant">>, fun showroom_httpd_admin:handle_cloudant_req/1} + ], + + DbHandlers = [ + {<<"_view_cleanup">>, fun chttpd_view:handle_view_cleanup_req/2}, + {<<"_compact">>, fun chttpd_db:handle_compact_req/2}, + {<<"_design">>, fun chttpd_db:handle_design_req/2}, + {<<"_view">>, fun chttpd_db:handle_db_view_req/2}, + {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2}, + {<<"_changes">>, fun chttpd_db:handle_changes_req/2} + ], + + DesignHandlers = [ + {<<"_view">>, fun chttpd_view:handle_view_req/2}, + {<<"_show">>, fun chttpd_show:handle_doc_show_req/2}, + {<<"_list">>, fun chttpd_show:handle_view_list_req/2}, + {<<"_update">>, fun chttpd_show:handle_doc_update_req/2}, + {<<"_info">>, fun chttpd_db:handle_design_info_req/2} + ], + + Loop = fun(Req)-> ?MODULE:handle_request(Req, Default, UrlHandlers, + DbHandlers, DesignHandlers) end, + + {ok, Pid} = case mochiweb_http:start([ + {loop, Loop}, + {name, ?MODULE}, + {ip, BindAddress}, + {port, Port}, + {backlog, Backlog} + ]) of + {ok, MochiPid} -> {ok, MochiPid}; + {error, Reason} -> + io:format("Failure to start Mochiweb: ~s~n",[Reason]), + throw({error, Reason}) + end, + + ok = couch_config:register(fun ?MODULE:config_change/2, Pid), + + {ok, Pid}. + +config_change("chttpd", "bind_address") -> + ?MODULE:stop(); +config_change("chttpd", "port") -> + ?MODULE:stop(); +config_change("chttpd", "backlog") -> + ?MODULE:stop(). + +stop() -> + mochiweb_http:stop(?MODULE). 
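The three handler tables above are plain proplists keyed on the first path segment; handle_request/5 resolves a handler with couch_util:get_value/3 and falls back to chttpd_db:handle_request/1. A minimal sketch of that resolution (the <<"_up">> key and my_handlers module are made up for illustration):

    %% Sketch: resolving a top-level handler the way handle_request/5 does.
    UrlHandlers = [{<<"_up">>, fun my_handlers:handle_up_req/1}],
    Default = fun chttpd_db:handle_request/1,
    HandlerKey = <<"_up">>,                  % first path segment, as a binary
    Handler = couch_util:get_value(HandlerKey, UrlHandlers, Default),
    Handler(HttpReq).                        % HttpReq is the #httpd{} built later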
+ +handle_request(MochiReq, DefaultFun, + UrlHandlers, DbUrlHandlers, DesignUrlHandlers) -> + Begin = now(), + + AuthenticationFuns = [ + fun chttpd_auth:cookie_authentication_handler/1, + fun chttpd_auth:default_authentication_handler/1 + ], + + % for the path, use the raw path with the query string and fragment + % removed, but URL quoting left intact + RawUri = MochiReq:get(raw_path), + Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), MochiReq:get_header_value("Host")), + Path = ?COUCH:db_path(RawUri, Customer), + + HandlerKey = + case mochiweb_util:partition(Path, "/") of + {"", "", ""} -> + <<"/">>; % Special case the root url handler + {FirstPart, _, _} -> + list_to_binary(FirstPart) + end, + LogForClosedSocket = io_lib:format("mochiweb_recv_error for ~s - ~p ~s", [ + MochiReq:get(peer), + MochiReq:get(method), + RawUri + ]), + + Method1 = + case MochiReq:get(method) of + % already an atom + Meth when is_atom(Meth) -> Meth; + + % Non standard HTTP verbs aren't atoms (COPY, MOVE etc) so convert when + % possible (if any module references the atom, then it's existing). + Meth -> couch_util:to_existing_atom(Meth) + end, + increment_method_stats(Method1), + % alias HEAD to GET as mochiweb takes care of stripping the body + Method = case Method1 of + 'HEAD' -> 'GET'; + Other -> Other + end, + + HttpReq = #httpd{ + mochi_req = MochiReq, + method = Method, + path_parts = [list_to_binary(chttpd:unquote(Part)) + || Part <- string:tokens(Path, "/")], + db_url_handlers = DbUrlHandlers, + design_url_handlers = DesignUrlHandlers + }, + + HandlerFun = couch_util:get_value(HandlerKey, UrlHandlers, DefaultFun), + {ok, Resp} = + try + erase(cookie_auth_failed), + case authenticate_request(HttpReq, AuthenticationFuns) of + #httpd{} = Req -> + HandlerFun(cloudant_auth:authorize_request(Req)); + Response -> + Response + end + catch + throw:{http_head_abort, Resp0} -> + {ok, Resp0}; + throw:{invalid_json, S} -> + ?LOG_ERROR("attempted upload of invalid JSON ~s", [S]), + send_error(HttpReq, {bad_request, "invalid UTF-8 JSON"}); + exit:{mochiweb_recv_error, E} -> + ?LOG_INFO("mochiweb_recv_error: ~p", [E]), + showroom_log:message(notice, LogForClosedSocket, []), + exit(normal); + throw:Error -> + % ?LOG_DEBUG("Minor error in HTTP request: ~p",[Error]), + % ?LOG_DEBUG("Stacktrace: ~p",[erlang:get_stacktrace()]), + send_error(HttpReq, Error); + error:badarg -> + ?LOG_ERROR("Badarg error in HTTP request",[]), + ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]), + send_error(HttpReq, badarg); + error:function_clause -> + ?LOG_ERROR("function_clause error in HTTP request",[]), + ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]), + send_error(HttpReq, function_clause); + Tag:Error -> + ?LOG_ERROR("Uncaught error in HTTP request: ~p",[{Tag, Error}]), + ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]), + send_error(HttpReq, Error) + end, + + RequestTime = round(timer:now_diff(now(), Begin)/1000), + showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", [ + MochiReq:get(peer), + MochiReq:get_header_value("Host"), + atom_to_list(Method1), + RawUri, + Resp:get(code), + RequestTime + ]), + couch_stats_collector:record({couchdb, request_time}, RequestTime), + couch_stats_collector:increment({httpd, requests}), + {ok, Resp}. 
+ +% Try authentication handlers in order until one returns a result +authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthFuns) -> + Req; +authenticate_request(#httpd{} = Req, [AuthFun|Rest]) -> + authenticate_request(AuthFun(Req), Rest); +authenticate_request(#httpd{} = Req, []) -> + case couch_config:get("chttpd_auth", "require_valid_user", "false") of + "true" -> + throw({unauthorized, <<"Authentication required.">>}); + "false" -> + case couch_config:get("admins") of + [] -> + Ctx = #user_ctx{roles=[<<"_reader">>, <<"_writer">>, <<"_admin">>]}, + Req#httpd{user_ctx = Ctx}; + _ -> + Req#httpd{user_ctx=#user_ctx{}} + end + end; +authenticate_request(Response, _AuthFuns) -> + Response. + +increment_method_stats(Method) -> + couch_stats_collector:increment({httpd_request_methods, Method}). + + +% Utilities + +partition(Path) -> + mochiweb_util:partition(Path, "/"). + +header_value(#httpd{mochi_req=MochiReq}, Key) -> + MochiReq:get_header_value(Key). + +header_value(#httpd{mochi_req=MochiReq}, Key, Default) -> + case MochiReq:get_header_value(Key) of + undefined -> Default; + Value -> Value + end. + +primary_header_value(#httpd{mochi_req=MochiReq}, Key) -> + MochiReq:get_primary_header_value(Key). + +serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot) -> + {ok, MochiReq:serve_file(RelativePath, DocumentRoot, + server_header() ++ chttpd_auth:cookie_auth_header(Req, []))}. + +qs_value(Req, Key) -> + qs_value(Req, Key, undefined). + +qs_value(Req, Key, Default) -> + couch_util:get_value(Key, qs(Req), Default). + +qs(#httpd{mochi_req=MochiReq}) -> + MochiReq:parse_qs(). + +path(#httpd{mochi_req=MochiReq}) -> + MochiReq:get(path). + +absolute_uri(#httpd{mochi_req=MochiReq}, Path) -> + XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"), + Host = case MochiReq:get_header_value(XHost) of + undefined -> + case MochiReq:get_header_value("Host") of + undefined -> + {ok, {Address, Port}} = inet:sockname(MochiReq:get(socket)), + inet_parse:ntoa(Address) ++ ":" ++ integer_to_list(Port); + Value1 -> + Value1 + end; + Value -> Value + end, + XSsl = couch_config:get("httpd", "x_forwarded_ssl", "X-Forwarded-Ssl"), + Scheme = case MochiReq:get_header_value(XSsl) of + "on" -> "https"; + _ -> + XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"), + case MochiReq:get_header_value(XProto) of + % Restrict to "https" and "http" schemes only + "https" -> "https"; + _ -> "http" + end + end, + Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), + Host), + CustomerRegex = ["^/", Customer, "[/%2F]+"], + NewPath = re:replace(Path, CustomerRegex, "/"), + Scheme ++ "://" ++ Host ++ NewPath. + +unquote(UrlEncodedString) -> + mochiweb_util:unquote(UrlEncodedString). + +quote(UrlDecodedString) -> + mochiweb_util:quote_plus(UrlDecodedString). + +parse_form(#httpd{mochi_req=MochiReq}) -> + mochiweb_multipart:parse_form(MochiReq). + +recv(#httpd{mochi_req=MochiReq}, Len) -> + MochiReq:recv(Len). + +recv_chunked(#httpd{mochi_req=MochiReq}, MaxChunkSize, ChunkFun, InitState) -> + % Fun is called once with each chunk + % Fun({Length, Binary}, State) + % called with Length == 0 on the last time. + MochiReq:stream_body(MaxChunkSize, ChunkFun, InitState). 
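authenticate_request/2 (just below) threads the #httpd{} record through each handler until one attaches a #user_ctx{} or returns a response outright; a handler that declines simply returns the request unchanged. A sketch of a conforming handler (the header name and function are hypothetical):

    %% Sketch of the contract expected by authenticate_request/2: return Req
    %% unchanged to decline, Req#httpd{user_ctx=...} to authenticate, or
    %% throw({unauthorized, ...}) to reject. "X-Example-User" is made up.
    example_auth_handler(Req) ->
        case chttpd:header_value(Req, "X-Example-User") of
            undefined ->
                Req;                                  % decline; next handler runs
            Name ->
                Req#httpd{user_ctx = #user_ctx{name = ?l2b(Name)}}
        end.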
+ +body_length(Req) -> + case header_value(Req, "Transfer-Encoding") of + undefined -> + case header_value(Req, "Content-Length") of + undefined -> undefined; + Length -> list_to_integer(Length) + end; + "chunked" -> chunked; + Unknown -> {unknown_transfer_encoding, Unknown} + end. + +body(#httpd{mochi_req=MochiReq, req_body=ReqBody}) -> + case ReqBody of + undefined -> + % Maximum size of document PUT request body (4GB) + MaxSize = list_to_integer( + couch_config:get("couchdb", "max_document_size", "4294967296")), + MochiReq:recv_body(MaxSize); + _Else -> + ReqBody + end. + +json_body(Httpd) -> + ?JSON_DECODE(body(Httpd)). + +json_body_obj(Httpd) -> + case json_body(Httpd) of + {Props} -> {Props}; + _Else -> + throw({bad_request, "Request body must be a JSON object"}) + end. + + +doc_etag(#doc{revs={Start, [DiskRev|_]}}) -> + "\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"". + +make_etag(Term) -> + <> = erlang:md5(term_to_binary(Term)), + list_to_binary("\"" ++ lists:flatten(io_lib:format("~.36B",[SigInt])) ++ "\""). + +etag_match(Req, CurrentEtag) when is_binary(CurrentEtag) -> + etag_match(Req, binary_to_list(CurrentEtag)); + +etag_match(Req, CurrentEtag) -> + EtagsToMatch = string:tokens( + chttpd:header_value(Req, "If-None-Match", ""), ", "), + lists:member(CurrentEtag, EtagsToMatch). + +etag_respond(Req, CurrentEtag, RespFun) -> + case etag_match(Req, CurrentEtag) of + true -> + % the client has this in their cache. + chttpd:send_response(Req, 304, [{"Etag", CurrentEtag}], <<>>); + false -> + % Run the function. + RespFun() + end. + +verify_is_server_admin(#httpd{user_ctx=#user_ctx{roles=Roles}}) -> + case lists:member(<<"_admin">>, Roles) of + true -> ok; + false -> throw({unauthorized, <<"You are not a server admin.">>}) + end. + +start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) -> + couch_stats_collector:increment({httpd_status_codes, Code}), + Resp = MochiReq:start_response_length({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Length}), + case MochiReq:get(method) of + 'HEAD' -> throw({http_head_abort, Resp}); + _ -> ok + end, + {ok, Resp}. + +send(Resp, Data) -> + Resp:send(Data), + {ok, Resp}. + +start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) -> + couch_stats_collector:increment({httpd_status_codes, Code}), + Resp = MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), chunked}), + case MochiReq:get(method) of + 'HEAD' -> throw({http_head_abort, Resp}); + _ -> ok + end, + {ok, Resp}. + +send_chunk(Resp, Data) -> + Resp:write_chunk(Data), + {ok, Resp}. + +send_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) -> + couch_stats_collector:increment({httpd_status_codes, Code}), + if Code >= 400 -> + ?LOG_DEBUG("httpd ~p error response:~n ~s", [Code, Body]); + true -> ok + end, + {ok, MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Body})}. + +send_method_not_allowed(Req, Methods) -> + send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, ?l2b("Only " ++ Methods ++ " allowed")). + +send_json(Req, Value) -> + send_json(Req, 200, Value). + +send_json(Req, Code, Value) -> + send_json(Req, Code, [], Value). 
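make_etag/1 above derives an opaque ETag from any term (a base-36 rendering of its MD5), and etag_respond/3 short-circuits to a 304 when the client's If-None-Match matches. A usage sketch for a hypothetical handler:

    %% Sketch: conditional response around a cacheable body. Info is whatever
    %% term identifies the current state, e.g. a db info proplist.
    handle_example_req(Req, Info) ->
        Etag = chttpd:make_etag(Info),
        chttpd:etag_respond(Req, Etag, fun() ->
            %% only runs on a cache miss; a 304 with the same ETag otherwise
            chttpd:send_json(Req, 200, [{"Etag", Etag}], {[{ok, true}]})
        end).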
+ +send_json(Req, Code, Headers, Value) -> + DefaultHeaders = [ + {"Content-Type", negotiate_content_type(Req)}, + {"Cache-Control", "must-revalidate"} + ], + Body = list_to_binary( + [start_jsonp(Req), ?JSON_ENCODE(Value), end_jsonp(), $\n] + ), + send_response(Req, Code, DefaultHeaders ++ Headers, Body). + +start_json_response(Req, Code) -> + start_json_response(Req, Code, []). + +start_json_response(Req, Code, Headers) -> + DefaultHeaders = [ + {"Content-Type", negotiate_content_type(Req)}, + {"Cache-Control", "must-revalidate"} + ], + start_jsonp(Req), % Validate before starting chunked. + %start_chunked_response(Req, Code, DefaultHeaders ++ Headers). + {ok, Resp} = start_chunked_response(Req, Code, DefaultHeaders ++ Headers), + case start_jsonp(Req) of + [] -> ok; + Start -> send_chunk(Resp, Start) + end, + {ok, Resp}. + +end_json_response(Resp) -> + send_chunk(Resp, end_jsonp() ++ [$\r,$\n]), + %send_chunk(Resp, [$\n]), + send_chunk(Resp, []). + +start_jsonp(Req) -> + case get(jsonp) of + undefined -> put(jsonp, qs_value(Req, "callback", no_jsonp)); + _ -> ok + end, + case get(jsonp) of + no_jsonp -> []; + [] -> []; + CallBack -> + try + validate_callback(CallBack), + CallBack ++ "(" + catch + Error -> + put(jsonp, no_jsonp), + throw(Error) + end + end. + +end_jsonp() -> + Resp = case get(jsonp) of + no_jsonp -> []; + [] -> []; + _ -> ");" + end, + put(jsonp, undefined), + Resp. + +validate_callback(CallBack) when is_binary(CallBack) -> + validate_callback(binary_to_list(CallBack)); +validate_callback([]) -> + ok; +validate_callback([Char | Rest]) -> + case Char of + _ when Char >= $a andalso Char =< $z -> ok; + _ when Char >= $A andalso Char =< $Z -> ok; + _ when Char >= $0 andalso Char =< $9 -> ok; + _ when Char == $. -> ok; + _ when Char == $_ -> ok; + _ when Char == $[ -> ok; + _ when Char == $] -> ok; + _ -> + throw({bad_request, invalid_callback}) + end, + validate_callback(Rest). 
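start_jsonp/1 and end_jsonp/0 above give every JSON response optional JSONP support: when a callback query parameter is present and passes validate_callback/1 (letters, digits and . _ [ ] only), the body is wrapped in callback(...);. A sketch of what a handler gets for free (path and callback name are illustrative):

    %% Sketch: GET /_example?callback=loadData (both made up).
    %% send_json/2 goes through start_jsonp/end_jsonp, so the body comes out as
    %%     loadData({"ok":true});
    %% while ?callback=alert(1) is rejected with {bad_request, invalid_callback}.
    handle_example_req(Req) ->
        chttpd:send_json(Req, {[{ok, true}]}).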
+ + +error_info({Error, Reason}) when is_list(Reason) -> + error_info({Error, ?l2b(Reason)}); +error_info(bad_request) -> + {400, <<"bad_request">>, <<>>}; +error_info({bad_request, Reason}) -> + {400, <<"bad_request">>, Reason}; +error_info({query_parse_error, Reason}) -> + {400, <<"query_parse_error">>, Reason}; +error_info(not_found) -> + {404, <<"not_found">>, <<"missing">>}; +error_info({not_found, Reason}) -> + {404, <<"not_found">>, Reason}; +error_info({not_acceptable, Reason}) -> + {406, <<"not_acceptable">>, Reason}; +error_info(conflict) -> + {409, <<"conflict">>, <<"Document update conflict.">>}; +error_info({forbidden, Msg}) -> + {403, <<"forbidden">>, Msg}; +error_info({credentials_expired, Msg}) -> + {403, <<"credentials_expired">>, Msg}; +error_info({unauthorized, Msg}) -> + {401, <<"unauthorized">>, Msg}; +error_info(file_exists) -> + {412, <<"file_exists">>, <<"The database could not be " + "created, the file already exists.">>}; +error_info({r_quorum_not_met, Reason}) -> + {412, <<"read_quorum_not_met">>, Reason}; +error_info({w_quorum_not_met, Reason}) -> + {500, <<"write_quorum_not_met">>, Reason}; +error_info({bad_ctype, Reason}) -> + {415, <<"bad_content_type">>, Reason}; +error_info({error, illegal_database_name}) -> + {400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), " + "digits (0-9), and any of the characters _, $, (, ), +, -, and / " + "are allowed">>}; +error_info(not_implemented) -> + {501, <<"not_implemented">>, <<"this feature is not yet implemented">>}; +error_info({Error, Reason}) -> + {500, couch_util:to_binary(Error), couch_util:to_binary(Reason)}; +error_info(Error) -> + {500, <<"unknown_error">>, couch_util:to_binary(Error)}. + +send_error(_Req, {already_sent, Resp, _Error}) -> + {ok, Resp}; + +send_error(#httpd{mochi_req=MochiReq}=Req, Error) -> + {Code, ErrorStr, ReasonStr} = error_info(Error), + Headers = if Code == 401 -> + case MochiReq:get_header_value("X-CouchDB-WWW-Authenticate") of + undefined -> + case couch_config:get("httpd", "WWW-Authenticate", nil) of + nil -> + []; + Type -> + [{"WWW-Authenticate", Type}] + end; + Type -> + [{"WWW-Authenticate", Type}] + end; + true -> + [] + end, + send_error(Req, Code, Headers, ErrorStr, ReasonStr). + +send_error(Req, Code, ErrorStr, ReasonStr) -> + send_error(Req, Code, [], ErrorStr, ReasonStr). + +send_error(Req, Code, Headers, ErrorStr, ReasonStr) -> + send_json(Req, Code, Headers, + {[{<<"error">>, ErrorStr}, + {<<"reason">>, ReasonStr}]}). + +% give the option for list functions to output html or other raw errors +send_chunked_error(Resp, {_Error, {[{<<"body">>, Reason}]}}) -> + send_chunk(Resp, Reason), + send_chunk(Resp, []); + +send_chunked_error(Resp, Error) -> + {Code, ErrorStr, ReasonStr} = error_info(Error), + JsonError = {[{<<"code">>, Code}, + {<<"error">>, ErrorStr}, + {<<"reason">>, ReasonStr}]}, + send_chunk(Resp, ?l2b([$\n,?JSON_ENCODE(JsonError),$\n])), + send_chunk(Resp, []). + +send_redirect(Req, Path) -> + Headers = [{"Location", chttpd:absolute_uri(Req, Path)}], + send_response(Req, 301, Headers, <<>>). + +negotiate_content_type(#httpd{mochi_req=MochiReq}) -> + %% Determine the appropriate Content-Type header for a JSON response + %% depending on the Accept header in the request. 
A request that explicitly + %% lists the correct JSON MIME type will get that type, otherwise the + %% response will have the generic MIME type "text/plain" + AcceptedTypes = case MochiReq:get_header_value("Accept") of + undefined -> []; + AcceptHeader -> string:tokens(AcceptHeader, ", ") + end, + case lists:member("application/json", AcceptedTypes) of + true -> "application/json"; + false -> "text/plain;charset=utf-8" + end. + +server_header() -> + OTPVersion = erlang:system_info(otp_release), + [{"Server", "CouchDB/0.11.0 (Erlang OTP/" ++ OTPVersion ++ ")"}]. diff --git a/src/chttpd_app.erl b/src/chttpd_app.erl new file mode 100644 index 00000000..4b8356fb --- /dev/null +++ b/src/chttpd_app.erl @@ -0,0 +1,11 @@ +-module(chttpd_app). +-behaviour(application). +-export([start/2, stop/1]). + +-include("chttpd.hrl"). + +start(_Type, StartArgs) -> + chttpd_sup:start_link(StartArgs). + +stop(_State) -> + ok. diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl new file mode 100644 index 00000000..3916f7cf --- /dev/null +++ b/src/chttpd_auth.erl @@ -0,0 +1,481 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_auth). +-include("chttpd.hrl"). + +-export([special_test_authentication_handler/1, null_authentication_handler/1, + cookie_authentication_handler/1, default_authentication_handler/1, + handle_session_req/1, handle_user_req/1, cookie_auth_header/2]). + +% used by OAuth handler +-export([get_user/1, ensure_users_db_exists/1]). + +-import(chttpd, [send_json/2, send_json/4, send_method_not_allowed/2]). + +special_test_authentication_handler(Req) -> + case chttpd:header_value(Req, "WWW-Authenticate") of + "X-Couch-Test-Auth " ++ NamePass -> + % NamePass is a colon separated string: "joe schmoe:a password". + [Name, Pass] = re:split(NamePass, ":", [{return, list}]), + case {Name, Pass} of + {"Jan Lehnardt", "apple"} -> ok; + {"Christopher Lenz", "dog food"} -> ok; + {"Noah Slater", "biggiesmalls endian"} -> ok; + {"Chris Anderson", "mp3"} -> ok; + {"Damien Katz", "pecan pie"} -> ok; + {_, _} -> + throw({unauthorized, <<"Name or password is incorrect.">>}) + end, + Req#httpd{user_ctx=#user_ctx{name=?l2b(Name)}}; + _ -> + % No X-Couch-Test-Auth credentials sent, give admin access so the + % previous authentication can be restored after the test + Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}} + end. + +null_authentication_handler(Req) -> + Ctx = #user_ctx{roles=[<<"_reader">>, <<"writer">>, <<"_admin">>]}, + Req#httpd{user_ctx=Ctx}. 
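The error_info/1 clauses earlier in chttpd.erl give each thrown term a status code, an error name and a reason, and send_error/2 serialises that triple as {"error":...,"reason":...}. Two mappings read straight off those clauses:

    %% Sketch: what a handler throw turns into on the wire.
    {409, <<"conflict">>, <<"Document update conflict.">>} =
        chttpd:error_info(conflict),
    {403, <<"forbidden">>, <<"no">>} =
        chttpd:error_info({forbidden, <<"no">>}).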
+ +default_authentication_handler(Req) -> + case basic_username_pw(Req) of + {Username, Password} -> + case get_user(Username) of + nil -> + throw({unauthorized, <<"unknown username">>}); + Props -> + ExpectedHash = couch_util:get_value(<<"password_sha">>, Props), + Salt = couch_util:get_value(<<"salt">>, Props), + case hash_password(?l2b(Password), Salt) of + ExpectedHash -> + Ctx = #user_ctx{ + name = couch_util:get_value(<<"username">>, Props), + roles = couch_util:get_value(<<"roles">>, Props) + }, + Req#httpd{user_ctx=Ctx}; + _ -> + throw({unauthorized, <<"password is incorrect">>}) + end + end; + nil -> + Req + end. + +cookie_authentication_handler(#httpd{path_parts=[<<"_session">>], + method='POST'} = Req) -> + % ignore any cookies sent with login request + Req; +cookie_authentication_handler(Req) -> + case cookie_auth_user(Req) of + nil -> + Req; + cookie_auth_failed -> + put(cookie_auth_failed, true), + Req#httpd{auth=cookie_auth_failed}; + Req2 -> + Req2 + end. + +cookie_auth_header(#httpd{auth=cookie_auth_failed}, Headers) -> + % check for an AuthSession cookie from login handler + CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""), + Cookies = mochiweb_cookies:parse_cookie(CookieHeader), + AuthSession = couch_util:get_value("AuthSession", Cookies), + if AuthSession == undefined -> + [generate_cookie_buster()]; + true -> + [] + end; +cookie_auth_header(#httpd{user_ctx=#user_ctx{name=null}}, _Headers) -> + []; +cookie_auth_header(#httpd{user_ctx=Ctx, auth={Secret,true}}, Headers) -> + % Note: we only set the AuthSession cookie if: + % * a valid AuthSession cookie has been received + % * we are outside a 10% timeout window + % * and if an AuthSession cookie hasn't already been set e.g. by a login + % or logout handler. + % The login and logout handlers set the AuthSession cookie themselves. + CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""), + Cookies = mochiweb_cookies:parse_cookie(CookieHeader), + AuthSession = couch_util:get_value("AuthSession", Cookies), + if AuthSession == undefined -> + [generate_cookie(Ctx#user_ctx.name, Secret, timestamp())]; + true -> + [] + end; +cookie_auth_header(Req, Headers) -> + case get(cookie_auth_failed) of + true -> + cookie_auth_header(Req#httpd{auth=cookie_auth_failed}, Headers); + _ -> + [] + end. 
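default_authentication_handler/1 above ties together basic_username_pw/1 (defined further down), get_user/1 and hash_password/2: the supplied password is salted, hashed and compared against the stored password_sha. A sketch of that check with made-up credentials:

    %% Sketch of the check, once basic_username_pw/1 has yielded {"joe", "secret"}.
    case get_user(<<"joe">>) of
        nil ->
            throw({unauthorized, <<"unknown username">>});
        Props ->
            Salt = couch_util:get_value(<<"salt">>, Props),
            Expected = couch_util:get_value(<<"password_sha">>, Props),
            case hash_password(?l2b("secret"), Salt) of
                Expected -> authenticated;
                _ -> throw({unauthorized, <<"password is incorrect">>})
            end
    end.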
+ +handle_session_req(#httpd{method='POST', mochi_req=MochiReq, user_ctx=Ctx}=Req) -> + % login + Form = parse_form(MochiReq), + UserName = extract_username(Form), + case get_user(UserName) of + nil -> + throw({forbidden, <<"unknown username">>}); + User -> + UserSalt = couch_util:get_value(<<"salt">>, User), + case lists:member(<<"_admin">>, Ctx#user_ctx.roles) of + true -> + ok; + false -> + Password = extract_password(Form), + ExpectedHash = couch_util:get_value(<<"password_sha">>, User), + case hash_password(Password, UserSalt) of + ExpectedHash -> + ok; + _Else -> + throw({forbidden, <<"Name or password is incorrect.">>}) + end + end, + Secret = ?l2b(couch_config:get("chttpd_auth", "secret")), + SecretAndSalt = <>, + Cookie = generate_cookie(UserName, SecretAndSalt, timestamp()), + send_response(Req, [Cookie]) + end; +handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) -> + % whoami + #user_ctx{name = Name, roles = Roles} = UserCtx, + ForceLogin = chttpd:qs_value(Req, "basic", "false"), + case {Name, ForceLogin} of + {null, "true"} -> + throw({unauthorized, <<"Please login.">>}); + _False -> + send_json(Req, {[{ok,true}, {name,Name}, {roles,Roles}]}) + end; +handle_session_req(#httpd{method='DELETE'}=Req) -> + % logout + send_response(Req, [generate_cookie_buster()]); +handle_session_req(Req) -> + send_method_not_allowed(Req, "GET,HEAD,POST,DELETE"). + +handle_user_req(#httpd{method='POST'}=Req) -> + DbName = couch_config:get("chttpd_auth", "authentication_db"), + {ok, Db} = ensure_users_db_exists(?l2b(DbName)), + Result = create_user(Req, Db), + couch_db:close(Db), + Result; +handle_user_req(#httpd{method=Method, path_parts=[_]}=_Req) when + Method == 'PUT' orelse Method == 'DELETE' -> + throw({bad_request, <<"Username is missing">>}); +handle_user_req(#httpd{method='PUT', path_parts=[_, UserName]}=Req) -> + DbName = couch_config:get("chttpd_auth", "authentication_db"), + {ok, Db} = ensure_users_db_exists(?l2b(DbName)), + Result = update_user(Req, Db, UserName), + couch_db:close(Db), + Result; +handle_user_req(#httpd{method='DELETE', path_parts=[_, UserName]}=Req) -> + DbName = couch_config:get("chttpd_auth", "authentication_db"), + {ok, Db} = ensure_users_db_exists(?l2b(DbName)), + Result = delete_user(Req, Db, UserName), + couch_db:close(Db), + Result; +handle_user_req(Req) -> + send_method_not_allowed(Req, "DELETE,POST,PUT"). + +get_user(UserName) when is_list(UserName) -> + get_user(?l2b(UserName)); +get_user(UserName) -> + case couch_config:get("admins", ?b2l(UserName)) of + "-hashed-" ++ HashedPwdAndSalt -> + [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","), + [ + {<<"username">>, UserName}, + {<<"roles">>, [<<"_reader">>, <<"_writer">>, <<"_admin">>]}, + {<<"salt">>, ?l2b(Salt)}, + {<<"password_sha">>, ?l2b(HashedPwd)} + ]; + _ -> + try ets:lookup(users_cache, UserName) of + [{UserName, Props}] -> + Props; + [] -> + load_user_from_db(UserName) + catch error:badarg -> + load_user_from_db(UserName) + end + end. + +load_user_from_db(UserName) -> + DbName = couch_config:get("chttpd_auth", "authentication_db"), + {ok, Db} = ensure_users_db_exists(?l2b(DbName)), + UserProps = case couch_db:open_doc(Db, UserName, []) of + {ok, Doc} -> + ?LOG_INFO("cache miss on username ~s", [UserName]), + {Props} = couch_doc:to_json_obj(Doc, []), + Props; + _Else -> + ?LOG_INFO("no record of user ~s", [UserName]), + nil + end, + couch_db:close(Db), + UserProps. 
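get_user/1 above consults the [admins] config section first; an entry of the form "-hashed-<sha>,<salt>" is turned into a synthetic user record with the _reader/_writer/_admin roles before the users database or cache is touched. Sketch of that translation (hash and salt values are made up):

    %% Sketch: an [admins] entry such as
    %%     joe = -hashed-207b1b4f8434dc604206c2c0c2aa3aa61ea3f9f9,9fda70ae
    %% comes back from get_user(<<"joe">>) as
    [{<<"username">>, <<"joe">>},
     {<<"roles">>, [<<"_reader">>, <<"_writer">>, <<"_admin">>]},
     {<<"salt">>, <<"9fda70ae">>},
     {<<"password_sha">>, <<"207b1b4f8434dc604206c2c0c2aa3aa61ea3f9f9">>}].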
+ +ensure_users_db_exists(DbName) -> + Options = [{user_ctx, #user_ctx{roles=[<<"_admin">>]}}], + case couch_db:open(DbName, Options) of + {ok, Db} -> + {ok, Db}; + {error, _} -> + couch_db:create(DbName, Options) + end. + +% internal functions + +basic_username_pw(Req) -> + case chttpd:header_value(Req, "Authorization") of + "Basic " ++ Base64Value -> + case string:tokens(?b2l(couch_util:decodeBase64(Base64Value)),":") of + [User, Pass] -> + {User, Pass}; + [User] -> + {User, ""}; + _ -> + nil + end; + _ -> + nil + end. + +cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> + case MochiReq:get_cookie_value("AuthSession") of + undefined -> + nil; + Cookie -> + AuthSession = couch_util:decodeBase64Url(Cookie), + [User, TimeStr | HashParts] = string:tokens(?b2l(AuthSession), ":"), + % Verify expiry and hash + case couch_config:get("chttpd_auth", "secret") of + undefined -> + ?LOG_DEBUG("AuthSession cookie, but no secret in config!", []), + cookie_auth_failed; + SecretStr -> + case get_user(User) of + nil -> + ?LOG_DEBUG("no record of user ~s", [User]), + cookie_auth_failed; + Result -> + Secret = ?l2b(SecretStr), + UserSalt = couch_util:get_value(<<"salt">>, Result), + FullSecret = <>, + ExpectedHash = crypto:sha_mac(FullSecret, [User, ":", TimeStr]), + case ?l2b(string:join(HashParts, ":")) of + ExpectedHash -> + TimeStamp = erlang:list_to_integer(TimeStr, 16), + Timeout = erlang:list_to_integer(couch_config:get( + "chttpd_auth", "timeout", "600")), + CurrentTime = timestamp(), + if CurrentTime < TimeStamp + Timeout -> + TimeLeft = TimeStamp + Timeout - CurrentTime, + Req#httpd{user_ctx=#user_ctx{ + name=?l2b(User), + roles=couch_util:get_value(<<"roles">>, Result, []) + }, auth={FullSecret, TimeLeft < Timeout*0.9}}; + true -> + ?LOG_DEBUG("cookie for ~s was expired", [User]), + put(cookie_auth_failed, true), + Msg = lists:concat(["Your session has expired after ", + Timeout div 60, " minutes of inactivity"]), + throw({credentials_expired, ?l2b(Msg)}) + end; + _Else -> + ?LOG_DEBUG("cookie password hash was incorrect", []), + cookie_auth_failed + end + end + end + end. + +create_user(#httpd{method='POST', mochi_req=MochiReq}=Req, Db) -> + Form = parse_form(MochiReq), + {UserName, Password} = extract_username_password(Form), + case get_user(UserName) of + nil -> + Roles = [?l2b(R) || R <- proplists:get_all_values("roles", Form)], + if Roles /= [] -> + chttpd:verify_is_server_admin(Req); + true -> ok end, + Active = chttpd_view:parse_bool_param(couch_util:get_value("active", + Form, "true")), + UserSalt = couch_util:new_uuid(), + UserDoc = #doc{ + id = UserName, + body = {[ + {<<"active">>, Active}, + {<<"email">>, ?l2b(couch_util:get_value("email", Form, ""))}, + {<<"password_sha">>, hash_password(Password, UserSalt)}, + {<<"roles">>, Roles}, + {<<"salt">>, UserSalt}, + {<<"type">>, <<"user">>}, + {<<"username">>, UserName} + ]} + }, + {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + ?LOG_DEBUG("User ~s (~s) with password, ~s created.", [UserName, + UserName, Password]), + send_response(Req); + _Result -> + ?LOG_DEBUG("Can't create ~s: already exists", [UserName]), + throw({forbidden, <<"User already exists.">>}) + end. 
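cookie_auth_user/1 above expects the AuthSession cookie to be the base64url encoding of "Name:HexTimestamp:Hash", where Hash is an HMAC-SHA1 of "Name:HexTimestamp" keyed with the server secret combined with the user's salt. A sketch of the decode step it performs (the cookie value is whatever mochiweb returned for "AuthSession"):

    %% Sketch: pulling the pieces back out of an AuthSession cookie.
    AuthSession = couch_util:decodeBase64Url(CookieValue),
    [User, TimeStr | HashParts] = string:tokens(?b2l(AuthSession), ":"),
    TimeStamp = erlang:list_to_integer(TimeStr, 16),   % seconds, hex-encoded
    Hash = ?l2b(string:join(HashParts, ":")),          % HMAC to verify
    {User, TimeStamp, Hash}.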
+ +delete_user(#httpd{user_ctx=UserCtx}=Req, Db, UserName) -> + case get_user(UserName) of + nil -> + throw({not_found, <<"User doesn't exist">>}); + User -> + case lists:member(<<"_admin">>,UserCtx#user_ctx.roles) of + true -> + ok; + false when UserCtx#user_ctx.name == UserName -> + ok; + false -> + throw({forbidden, <<"You aren't allowed to delete the user">>}) + end, + {Pos,Rev} = couch_doc:parse_rev(couch_util:get_value(<<"_rev">>,User)), + UserDoc = #doc{ + id = UserName, + revs = {Pos, [Rev]}, + deleted = true + }, + {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + send_response(Req) + end. + +extract_username(Form) -> + try ?l2b(couch_util:get_value("username", Form)) + catch error:badarg -> + throw({bad_request, <<"user accounts must have a username">>}) + end. + +extract_password(Form) -> + try ?l2b(couch_util:get_value("password", Form)) + catch error:badarg -> + throw({bad_request, <<"user accounts must have a password">>}) + end. + +extract_username_password(Form) -> + try + {?l2b(couch_util:get_value("username", Form)), + ?l2b(couch_util:get_value("password", Form))} + catch error:badarg -> + Msg = <<"user accounts must have a username and password">>, + throw({bad_request, Msg}) + end. + +generate_cookie_buster() -> + T0 = calendar:now_to_datetime({0,86400,0}), + Opts = [{max_age,0}, {path,"/"}, {local_time,T0}], + mochiweb_cookies:cookie("AuthSession", "", Opts). + +generate_cookie(User, Secret, TimeStamp) -> + SessionData = ?b2l(User) ++ ":" ++ erlang:integer_to_list(TimeStamp, 16), + Hash = crypto:sha_mac(Secret, SessionData), + Cookie = couch_util:encodeBase64Url(SessionData ++ ":" ++ ?b2l(Hash)), + % MaxAge = erlang:list_to_integer(couch_config:get("chttpd_auth", + % "timeout", "600")), + % TODO add {secure, true} to options when SSL is detected + mochiweb_cookies:cookie("AuthSession", Cookie, [{path, "/"}]). + % {max_age, MaxAge}]). + +hash_password(Password, Salt) -> + ?l2b(couch_util:to_hex(crypto:sha(<>))). + +parse_form(MochiReq) -> + case MochiReq:get_primary_header_value("content-type") of + "application/x-www-form-urlencoded" ++ _ -> + ReqBody = MochiReq:recv_body(), + mochiweb_util:parse_qs(ReqBody); + _ -> + throw({bad_request, <<"you must specify " + "application/x-www-form-urlencoded as the primary content-type">>}) + end. + +send_response(Req) -> + send_response(Req, []). + +send_response(Req, ExtraHeaders) -> + {Code, Headers} = case chttpd:qs_value(Req, "next", nil) of + nil -> {200, []}; + Redirect -> + {302, [{"Location", chttpd:absolute_uri(Req, Redirect)}]} + end, + send_json(Req, Code, Headers ++ ExtraHeaders, {[{ok, true}]}). + +timestamp() -> + {MegaSeconds, Seconds, _} = erlang:now(), + MegaSeconds * 1000000 + Seconds. 
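The password_sha values stored by create_user and update_user come from hash_password/2 above; as in upstream couch_httpd_auth this is the hex-encoded SHA-1 of the password concatenated with the per-user salt, which is a fresh UUID. A sketch under that assumption, with made-up credentials:

    %% Sketch, assuming the upstream couch_httpd_auth construction:
    %% password_sha = hex(sha1(<<Password/binary, Salt/binary>>)).
    Password = <<"secret">>,
    Salt = couch_util:new_uuid(),
    PasswordSha = ?l2b(couch_util:to_hex(crypto:sha(<<Password/binary, Salt/binary>>))).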
+ +update_user(#httpd{mochi_req=MochiReq, user_ctx=UserCtx}=Req, Db, UserName) -> + case get_user(UserName) of + nil -> + throw({not_found, <<"User doesn't exist">>}); + User -> + Form = parse_form(MochiReq), + NewPassword = ?l2b(couch_util:get_value("password", Form, "")), + OldPassword = ?l2b(couch_util:get_value("old_password", Form, "")), + + UserSalt = couch_util:get_value(<<"salt">>, User), + CurrentPasswordHash = couch_util:get_value(<<"password_sha">>, User), + + Roles = [?l2b(R) || R <- proplists:get_all_values("roles", Form)], + if Roles /= [] -> + chttpd:verify_is_server_admin(Req); + true -> ok end, + + PasswordHash = case NewPassword of + <<>> -> + CurrentPasswordHash; + _Else -> + case lists:member(<<"_admin">>,UserCtx#user_ctx.roles) of + true -> + hash_password(NewPassword, UserSalt); + false when UserCtx#user_ctx.name == UserName -> + %% for user we test old password before allowing change + case hash_password(OldPassword, UserSalt) of + CurrentPasswordHash -> + hash_password(NewPassword, UserSalt); + _ -> + throw({forbidden, <<"Old password is incorrect.">>}) + end; + _ -> + Msg = <<"You aren't allowed to change this password.">>, + throw({forbidden, Msg}) + end + end, + + Active = chttpd_view:parse_bool_param(couch_util:get_value("active", + Form, "true")), + {Pos,Rev} = couch_doc:parse_rev(couch_util:get_value(<<"_rev">>,User)), + UserDoc = #doc{ + id = UserName, + revs = {Pos,[Rev]}, + body = {[ + {<<"active">>, Active}, + {<<"email">>, ?l2b(couch_util:get_value("email", Form, ""))}, + {<<"password_sha">>, PasswordHash}, + {<<"roles">>, Roles}, + {<<"salt">>, UserSalt}, + {<<"type">>, <<"user">>}, + {<<"username">>, UserName} + ]} + }, + {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + ?LOG_DEBUG("User ~s updated.", [UserName]), + send_response(Req) + end. diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl new file mode 100644 index 00000000..94a43e20 --- /dev/null +++ b/src/chttpd_db.erl @@ -0,0 +1,939 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_db). +-include("chttpd.hrl"). + +-export([handle_request/1, handle_compact_req/2, handle_design_req/2, + db_req/2, couch_doc_open/4,handle_changes_req/2, + update_doc_result_to_json/1, update_doc_result_to_json/2, + handle_design_info_req/2, handle_view_cleanup_req/2]). + +-import(chttpd, + [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, + start_json_response/2,send_chunk/2,end_json_response/1, + start_chunked_response/3, absolute_uri/2, send/2, + start_response_length/4]). + +-record(doc_query_args, { + options = [], + rev = nil, + open_revs = [], + show = nil +}). 
+ +% Database request handlers +handle_request(#httpd{path_parts=[DbName|RestParts],method=Method, + db_url_handlers=DbUrlHandlers}=Req)-> + case {Method, RestParts} of + {'PUT', []} -> + create_db_req(Req, DbName); + {'DELETE', []} -> + delete_db_req(Req, DbName); + {_, []} -> + do_db_req(Req, fun db_req/2); + {_, [SecondPart|_]} -> + Handler = couch_util:get_value(SecondPart, DbUrlHandlers, fun db_req/2), + do_db_req(Req, Handler) + end. + +handle_changes_req(#httpd{method='GET'}=Req, Db) -> + MakeCallback = fun(Resp) -> + fun({change, Change, _}, "continuous") -> + send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]); + ({change, Change, Prepend}, _) -> + send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]); + (start, "continuous") -> + ok; + (start, _) -> + send_chunk(Resp, "{\"results\":[\n"); + ({stop, EndSeq}, "continuous") -> + send_chunk( + Resp, + [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"] + ), + end_json_response(Resp); + ({stop, EndSeq}, _) -> + send_chunk( + Resp, + io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq]) + ), + end_json_response(Resp); + (timeout, _) -> + send_chunk(Resp, "\n") + end + end, + ChangesArgs = parse_changes_query(Req), + ChangesFun = couch_changes:handle_changes(ChangesArgs, Req, Db), + case ChangesArgs#changes_args.feed of + "normal" -> + {ok, Info} = couch_db:get_db_info(Db), + CurrentEtag = chttpd:make_etag(Info), + chttpd:etag_respond( + Req, + CurrentEtag, + fun() -> + {ok, Resp} = chttpd:start_json_response( + Req, 200, [{"Etag", CurrentEtag}] + ), + ChangesFun(MakeCallback(Resp)) + end + ); + _ -> + % "longpoll" or "continuous" + {ok, Resp} = chttpd:start_json_response(Req, 200), + ChangesFun(MakeCallback(Resp)) + end; + +handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) -> + send_method_not_allowed(Req, "GET,HEAD"). + +handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) -> + ok = ?COUCH:compact_view_group(DbName, Id), + send_json(Req, 202, {[{ok, true}]}); + +handle_compact_req(#httpd{method='POST'}=Req, Db) -> + StartSeq = chttpd:qs_value(Req, "start_seq", "0"), + ok = ?COUCH:compact_db(Db, list_to_integer(StartSeq)), + send_json(Req, 202, {[{ok, true}]}); + +handle_compact_req(Req, _Db) -> + send_method_not_allowed(Req, "POST"). + +handle_view_cleanup_req(#httpd{method='POST'}=Req, _Db) -> + % delete unreferenced index files + % ok = ?COUCH:cleanup_view_index_files(Db), + send_json(Req, 202, {[{ok, true}]}); + +handle_view_cleanup_req(Req, _Db) -> + send_method_not_allowed(Req, "POST"). + + +handle_design_req(#httpd{ + path_parts=[_DbName,_Design,_DesName, <<"_",_/binary>> = Action | _Rest], + design_url_handlers = DesignUrlHandlers + }=Req, Db) -> + Handler = couch_util:get_value(Action, DesignUrlHandlers, fun db_req/2), + Handler(Req, Db); + +handle_design_req(Req, Db) -> + db_req(Req, Db). + +handle_design_info_req(#httpd{ + method='GET', + path_parts=[_DbName, _Design, DesignName, _] + }=Req, Db) -> + DesignId = <<"_design/", DesignName/binary>>, + {ok, GroupInfoList} = ?COUCH:get_view_group_info(Db, DesignId), + send_json(Req, 200, {[ + {name, DesignName}, + {view_index, {GroupInfoList}} + ]}); + +handle_design_info_req(Req, _Db) -> + send_method_not_allowed(Req, "GET"). 
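handle_changes_req/2 frames the feed entirely through the callback it builds: feed=normal wraps the rows in {"results":[ ... ],"last_seq":N} behind an ETag computed from the db info, while longpoll and continuous feeds stream one encoded change per line and finish with a bare last_seq object. The two terminators, lifted from the callback clauses above:

    %% Sketch: the chunks the callback writes when the feed ends.
    normal_feed_tail(EndSeq) ->
        io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq]).

    continuous_feed_tail(EndSeq) ->
        [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}), "\n"].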
+ +create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> + N = chttpd:qs_value(Req, "n"), + Q = chttpd:qs_value(Req, "q"), + case ?COUCH:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of + ok -> + DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), + send_json(Req, 201, [{"Location", DocUrl}], {[{ok, true}]}); + Error -> + throw(Error) + end. + +delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> + case ?COUCH:delete_db(DbName, [{user_ctx, UserCtx}]) of + ok -> + send_json(Req, 200, {[{ok, true}]}); + Error -> + throw(Error) + end. + +do_db_req(#httpd{path_parts=[DbName|_]}=Req, Fun) -> + Fun(Req, #db{name=DbName}). + +db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> + Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), + chttpd:header_value(Req, "Host")), + {ok, DbInfo} = ?COUCH:get_db_info(DbName, Customer), + send_json(Req, {DbInfo}); + +db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) -> + Doc = couch_doc:from_json_obj(chttpd:json_body(Req)), + Doc2 = case Doc#doc.id of + <<"">> -> + Doc#doc{id=couch_util:new_uuid(), revs={0, []}}; + _ -> + Doc + end, + DocId = Doc2#doc.id, + case chttpd:qs_value(Req, "batch") of + "ok" -> + % batch + ok = couch_batch_save:eventually_save_doc( + Db#db.name, Doc2, Db#db.user_ctx), + send_json(Req, 202, [], {[ + {ok, true}, + {id, DocId} + ]}); + _Normal -> + % normal + {ok, NewRev} = ?COUCH:update_doc(Db, Doc2, []), + DocUrl = absolute_uri( + Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)), + send_json(Req, 201, [{"Location", DocUrl}], {[ + {ok, true}, + {id, DocId}, + {rev, couch_doc:rev_to_str(NewRev)} + ]}) + end; + + +db_req(#httpd{path_parts=[_DbName]}=Req, _Db) -> + send_method_not_allowed(Req, "DELETE,GET,HEAD,POST"); + +db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) -> + UpdateSeq = ?COUCH:get_update_seq(Db), + CommittedSeq = ?COUCH:get_committed_update_seq(Db), + {ok, StartTime} = + case chttpd:qs_value(Req, "seq") of + undefined -> + committed = couch_batch_save:commit_now(Db#db.name, Db#db.user_ctx), + ?COUCH:ensure_full_commit(Db); + RequiredStr -> + RequiredSeq = list_to_integer(RequiredStr), + if RequiredSeq > UpdateSeq -> + throw({bad_request, + "can't do a full commit ahead of current update_seq"}); + RequiredSeq > CommittedSeq -> + % user asked for an explicit sequence, don't commit any batches + ?COUCH:ensure_full_commit(Db); + true -> + %% hack to make sure we always get cluster max time - APK + ?COUCH:ensure_full_commit(Db) + % {ok, Db#db.instance_start_time} + end + end, + send_json(Req, 201, {[ + {ok, true}, + {instance_start_time, StartTime} + ]}); + +db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) -> + couch_stats_collector:increment({httpd, bulk_requests}), + {JsonProps} = chttpd:json_body_obj(Req), + DocsArray = couch_util:get_value(<<"docs">>, JsonProps), + case chttpd:header_value(Req, "X-Couch-Full-Commit") of + "true" -> + Options = [full_commit]; + "false" -> + Options = [delay_commit]; + _ -> + Options = [] + end, + case couch_util:get_value(<<"new_edits">>, JsonProps, true) of + true -> + Docs = lists:map( + fun({ObjProps} = JsonObj) -> + Doc = couch_doc:from_json_obj(JsonObj), + validate_attachment_names(Doc), + Id = case Doc#doc.id of + <<>> -> couch_util:new_uuid(); + Id0 -> Id0 + end, + case couch_util:get_value(<<"_rev">>, ObjProps) of + undefined -> + Revs = {0, 
[]}; + Rev -> + {Pos, RevId} = couch_doc:parse_rev(Rev), + Revs = {Pos, [RevId]} + end, + Doc#doc{id=Id,revs=Revs} + end, + DocsArray), + Options2 = + case couch_util:get_value(<<"all_or_nothing">>, JsonProps) of + true -> [all_or_nothing|Options]; + _ -> Options + end, + case ?COUCH:update_docs(Db, Docs, Options2) of + {ok, Results} -> + % output the results + DocResults = lists:zipwith(fun update_doc_result_to_json/2, + Docs, Results), + send_json(Req, 201, DocResults); + {aborted, Errors} -> + ErrorsJson = + lists:map(fun update_doc_result_to_json/1, Errors), + send_json(Req, 417, ErrorsJson) + end; + false -> + Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray], + {ok, Errors} = ?COUCH:update_docs(Db, Docs, Options, replicated_changes), + ErrorsJson = + lists:map(fun update_doc_result_to_json/1, Errors), + send_json(Req, 201, ErrorsJson) + end; +db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) -> + {IdsRevs} = chttpd:json_body_obj(Req), + IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs], + + case ?COUCH:purge_docs(Db, IdsRevs2) of + {ok, PurgeSeq, PurgedIdsRevs} -> + PurgedIdsRevs2 = [{Id, couch_doc:rev_to_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs], + send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]}); + Error -> + throw(Error) + end; + +db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> + all_docs_view(Req, Db, nil); + +db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> + {Fields} = chttpd:json_body_obj(Req), + Keys = couch_util:get_value(<<"keys">>, Fields, nil), + case Keys of + Keys when is_list(Keys) -> ok; + nil -> ?LOG_DEBUG("POST to _all_docs with no keys member.", []); + _ -> throw({bad_request, "`keys` member must be a array."}) + end, + all_docs_view(Req, Db, Keys); + +db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) -> + send_method_not_allowed(Req, "GET,HEAD,POST"); + +db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> + throw(not_implemented), + #view_query_args{ + start_key = StartKey, + limit = Limit, + skip = SkipCount, + direction = Dir + } = QueryArgs = chttpd_view:parse_view_params(Req, nil, map), + + Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), + chttpd:header_value(Req, "Host")), + {ok, Info} = ?COUCH:get_db_info(Db, Customer), + CurrentEtag = chttpd:make_etag(Info), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + TotalRowCount = couch_util:get_value(doc_count, Info), + FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, + TotalRowCount, #view_fold_helper_funs{ + reduce_count = fun ?COUCH:enum_docs_since_reduce_to_count/1 + }), + StartKey2 = case StartKey of + nil -> 0; + <<>> -> 100000000000; + {} -> 100000000000; + StartKey when is_integer(StartKey) -> StartKey + end, + {ok, FoldResult} = ?COUCH:enum_docs_since(Db, StartKey2, Dir, + fun(DocInfo, Offset, Acc) -> + #doc_info{ + id=Id, + high_seq=Seq, + revs=[#rev_info{rev=Rev,deleted=Deleted} | RestInfo] + } = DocInfo, + ConflictRevs = couch_doc:rev_to_strs( + [Rev1 || #rev_info{deleted=false, rev=Rev1} <- RestInfo]), + DelConflictRevs = couch_doc:rev_to_strs( + [Rev1 || #rev_info{deleted=true, rev=Rev1} <- RestInfo]), + Json = { + [{<<"rev">>, couch_doc:rev_to_str(Rev)}] ++ + case 
ConflictRevs of + [] -> []; + _ -> [{<<"conflicts">>, ConflictRevs}] + end ++ + case DelConflictRevs of + [] -> []; + _ -> [{<<"deleted_conflicts">>, DelConflictRevs}] + end ++ + case Deleted of + true -> [{<<"deleted">>, true}]; + false -> [] + end + }, + FoldlFun({{Seq, Id}, Json}, Offset, Acc) + end, {Limit, SkipCount, undefined, [], nil}), + chttpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult}) + end); + +db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) -> + send_method_not_allowed(Req, "GET,HEAD"); + +db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) -> + {JsonDocIdRevs} = chttpd:json_body_obj(Req), + JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs], + {ok, Results} = ?COUCH:get_missing_revs(Db, JsonDocIdRevs2), + Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results], + send_json(Req, {[ + {missing_revs, {Results2}} + ]}); + +db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +db_req(#httpd{method='PUT',path_parts=[_,<<"_admins">>]}=Req, + Db) -> + Admins = chttpd:json_body(Req), + ok = ?COUCH:set_admins(Db, Admins), + send_json(Req, {[{<<"ok">>, true}]}); + +db_req(#httpd{method='GET',path_parts=[_,<<"_admins">>]}=Req, Db) -> + send_json(Req, ?COUCH:get_admins(Db)); + +db_req(#httpd{path_parts=[_,<<"_admins">>]}=Req, _Db) -> + send_method_not_allowed(Req, "PUT,GET"); + +db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req, + Db) -> + Limit = chttpd:json_body(Req), + ok = ?COUCH:set_revs_limit(Db, Limit), + send_json(Req, {[{<<"ok">>, true}]}); + +db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) -> + send_json(Req, ?COUCH:get_revs_limit(Db)); + +db_req(#httpd{path_parts=[_,<<"_revs_limit">>]}=Req, _Db) -> + send_method_not_allowed(Req, "PUT,GET"); + +% Special case to enable using an unencoded slash in the URL of design docs, +% as slashes in document IDs must otherwise be URL encoded. +db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name]}=Req, Db) -> + db_doc_req(Req, Db, <<"_design/",Name/binary>>); + +db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name|FileNameParts]}=Req, Db) -> + db_attachment_req(Req, Db, <<"_design/",Name/binary>>, FileNameParts); + + +% Special case to allow for accessing local documents without %2F +% encoding the docid. Throws out requests that don't have the second +% path part or that specify an attachment name. +db_req(#httpd{path_parts=[_DbName, <<"_local">>, Name]}=Req, Db) -> + db_doc_req(Req, Db, <<"_local/", Name/binary>>); + +db_req(#httpd{path_parts=[_DbName, <<"_local">>]}, _Db) -> + throw({bad_request, <<"Missing _local document id.">>}); + +db_req(#httpd{path_parts=[_DbName, <<"_local/">>]}, _Db) -> + throw({bad_request, <<"Missing _local document id.">>}); + +db_req(#httpd{path_parts=[_DbName, <<"_local">> | _Rest]}, _Db) -> + throw({bad_request, <<"_local documents do not accept attachments.">>}); + +db_req(#httpd{path_parts=[_DbName, <<"_local/", _/binary>>, _ | _]}, _Db) -> + throw({bad_request, <<"_local documents do not accept attachments.">>}); + +db_req(#httpd{path_parts=[_, DocId]}=Req, Db) -> + db_doc_req(Req, Db, DocId); + +db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) -> + db_attachment_req(Req, Db, DocId, FileNameParts). 
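The last few db_req/2 clauses are what let design and local documents be addressed with a literal slash: /db/_design/foo resolves to the docid <<"_design/foo">>, any further path segments are treated as an attachment path, and _local docids never accept attachments. Sketch of the id reconstruction for an illustrative request:

    %% Sketch: GET /db/_design/foo/index.html
    %% path_parts = [<<"db">>, <<"_design">>, <<"foo">>, <<"index.html">>]
    Name = <<"foo">>,
    DocId = <<"_design/", Name/binary>>,      % <<"_design/foo">>
    FileNameParts = [<<"index.html">>],       % forwarded to db_attachment_req/4
    {DocId, FileNameParts}.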
+ +all_docs_view(Req, Db, Keys) -> + Etag = couch_util:new_uuid(), + QueryArgs = chttpd_view:parse_view_params(Req, nil, map), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), + {ok, Total, Result} = ?COUCH:all_docs_view(Resp, Db, Keys, QueryArgs), + send_chunk(Resp, all_docs_final_chunk(Total, Result)), + end_json_response(Resp) + end). + +all_docs_final_chunk(Total, {_, _, undefined, _, nil}) -> + ?JSON_ENCODE({[{total_rows, Total}, {offset, Total}, {rows, []}]}); +all_docs_final_chunk(Total, {_, _, undefined, _, Offset}) -> + ?JSON_ENCODE({[{total_rows, Total}, {offset, Offset}, {rows, []}]}); +all_docs_final_chunk(_, {_, _, _, _, _}) -> + "\r\n]}"; +all_docs_final_chunk(_, Error) -> + throw(Error). + +db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) -> + % check for the existence of the doc to handle the 404 case. + couch_doc_open(Db, DocId, nil, []), + case chttpd:qs_value(Req, "rev") of + undefined -> + update_doc(Req, Db, DocId, {[{<<"_deleted">>,true}]}); + Rev -> + update_doc(Req, Db, DocId, {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]}) + end; + +db_doc_req(#httpd{method='GET'}=Req, Db, DocId) -> + #doc_query_args{ + show = Format, + rev = Rev, + open_revs = Revs, + options = Options + } = parse_doc_query(Req), + case Format of + nil -> + case Revs of + [] -> + Doc = couch_doc_open(Db, DocId, Rev, Options), + DiskEtag = chttpd:doc_etag(Doc), + case Doc#doc.meta of + [] -> + % output etag only when we have no meta + chttpd:etag_respond(Req, DiskEtag, fun() -> + send_json(Req, 200, [{"Etag", DiskEtag}], couch_doc:to_json_obj(Doc, Options)) + end); + _ -> + send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options)) + end; + _ -> + {ok, Results} = ?COUCH:open_revs(Db, DocId, Revs, Options), + {ok, Resp} = start_json_response(Req, 200), + send_chunk(Resp, "["), + % We loop through the docs. The first time through the separator + % is whitespace, then a comma on subsequent iterations. 
+ lists:foldl( + fun(Result, AccSeparator) -> + case Result of + {ok, Doc} -> + JsonDoc = couch_doc:to_json_obj(Doc, Options), + Json = ?JSON_ENCODE({[{ok, JsonDoc}]}), + send_chunk(Resp, AccSeparator ++ Json); + {{not_found, missing}, RevId} -> + Json = ?JSON_ENCODE({[{"missing", RevId}]}), + send_chunk(Resp, AccSeparator ++ Json) + end, + "," % AccSeparator now has a comma + end, + "", Results), + send_chunk(Resp, "]"), + end_json_response(Resp) + end; + _ -> + {DesignName, ShowName} = Format, + chttpd_show:handle_doc_show(Req, DesignName, ShowName, DocId, Db) + end; + +db_doc_req(#httpd{method='POST'}=Req, Db, DocId) -> + couch_doc:validate_docid(DocId), + case chttpd:header_value(Req, "content-type") of + "multipart/form-data" ++ _Rest -> + ok; + _Else -> + throw({bad_ctype, <<"Invalid Content-Type header for form upload">>}) + end, + Form = chttpd:parse_form(Req), + Rev = couch_doc:parse_rev(list_to_binary(couch_util:get_value("_rev", Form))), + {ok, [{ok, Doc}]} = ?COUCH:open_revs(Db, DocId, [Rev], []), + + UpdatedAtts = [ + #att{name=validate_attachment_name(Name), + type=list_to_binary(ContentType), + data=Content} || + {Name, {ContentType, _}, Content} <- + proplists:get_all_values("_attachments", Form) + ], + #doc{atts=OldAtts} = Doc, + OldAtts2 = lists:flatmap( + fun(#att{name=OldName}=Att) -> + case [1 || A <- UpdatedAtts, A#att.name == OldName] of + [] -> [Att]; % the attachment wasn't in the UpdatedAtts, return it + _ -> [] % the attachment was in the UpdatedAtts, drop it + end + end, OldAtts), + NewDoc = Doc#doc{ + atts = UpdatedAtts ++ OldAtts2 + }, + {ok, NewRev} = ?COUCH:update_doc(Db, NewDoc, []), + + send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewRev)) ++ "\""}], {[ + {ok, true}, + {id, DocId}, + {rev, couch_doc:rev_to_str(NewRev)} + ]}); + +db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) -> + couch_doc:validate_docid(DocId), + Json = chttpd:json_body(Req), + case chttpd:qs_value(Req, "batch") of + "ok" -> + % batch + Doc = couch_doc_from_req(Req, DocId, Json), + ok = couch_batch_save:eventually_save_doc(Db#db.name, Doc, Db#db.user_ctx), + send_json(Req, 202, [], {[ + {ok, true}, + {id, DocId} + ]}); + _Normal -> + % normal + DbName = couch_db:name(Db), + Location = absolute_uri(Req, <<"/", DbName/binary, "/", DocId/binary>>), + update_doc(Req, Db, DocId, Json, [{"Location", Location}]) + end; + +db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) -> + SourceRev = + case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of + missing_rev -> nil; + Rev -> Rev + end, + {TargetDocId, TargetRevs} = parse_copy_destination_header(Req), + % open old doc + Doc = couch_doc_open(Db, SourceDocId, SourceRev, []), + % save new doc + {ok, NewTargetRev} = ?COUCH:update_doc(Db, + Doc#doc{id=TargetDocId, revs=TargetRevs}, []), + % respond + send_json(Req, 201, + [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}], + update_doc_result_to_json(TargetDocId, {ok, NewTargetRev})); + +db_doc_req(Req, _Db, _DocId) -> + send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY"). + + +update_doc_result_to_json({{Id, Rev}, Error}) -> + {_Code, Err, Msg} = chttpd:error_info(Error), + {[{id, Id}, {rev, couch_doc:rev_to_str(Rev)}, + {error, Err}, {reason, Msg}]}. 
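The open_revs branch above streams a JSON array without buffering it by threading the element separator through lists:foldl: the accumulator starts as "" and becomes "," after the first element. The same idiom in isolation, as a sketch:

    %% Sketch: comma-separate streamed JSON array elements with a foldl accumulator.
    %% SendChunk stands in for fun(Data) -> chttpd:send_chunk(Resp, Data) end.
    stream_array(SendChunk, Items) ->
        SendChunk("["),
        lists:foldl(fun(Item, Sep) ->
            SendChunk([Sep, ?JSON_ENCODE(Item)]),
            ","                        % every element after the first gets a comma
        end, "", Items),
        SendChunk("]").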
+ +update_doc_result_to_json(#doc{id=DocId}, Result) -> + update_doc_result_to_json(DocId, Result); +update_doc_result_to_json(DocId, {ok, NewRev}) -> + {[{id, DocId}, {rev, couch_doc:rev_to_str(NewRev)}]}; +update_doc_result_to_json(DocId, Error) -> + {_Code, ErrorStr, Reason} = chttpd:error_info(Error), + {[{id, DocId}, {error, ErrorStr}, {reason, Reason}]}. + + +update_doc(Req, Db, DocId, Json) -> + update_doc(Req, Db, DocId, Json, []). + +update_doc(Req, Db, DocId, Json, Headers) -> + #doc{deleted=Deleted} = Doc = couch_doc_from_req(Req, DocId, Json), + + case chttpd:header_value(Req, "X-Couch-Full-Commit") of + "true" -> + Options = [full_commit]; + "false" -> + Options = [delay_commit]; + _ -> + Options = [] + end, + {Status, NewRev} = case ?COUCH:update_doc(Db, Doc, Options) of + {ok, NewRev1} -> {201, NewRev1}; + {accepted, NewRev1} -> {202, NewRev1} + end, + NewRevStr = couch_doc:rev_to_str(NewRev), + ResponseHeaders = [{"Etag", <<"\"", NewRevStr/binary, "\"">>}] ++ Headers, + send_json(Req, if Deleted -> 200; true -> Status end, + ResponseHeaders, {[ + {ok, true}, + {id, DocId}, + {rev, NewRevStr}]}). + +couch_doc_from_req(Req, DocId, Json) -> + Doc = couch_doc:from_json_obj(Json), + validate_attachment_names(Doc), + ExplicitDocRev = + case Doc#doc.revs of + {Start,[RevId|_]} -> {Start, RevId}; + _ -> undefined + end, + case extract_header_rev(Req, ExplicitDocRev) of + missing_rev -> + Revs = {0, []}; + {Pos, Rev} -> + Revs = {Pos, [Rev]} + end, + Doc#doc{id=DocId, revs=Revs}. + + +% Useful for debugging +% couch_doc_open(Db, DocId) -> +% couch_doc_open(Db, DocId, nil, []). + +couch_doc_open(Db, DocId, Rev, Options) -> + case Rev of + nil -> % open most recent rev + case ?COUCH:open_doc(Db, DocId, Options) of + {ok, Doc} -> + Doc; + Error -> + throw(Error) + end; + _ -> % open a specific rev (deletions come back as stubs) + case ?COUCH:open_revs(Db, DocId, [Rev], Options) of + {ok, [{ok, Doc}]} -> + Doc; + {ok, [Else]} -> + throw(Else) + end + end. + +% Attachment request handlers + +db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) -> + FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")), + #doc_query_args{ + rev=Rev, + options=Options + } = parse_doc_query(Req), + #doc{ + atts=Atts + } = Doc = couch_doc_open(Db, DocId, Rev, Options), + case [A || A <- Atts, A#att.name == FileName] of + [] -> + throw({not_found, "Document is missing attachment"}); + [#att{type=Type, len=Len}=Att] -> + Etag = chttpd:doc_etag(Doc), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = start_response_length(Req, 200, [ + {"ETag", Etag}, + {"Cache-Control", "must-revalidate"}, + {"Content-Type", binary_to_list(Type)} + ], integer_to_list(Len)), + couch_doc:att_foldl(Att, fun(BinSegment, _) -> + send(Resp, BinSegment) + end, {ok, Resp}) + end) + end; + + +db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts) + when (Method == 'PUT') or (Method == 'DELETE') -> + FileName = validate_attachment_name( + mochiweb_util:join( + lists:map(fun binary_to_list/1, + FileNameParts),"/")), + + NewAtt = case Method of + 'DELETE' -> + []; + _ -> + [#att{ + name=FileName, + type = case chttpd:header_value(Req,"Content-Type") of + undefined -> + % We could throw an error here or guess by the FileName. + % Currently, just giving it a default. 
+ <<"application/octet-stream">>; + CType -> + list_to_binary(CType) + end, + data = ?COUCH:att_receiver(Req, chttpd:body_length(Req)), + len = case chttpd:header_value(Req,"Content-Length") of + undefined -> + undefined; + Length -> + list_to_integer(Length) + end + }] + end, + + Doc = case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of + missing_rev -> % make the new doc + couch_doc:validate_docid(DocId), + #doc{id=DocId}; + Rev -> + case ?COUCH:open_revs(Db, DocId, [Rev], []) of + {ok, [{ok, Doc0}]} -> Doc0; + {ok, [Error]} -> throw(Error) + end + end, + + #doc{atts=Atts} = Doc, + DocEdited = Doc#doc{ + atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName] + }, + {ok, UpdatedRev} = ?COUCH:update_doc(Db, DocEdited, []), + DbName = couch_db:name(Db), + + {Status, Headers} = case Method of + 'DELETE' -> + {200, []}; + _ -> + {201, [{"Location", absolute_uri(Req, "/" ++ + binary_to_list(DbName) ++ "/" ++ + binary_to_list(DocId) ++ "/" ++ + binary_to_list(FileName) + )}]} + end, + send_json(Req,Status, Headers, {[ + {ok, true}, + {id, DocId}, + {rev, couch_doc:rev_to_str(UpdatedRev)} + ]}); + +db_attachment_req(Req, _Db, _DocId, _FileNameParts) -> + send_method_not_allowed(Req, "DELETE,GET,HEAD,PUT"). + +parse_doc_format(FormatStr) when is_binary(FormatStr) -> + parse_doc_format(?b2l(FormatStr)); +parse_doc_format(FormatStr) when is_list(FormatStr) -> + SplitFormat = lists:splitwith(fun($/) -> false; (_) -> true end, FormatStr), + case SplitFormat of + {DesignName, [$/ | ShowName]} -> {?l2b(DesignName), ?l2b(ShowName)}; + _Else -> throw({bad_request, <<"Invalid doc format">>}) + end; +parse_doc_format(_BadFormatStr) -> + throw({bad_request, <<"Invalid doc format">>}). + +parse_doc_query(Req) -> + lists:foldl(fun({Key,Value}, Args) -> + case {Key, Value} of + {"attachments", "true"} -> + Options = [attachments | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"meta", "true"} -> + Options = [revs_info, conflicts, deleted_conflicts | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"revs", "true"} -> + Options = [revs | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"local_seq", "true"} -> + Options = [local_seq | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"revs_info", "true"} -> + Options = [revs_info | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"conflicts", "true"} -> + Options = [conflicts | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"deleted_conflicts", "true"} -> + Options = [deleted_conflicts | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"rev", Rev} -> + Args#doc_query_args{rev=couch_doc:parse_rev(Rev)}; + {"open_revs", "all"} -> + Args#doc_query_args{open_revs=all}; + {"open_revs", RevsJsonStr} -> + JsonArray = ?JSON_DECODE(RevsJsonStr), + Args#doc_query_args{open_revs=[couch_doc:parse_rev(Rev) || Rev <- JsonArray]}; + {"show", FormatStr} -> + Args#doc_query_args{show=parse_doc_format(FormatStr)}; + {"r", R} -> + Options = [{r,R} | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + {"w", W} -> + Options = [{w,W} | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; + _Else -> % unknown key value pair, ignore. + Args + end + end, #doc_query_args{}, chttpd:qs(Req)). 
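The fold above maps decoded query-string pairs onto #doc_query_args. A standalone sketch of the same accumulation pattern, using an invented minimal record (the real #doc_query_args comes from the couch include, not this patch):

-module(qs_fold_sketch).
-export([parse/1]).

-record(args, {rev, options = []}).

%% Fold query-string pairs into a record, ignoring unknown keys, in the same
%% shape as parse_doc_query/1 above.
parse(Qs) ->
    lists:foldl(fun
        ({"rev", R}, Args) ->
            Args#args{rev = R};
        ({"conflicts", "true"}, #args{options = Opts} = Args) ->
            Args#args{options = [conflicts | Opts]};
        ({_Unknown, _}, Args) ->
            Args
    end, #args{}, Qs).

%% qs_fold_sketch:parse([{"rev","1-abc"}, {"conflicts","true"}, {"x","y"}]).
%% => {args,"1-abc",[conflicts]}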
+
+parse_changes_query(Req) ->
+    lists:foldl(fun({Key, Value}, Args) ->
+        case {Key, Value} of
+        {"feed", _} ->
+            Args#changes_args{feed=Value};
+        {"descending", "true"} ->
+            Args#changes_args{dir=rev};
+        {"since", _} ->
+            Args#changes_args{since=list_to_integer(Value)};
+        {"limit", _} ->
+            Args#changes_args{limit=list_to_integer(Value)};
+        {"style", _} ->
+            Args#changes_args{style=list_to_existing_atom(Value)};
+        {"heartbeat", "true"} ->
+            Args#changes_args{heartbeat=true};
+        {"heartbeat", _} ->
+            Args#changes_args{heartbeat=list_to_integer(Value)};
+        {"timeout", _} ->
+            Args#changes_args{timeout=list_to_integer(Value)};
+        {"include_docs", "true"} ->
+            Args#changes_args{include_docs=true};
+        {"filter", _} ->
+            Args#changes_args{filter=Value};
+        _Else -> % unknown key value pair, ignore.
+            Args
+        end
+    end, #changes_args{}, chttpd:qs(Req)).
+
+extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)->
+    extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev));
+extract_header_rev(Req, ExplicitRev) ->
+    Etag = case chttpd:header_value(Req, "If-Match") of
+        undefined -> undefined;
+        Value -> couch_doc:parse_rev(string:strip(Value, both, $"))
+    end,
+    case {ExplicitRev, Etag} of
+    {undefined, undefined} -> missing_rev;
+    {_, undefined} -> ExplicitRev;
+    {undefined, _} -> Etag;
+    _ when ExplicitRev == Etag -> Etag;
+    _ ->
+        throw({bad_request, "Document rev and etag have different values"})
+    end.
+
+
+parse_copy_destination_header(Req) ->
+    Destination = chttpd:header_value(Req, "Destination"),
+    case re:run(Destination, "\\?", [{capture, none}]) of
+    nomatch ->
+        {list_to_binary(Destination), {0, []}};
+    match ->
+        [DocId, RevQs] = re:split(Destination, "\\?", [{return, list}]),
+        [_RevQueryKey, Rev] = re:split(RevQs, "=", [{return, list}]),
+        {Pos, RevId} = couch_doc:parse_rev(Rev),
+        {list_to_binary(DocId), {Pos, [RevId]}}
+    end.
+
+validate_attachment_names(Doc) ->
+    lists:foreach(fun(#att{name=Name}) ->
+        validate_attachment_name(Name)
+    end, Doc#doc.atts).
+
+validate_attachment_name(Name) when is_list(Name) ->
+    validate_attachment_name(list_to_binary(Name));
+validate_attachment_name(<<"_",_/binary>>) ->
+    throw({bad_request, <<"Attachment name can't start with '_'">>});
+validate_attachment_name(Name) ->
+    case is_valid_utf8(Name) of
+        true -> Name;
+        false -> throw({bad_request, <<"Attachment name is not UTF-8 encoded">>})
+    end.
+
+%% borrowed from mochijson2:json_bin_is_safe()
+is_valid_utf8(<<>>) ->
+    true;
+is_valid_utf8(<<C, Rest/binary>>) ->
+    case C of
+        $\" ->
+            false;
+        $\\ ->
+            false;
+        $\b ->
+            false;
+        $\f ->
+            false;
+        $\n ->
+            false;
+        $\r ->
+            false;
+        $\t ->
+            false;
+        C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+            false;
+        C when C < 16#7f ->
+            is_valid_utf8(Rest);
+        _ ->
+            false
+    end.
diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl
new file mode 100644
index 00000000..d096bff9
--- /dev/null
+++ b/src/chttpd_external.erl
@@ -0,0 +1,166 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_external).
+
+-export([handle_external_req/2, handle_external_req/3]).
+-export([send_external_response/2, json_req_obj/2]).
+-export([default_or_content_type/2, parse_external_response/1]).
+
+-import(chttpd,[send_error/4]).
+
+-include("chttpd.hrl").
+
+% handle_external_req/2
+% for the old type of config usage:
+% _external = {chttpd_external, handle_external_req}
+% with urls like
+% /db/_external/action/design/name
+handle_external_req(#httpd{
+                        path_parts=[_DbName, _External, UrlName | _Path]
+                    }=HttpReq, Db) ->
+    process_external_req(HttpReq, Db, UrlName);
+handle_external_req(#httpd{path_parts=[_, _]}=Req, _Db) ->
+    send_error(Req, 404, <<"external_server_error">>, <<"No server name specified.">>);
+handle_external_req(Req, _) ->
+    send_error(Req, 404, <<"external_server_error">>, <<"Broken assumption">>).
+
+% handle_external_req/3
+% for this type of config usage:
+% _action = {chttpd_external, handle_external_req, <<"action">>}
+% with urls like
+% /db/_action/design/name
+handle_external_req(HttpReq, Db, Name) ->
+    process_external_req(HttpReq, Db, Name).
+
+process_external_req(HttpReq, Db, Name) ->
+
+    Response = couch_external_manager:execute(binary_to_list(Name),
+        json_req_obj(HttpReq, Db)),
+
+    case Response of
+    {unknown_external_server, Msg} ->
+        send_error(HttpReq, 404, <<"external_server_error">>, Msg);
+    _ ->
+        send_external_response(HttpReq, Response)
+    end.
+
+json_req_obj(#httpd{mochi_req=Req,
+               method=Verb,
+               path_parts=Path,
+               req_body=ReqBody
+            }, Db) ->
+    Body = case ReqBody of
+        undefined -> Req:recv_body();
+        Else -> Else
+    end,
+    ParsedForm = case Req:get_primary_header_value("content-type") of
+        "application/x-www-form-urlencoded" ++ _ ->
+            mochiweb_util:parse_qs(Body);
+        _ ->
+            []
+    end,
+    Headers = Req:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Customer = cloudant_util:customer_name(
+        Req:get_header_value("X-Cloudant-User"), Req:get_header_value("Host")),
+    {ok, Info} = ?COUCH:get_db_info(Db, Customer),
+
+    % send correct path to customer - BugzID 6849
+    CustomerBin = list_to_binary(Customer),
+    Len = byte_size(CustomerBin),
+    FixedPath = case Path of
+    [<<CustomerBin:Len/binary, "/", DbName/binary>> | Rest] ->
+        [DbName | Rest];
+    NoCustomer ->
+        NoCustomer
+    end,
+
+    % add headers...
+    {[{<<"info">>, {Info}},
+        {<<"verb">>, Verb},
+        {<<"path">>, FixedPath},
+        {<<"query">>, to_json_terms(Req:parse_qs())},
+        {<<"headers">>, to_json_terms(Hlist)},
+        {<<"body">>, Body},
+        {<<"form">>, to_json_terms(ParsedForm)},
+        {<<"cookie">>, to_json_terms(Req:parse_cookie())},
+        {<<"userCtx">>, couch_util:json_user_ctx(Db)}]}.
+
+to_json_terms(Data) ->
+    to_json_terms(Data, []).
+to_json_terms([], Acc) ->
+    {lists:reverse(Acc)};
+to_json_terms([{Key, Value} | Rest], Acc) when is_atom(Key) ->
+    to_json_terms(Rest, [{list_to_binary(atom_to_list(Key)), list_to_binary(Value)} | Acc]);
+to_json_terms([{Key, Value} | Rest], Acc) ->
+    to_json_terms(Rest, [{list_to_binary(Key), list_to_binary(Value)} | Acc]).
+
+
+send_external_response(#httpd{mochi_req=MochiReq}, Response) ->
+    #extern_resp_args{
+        code = Code,
+        data = Data,
+        ctype = CType,
+        headers = Headers
+    } = parse_external_response(Response),
+    Resp = MochiReq:respond({Code,
+        default_or_content_type(CType, Headers ++ chttpd:server_header()), Data}),
+    {ok, Resp}.
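For orientation, a hedged example of the EJSON request object json_req_obj/2 builds for the external process; every value below is invented, and the real info and userCtx contents come from fabric and couch_util:

%% Illustrative shape only:
{[{<<"info">>,    {[{<<"db_name">>, <<"accounts">>}]}},
  {<<"verb">>,    'GET'},
  {<<"path">>,    [<<"accounts">>, <<"_external">>, <<"search">>]},
  {<<"query">>,   {[{<<"q">>, <<"apples">>}]}},
  {<<"headers">>, {[{<<"Host">>, <<"localhost:5984">>}]}},
  {<<"body">>,    <<>>},
  {<<"form">>,    {[]}},
  {<<"cookie">>,  {[]}},
  {<<"userCtx">>, {[{<<"name">>, null}, {<<"roles">>, []}]}}]}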
+ +parse_external_response({Response}) -> + lists:foldl(fun({Key,Value}, Args) -> + case {Key, Value} of + {"", _} -> + Args; + {<<"code">>, Value} -> + Args#extern_resp_args{code=Value}; + {<<"stop">>, true} -> + Args#extern_resp_args{stop=true}; + {<<"json">>, Value} -> + Args#extern_resp_args{ + data=?JSON_ENCODE(Value), + ctype="application/json"}; + {<<"body">>, Value} -> + Args#extern_resp_args{data=Value, ctype="text/html; charset=utf-8"}; + {<<"base64">>, Value} -> + Args#extern_resp_args{ + data=couch_util:decodeBase64(Value), + ctype="application/binary" + }; + {<<"headers">>, {Headers}} -> + NewHeaders = lists:map(fun({Header, HVal}) -> + {binary_to_list(Header), binary_to_list(HVal)} + end, Headers), + Args#extern_resp_args{headers=NewHeaders}; + _ -> % unknown key + Msg = lists:flatten(io_lib:format("Invalid data from external server: ~p", [{Key, Value}])), + throw({external_response_error, Msg}) + end + end, #extern_resp_args{}, Response). + +default_or_content_type(DefaultContentType, Headers) -> + {ContentType, OtherHeaders} = lists:partition( + fun({HeaderName, _}) -> + HeaderName == "Content-Type" + end, Headers), + + % XXX: What happens if we were passed multiple content types? We add another? + case ContentType of + [{"Content-Type", SetContentType}] -> + TrueContentType = SetContentType; + _Else -> + TrueContentType = DefaultContentType + end, + + HeadersWithContentType = lists:append(OtherHeaders, [{"Content-Type", TrueContentType}]), + HeadersWithContentType. diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl new file mode 100644 index 00000000..8867dfbe --- /dev/null +++ b/src/chttpd_misc.erl @@ -0,0 +1,232 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_misc). + +-export([handle_welcome_req/2,handle_favicon_req/2,handle_utils_dir_req/2, + handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1, + handle_uuids_req/1,handle_config_req/1,handle_log_req/1, + handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, + handle_utils_dir_req/1]). + +-export([increment_update_seq_req/2]). + + +-include("chttpd.hrl"). + +-import(chttpd, + [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, + start_json_response/2,send_chunk/2,end_json_response/1, + start_chunked_response/3, send_error/4]). + +% httpd global handlers + +handle_welcome_req(Req) -> + handle_welcome_req(Req, <<"Welcome">>). + +handle_welcome_req(#httpd{method='GET'}=Req, WelcomeMessage) -> + send_json(Req, {[ + {couchdb, WelcomeMessage}, + {version, <<"0.10.1+">>}, + {cloudant_build, get_version()} + ]}); +handle_welcome_req(Req, _) -> + send_method_not_allowed(Req, "GET,HEAD"). + +get_version() -> + Releases = release_handler:which_releases(), + Version = case [V || {"dbcore", V, _, current} <- Releases] of + [] -> + case [V || {"dbcore", V, _, permanent} <- Releases] of + [] -> + "dev"; + [Permanent] -> + Permanent + end; + [Current] -> + Current + end, + list_to_binary(Version). 
+ +handle_favicon_req(#httpd{method='GET'}=Req, DocumentRoot) -> + chttpd:serve_file(Req, "favicon.ico", DocumentRoot); +handle_favicon_req(Req, _) -> + send_method_not_allowed(Req, "GET,HEAD"). + +handle_utils_dir_req(Req) -> + handle_utils_dir_req(Req, couch_config:get("chttpd", "docroot")). + +handle_utils_dir_req(#httpd{method='GET'}=Req, DocumentRoot) -> + "/" ++ UrlPath = chttpd:path(Req), + case chttpd:partition(UrlPath) of + {_ActionKey, "/", RelativePath} -> + % GET /_utils/path or GET /_utils/ + chttpd:serve_file(Req, RelativePath, DocumentRoot); + {_ActionKey, "", _RelativePath} -> + % GET /_utils + RedirectPath = chttpd:path(Req) ++ "/", + chttpd:send_redirect(Req, RedirectPath) + end; +handle_utils_dir_req(Req, _) -> + send_method_not_allowed(Req, "GET,HEAD"). + +handle_sleep_req(#httpd{method='GET'}=Req) -> + Time = list_to_integer(chttpd:qs_value(Req, "time")), + receive snicklefart -> ok after Time -> ok end, + send_json(Req, {[{ok, true}]}); +handle_sleep_req(Req) -> + send_method_not_allowed(Req, "GET,HEAD"). + +handle_all_dbs_req(#httpd{method='GET'}=Req) -> + Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), + chttpd:header_value(Req, "Host")), + {ok, DbNames} = fabric:all_dbs(Customer), + send_json(Req, DbNames); +handle_all_dbs_req(Req) -> + send_method_not_allowed(Req, "GET,HEAD"). + + +handle_task_status_req(#httpd{method='GET'}=Req) -> + % convert the list of prop lists to a list of json objects + send_json(Req, [{Props} || Props <- couch_task_status:all()]); +handle_task_status_req(Req) -> + send_method_not_allowed(Req, "GET,HEAD"). + +handle_replicate_req(#httpd{method='POST'}=Req) -> + PostBody = get(post_body), + try ?COUCH:replicate_db(PostBody, Req#httpd.user_ctx) of + {ok, {continuous, RepId}} -> + send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]}); + {ok, {JsonResults}} -> + send_json(Req, {[{ok, true} | JsonResults]}); + {error, {Type, Details}} -> + send_json(Req, 500, {[{error, Type}, {reason, Details}]}); + {error, Reason} -> + send_json(Req, 500, {[{error, Reason}]}) + catch + throw:{db_not_found, Msg} -> + send_json(Req, 404, {[{error, db_not_found}, {reason, Msg}]}); + throw:{node_not_connected, Msg} -> + send_json(Req, 404, {[{error, node_not_connected}, {reason, Msg}]}) + end; +handle_replicate_req(Req) -> + send_method_not_allowed(Req, "POST"). + + +handle_restart_req(#httpd{method='POST'}=Req) -> + couch_server_sup:restart_core_server(), + send_json(Req, 200, {[{ok, true}]}); +handle_restart_req(Req) -> + send_method_not_allowed(Req, "POST"). + + +handle_uuids_req(#httpd{method='GET'}=Req) -> + Count = list_to_integer(chttpd:qs_value(Req, "count", "1")), + % generate the uuids + UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)], + % send a JSON response + Etag = chttpd:make_etag(UUIDs), + chttpd:etag_respond(Req, Etag, fun() -> + CacheBustingHeaders = [ + {"Date", httpd_util:rfc1123_date()}, + {"Cache-Control", "no-cache"}, + % Past date, ON PURPOSE! + {"Expires", "Fri, 01 Jan 1990 00:00:00 GMT"}, + {"Pragma", "no-cache"}, + {"ETag", Etag} + ], + send_json(Req, 200, CacheBustingHeaders, {[{<<"uuids">>, UUIDs}]}) + end); +handle_uuids_req(Req) -> + send_method_not_allowed(Req, "GET"). 
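A concrete (invented) example of the response body for GET /_uuids?count=2, as produced by the send_json/4 call above:

%% {[{<<"uuids">>, [<<"f6d1f3cecbb6dfaf65d2fd6e8f0f6dd0">>,
%%                  <<"0c21aa98e67d4575b9e3558ba4084a4e">>]}]}
%% serialized as {"uuids":["f6d1f3cecbb6dfaf65d2fd6e8f0f6dd0","0c21aa98e67d4575b9e3558ba4084a4e"]},
%% with the cache-busting headers attached so the response is never reused.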
+ + +% Config request handler + + +% GET /_config/ +% GET /_config +handle_config_req(#httpd{method='GET', path_parts=[_]}=Req) -> + Grouped = lists:foldl(fun({{Section, Key}, Value}, Acc) -> + case dict:is_key(Section, Acc) of + true -> + dict:append(Section, {list_to_binary(Key), list_to_binary(Value)}, Acc); + false -> + dict:store(Section, [{list_to_binary(Key), list_to_binary(Value)}], Acc) + end + end, dict:new(), couch_config:all()), + KVs = dict:fold(fun(Section, Values, Acc) -> + [{list_to_binary(Section), {Values}} | Acc] + end, [], Grouped), + send_json(Req, 200, {KVs}); +% GET /_config/Section +handle_config_req(#httpd{method='GET', path_parts=[_,Section]}=Req) -> + KVs = [{list_to_binary(Key), list_to_binary(Value)} + || {Key, Value} <- couch_config:get(Section)], + send_json(Req, 200, {KVs}); +% PUT /_config/Section/Key +% "value" +handle_config_req(#httpd{method='PUT', path_parts=[_, Section, Key]}=Req) -> + Value = chttpd:json_body(Req), + Persist = chttpd:header_value(Req, "X-Couch-Persist") /= "false", + OldValue = couch_config:get(Section, Key, ""), + ok = couch_config:set(Section, Key, ?b2l(Value), Persist), + send_json(Req, 200, list_to_binary(OldValue)); +% GET /_config/Section/Key +handle_config_req(#httpd{method='GET', path_parts=[_, Section, Key]}=Req) -> + case couch_config:get(Section, Key, null) of + null -> + throw({not_found, unknown_config_value}); + Value -> + send_json(Req, 200, list_to_binary(Value)) + end; +% DELETE /_config/Section/Key +handle_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req) -> + Persist = chttpd:header_value(Req, "X-Couch-Persist") /= "false", + case couch_config:get(Section, Key, null) of + null -> + throw({not_found, unknown_config_value}); + OldValue -> + couch_config:delete(Section, Key, Persist), + send_json(Req, 200, list_to_binary(OldValue)) + end; +handle_config_req(Req) -> + send_method_not_allowed(Req, "GET,PUT,DELETE"). + + +% httpd db handlers + +increment_update_seq_req(#httpd{method='POST'}=Req, Db) -> + {ok, NewSeq} = ?COUCH:increment_update_seq(Db), + send_json(Req, {[{ok, true}, + {update_seq, NewSeq} + ]}); +increment_update_seq_req(Req, _Db) -> + send_method_not_allowed(Req, "POST"). + +% httpd log handlers + +handle_log_req(#httpd{method='GET'}=Req) -> + Bytes = list_to_integer(chttpd:qs_value(Req, "bytes", "1000")), + Offset = list_to_integer(chttpd:qs_value(Req, "offset", "0")), + Chunk = couch_log:read(Bytes, Offset), + {ok, Resp} = start_chunked_response(Req, 200, [ + % send a plaintext response + {"Content-Type", "text/plain; charset=utf-8"}, + {"Content-Length", integer_to_list(length(Chunk))} + ]), + send_chunk(Resp, Chunk), + send_chunk(Resp, ""); +handle_log_req(Req) -> + send_method_not_allowed(Req, "GET"). + + diff --git a/src/chttpd_oauth.erl b/src/chttpd_oauth.erl new file mode 100644 index 00000000..58fafbb8 --- /dev/null +++ b/src/chttpd_oauth.erl @@ -0,0 +1,168 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_oauth). +-include("chttpd.hrl"). 
+ +-export([oauth_authentication_handler/1, handle_oauth_req/1, consumer_lookup/2]). + +% OAuth auth handler using per-node user db +oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) -> + serve_oauth(Req, fun(URL, Params, Consumer, Signature) -> + AccessToken = couch_util:get_value("oauth_token", Params), + TokenSecret = couch_config:get("oauth_token_secrets", AccessToken), + case oauth:verify(Signature, atom_to_list(MochiReq:get(method)), URL, Params, Consumer, TokenSecret) of + true -> + set_user_ctx(Req, AccessToken); + false -> + Req + end + end, true). + +% Look up the consumer key and get the roles to give the consumer +set_user_ctx(Req, AccessToken) -> + DbName = couch_config:get("chttpd_auth", "authentication_db"), + {ok, _Db} = chttpd_auth:ensure_users_db_exists(?l2b(DbName)), + Name = ?l2b(couch_config:get("oauth_token_users", AccessToken)), + case chttpd_auth:get_user(Name) of + nil -> Req; + User -> + Roles = couch_util:get_value(<<"roles">>, User, []), + Req#httpd{user_ctx=#user_ctx{name=Name, roles=Roles}} + end. + +% OAuth request_token +handle_oauth_req(#httpd{path_parts=[_OAuth, <<"request_token">>], method=Method}=Req) -> + serve_oauth(Req, fun(URL, Params, Consumer, Signature) -> + AccessToken = couch_util:get_value("oauth_token", Params), + TokenSecret = couch_config:get("oauth_token_secrets", AccessToken), + case oauth:verify(Signature, atom_to_list(Method), URL, Params, Consumer, TokenSecret) of + true -> + ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>); + false -> + invalid_signature(Req) + end + end, false); +handle_oauth_req(#httpd{path_parts=[_OAuth, <<"authorize">>]}=Req) -> + {ok, serve_oauth_authorize(Req)}; +handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>], method='GET'}=Req) -> + serve_oauth(Req, fun(URL, Params, Consumer, Signature) -> + case oauth:token(Params) of + "requestkey" -> + case oauth:verify(Signature, "GET", URL, Params, Consumer, "requestsecret") of + true -> + ok(Req, <<"oauth_token=accesskey&oauth_token_secret=accesssecret">>); + false -> + invalid_signature(Req) + end; + _ -> + chttpd:send_error(Req, 400, <<"invalid_token">>, <<"Invalid OAuth token.">>) + end + end, false); +handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>]}=Req) -> + chttpd:send_method_not_allowed(Req, "GET"). + +invalid_signature(Req) -> + chttpd:send_error(Req, 400, <<"invalid_signature">>, <<"Invalid signature value.">>). + +% This needs to be protected i.e. force user to login using HTTP Basic Auth or form-based login. 
+serve_oauth_authorize(#httpd{method=Method}=Req) -> + case Method of + 'GET' -> + % Confirm with the User that they want to authenticate the Consumer + serve_oauth(Req, fun(URL, Params, Consumer, Signature) -> + AccessToken = couch_util:get_value("oauth_token", Params), + TokenSecret = couch_config:get("oauth_token_secrets", AccessToken), + case oauth:verify(Signature, "GET", URL, Params, Consumer, TokenSecret) of + true -> + ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>); + false -> + invalid_signature(Req) + end + end, false); + 'POST' -> + % If the User has confirmed, we direct the User back to the Consumer with a verification code + serve_oauth(Req, fun(URL, Params, Consumer, Signature) -> + AccessToken = couch_util:get_value("oauth_token", Params), + TokenSecret = couch_config:get("oauth_token_secrets", AccessToken), + case oauth:verify(Signature, "POST", URL, Params, Consumer, TokenSecret) of + true -> + %redirect(oauth_callback, oauth_token, oauth_verifier), + ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>); + false -> + invalid_signature(Req) + end + end, false); + _ -> + chttpd:send_method_not_allowed(Req, "GET,POST") + end. + +serve_oauth(#httpd{mochi_req=MochiReq}=Req, Fun, FailSilently) -> + % 1. In the HTTP Authorization header as defined in OAuth HTTP Authorization Scheme. + % 2. As the HTTP POST request body with a content-type of application/x-www-form-urlencoded. + % 3. Added to the URLs in the query part (as defined by [RFC3986] section 3). + AuthHeader = case MochiReq:get_header_value("authorization") of + undefined -> + ""; + Else -> + [Head | Tail] = re:split(Else, "\\s", [{parts, 2}, {return, list}]), + case [string:to_lower(Head) | Tail] of + ["oauth", Rest] -> Rest; + _ -> "" + end + end, + HeaderParams = oauth_uri:params_from_header_string(AuthHeader), + %Realm = couch_util:get_value("realm", HeaderParams), + Params = proplists:delete("realm", HeaderParams) ++ MochiReq:parse_qs(), + ?LOG_DEBUG("OAuth Params: ~p", [Params]), + case couch_util:get_value("oauth_version", Params, "1.0") of + "1.0" -> + case couch_util:get_value("oauth_consumer_key", Params, undefined) of + undefined -> + case FailSilently of + true -> Req; + false -> chttpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer.">>) + end; + ConsumerKey -> + SigMethod = couch_util:get_value("oauth_signature_method", Params), + case consumer_lookup(ConsumerKey, SigMethod) of + none -> + chttpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer (key or signature method).">>); + Consumer -> + Signature = couch_util:get_value("oauth_signature", Params), + URL = chttpd:absolute_uri(Req, MochiReq:get(raw_path)), + Fun(URL, proplists:delete("oauth_signature", Params), + Consumer, Signature) + end + end; + _ -> + chttpd:send_error(Req, 400, <<"invalid_oauth_version">>, <<"Invalid OAuth version.">>) + end. + +consumer_lookup(Key, MethodStr) -> + SignatureMethod = case MethodStr of + "PLAINTEXT" -> plaintext; + "HMAC-SHA1" -> hmac_sha1; + %"RSA-SHA1" -> rsa_sha1; + _Else -> undefined + end, + case SignatureMethod of + undefined -> none; + _SupportedMethod -> + case couch_config:get("oauth_consumer_secrets", Key, undefined) of + undefined -> none; + Secret -> {Key, Secret, SignatureMethod} + end + end. + +ok(#httpd{mochi_req=MochiReq}, Body) -> + {ok, MochiReq:respond({200, [], Body})}. 
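A hedged sketch of consumer_lookup/2's contract; the config section name comes from the code above, the values are invented:

%% With node config
%%   [oauth_consumer_secrets]
%%   consumer1 = sekr1t
%% the lookups behave as:
%% consumer_lookup("consumer1", "HMAC-SHA1") => {"consumer1", "sekr1t", hmac_sha1}
%% consumer_lookup("consumer1", "RSA-SHA1")  => none   (method commented out above)
%% consumer_lookup("missing",   "PLAINTEXT") => none   (no secret configured)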
diff --git a/src/chttpd_server.erl b/src/chttpd_server.erl new file mode 100644 index 00000000..77fb5410 --- /dev/null +++ b/src/chttpd_server.erl @@ -0,0 +1,3 @@ +-module(chttpd_server). + +-include("chttpd.hrl"). \ No newline at end of file diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl new file mode 100644 index 00000000..bba05ec1 --- /dev/null +++ b/src/chttpd_show.erl @@ -0,0 +1,496 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_show). + +-export([handle_doc_show_req/2, handle_doc_update_req/2, handle_view_list_req/2, + handle_doc_show/5, handle_view_list/7, start_list_resp/5, + send_list_row/6]). + +-include("chttpd.hrl"). + +-import(chttpd, + [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, + start_json_response/2,send_chunk/2,send_chunked_error/2, + start_chunked_response/3, send_error/4]). + +handle_doc_show_req(#httpd{ + method='GET', + path_parts=[_DbName, _Design, DesignName, _Show, ShowName, DocId] + }=Req, Db) -> + handle_doc_show(Req, DesignName, ShowName, DocId, Db); + +handle_doc_show_req(#httpd{ + path_parts=[_DbName, _Design, DesignName, _Show, ShowName] + }=Req, Db) -> + handle_doc_show(Req, DesignName, ShowName, nil, Db); + +handle_doc_show_req(#httpd{method='GET'}=Req, _Db) -> + send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>); + +handle_doc_show_req(Req, _Db) -> + send_method_not_allowed(Req, "GET,POST,HEAD"). + +handle_doc_update_req(#httpd{ + method = 'PUT', + path_parts=[_DbName, _Design, DesignName, _Update, UpdateName, DocId] + }=Req, Db) -> + DesignId = <<"_design/", DesignName/binary>>, + #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), + Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), + UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]), + Doc = try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of + FoundDoc -> FoundDoc + catch + _ -> nil + end, + send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db); + +handle_doc_update_req(#httpd{ + method = 'POST', + path_parts=[_DbName, _Design, DesignName, _Update, UpdateName] + }=Req, Db) -> + DesignId = <<"_design/", DesignName/binary>>, + #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), + Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), + UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]), + send_doc_update_response(Lang, UpdateSrc, nil, nil, Req, Db); + +handle_doc_update_req(#httpd{ + path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName, _DocId] + }=Req, _Db) -> + send_method_not_allowed(Req, "PUT"); + +handle_doc_update_req(#httpd{ + path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName] + }=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +handle_doc_update_req(Req, _Db) -> + send_error(Req, 404, <<"update_error">>, <<"Invalid path.">>). 
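The update handlers above fetch the function source from the design doc body via couch_util:get_nested_json_value/2; roughly, with an invented design doc:

%% Props = [{<<"language">>, <<"javascript">>},
%%          {<<"updates">>, {[{<<"bump">>, <<"function(doc, req){ return [doc, 'ok']; }">>}]}}],
%% couch_util:get_nested_json_value({Props}, [<<"updates">>, <<"bump">>])
%%   => <<"function(doc, req){ return [doc, 'ok']; }">>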
+ + + + +handle_doc_show(Req, DesignName, ShowName, DocId, Db) -> + DesignId = <<"_design/", DesignName/binary>>, + #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), + Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), + ShowSrc = couch_util:get_nested_json_value({Props}, [<<"shows">>, ShowName]), + Doc = case DocId of + nil -> nil; + _ -> + try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of + FoundDoc -> FoundDoc + catch + _ -> nil + end + end, + send_doc_show_response(Lang, ShowSrc, DocId, Doc, Req, Db). + +% view-list request with view and list from same design doc. +handle_view_list_req(#httpd{method='GET', + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) -> + handle_view_list(Req, DesignName, ListName, DesignName, ViewName, Db, nil); + +% view-list request with view and list from different design docs. +handle_view_list_req(#httpd{method='GET', + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewDesignName, ViewName]}=Req, Db) -> + handle_view_list(Req, DesignName, ListName, ViewDesignName, ViewName, Db, nil); + +handle_view_list_req(#httpd{method='GET'}=Req, _Db) -> + send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>); + +handle_view_list_req(#httpd{method='POST', + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) -> + ReqBody = chttpd:body(Req), + {Props2} = ?JSON_DECODE(ReqBody), + Keys = couch_util:get_value(<<"keys">>, Props2, nil), + handle_view_list(Req#httpd{req_body=ReqBody}, DesignName, ListName, DesignName, ViewName, Db, Keys); + +handle_view_list_req(Req, _Db) -> + send_method_not_allowed(Req, "GET,POST,HEAD"). + +handle_view_list(Req, ListDesignName, ListName, ViewDesignName, ViewName, Db, Keys) -> + ListDesignId = <<"_design/", ListDesignName/binary>>, + #doc{body={ListProps}} = chttpd_db:couch_doc_open(Db, ListDesignId, nil, []), + if + ViewDesignName == ListDesignName -> + ViewProps = ListProps, + ViewDesignId = ListDesignId; + true -> + ViewDesignId = <<"_design/", ViewDesignName/binary>>, + #doc{body={ViewProps}} = chttpd_db:couch_doc_open(Db, ViewDesignId, nil, []) + end, + + ViewLang = couch_util:get_value(<<"language">>, ViewProps, <<"javascript">>), + ListSrc = couch_util:get_nested_json_value({ListProps}, [<<"lists">>, ListName]), + Group = couch_view_group:design_doc_to_view_group(Db, #doc{id=ViewDesignId, + body={ViewProps}}), + send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db, + Group, Keys). + % send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db, Keys). + + +send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Group, Keys) -> + IsReduce = chttpd_view:get_reduce_type(Req), + ViewType = chttpd_view:extract_view_type(ViewName, Group#group.views, + IsReduce), + QueryArgs = chttpd_view:parse_view_params(Req, Keys, ViewType), + {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), + StartListRespFun = make_map_start_resp_fun(QueryServer, Db), + Etag = couch_util:new_uuid(), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Total, Result} = ?COUCH:list_view(Req, Db, DesignId, ViewName, + Keys, QueryArgs, QueryServer), + finish_list(Req, QueryServer, Etag, Result, StartListRespFun, Total) + end). 
+
+send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Keys) ->
+    Stale = chttpd_view:get_stale_type(Req),
+    Reduce = chttpd_view:get_reduce_type(Req),
+    case ?COUCH:get_map_view(Db, DesignId, ViewName, Stale) of
+    {ok, View, Group} ->
+        QueryArgs = chttpd_view:parse_view_params(Req, Keys, map),
+        output_map_list(Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys);
+    {not_found, _Reason} ->
+        case ?COUCH:get_reduce_view(Db, DesignId, ViewName, Stale) of
+        {ok, ReduceView, Group} ->
+            case Reduce of
+            false ->
+                QueryArgs = chttpd_view:parse_view_params(
+                    Req, Keys, map_red
+                ),
+                MapView = ?COUCH:extract_map_view(ReduceView),
+                output_map_list(Req, Lang, ListSrc, MapView, Group, Db, QueryArgs, Keys);
+            _ ->
+                QueryArgs = chttpd_view:parse_view_params(
+                    Req, Keys, reduce
+                ),
+                output_reduce_list(Req, Lang, ListSrc, ReduceView, Group, Db, QueryArgs, Keys)
+            end;
+        {not_found, Reason} ->
+            throw({not_found, Reason})
+        end
+    end.
+
+
+output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) ->
+    #view_query_args{
+        limit = Limit,
+        direction = Dir,
+        skip = SkipCount,
+        start_key = StartKey,
+        start_docid = StartDocId
+    } = QueryArgs,
+    {ok, RowCount} = ?COUCH:get_row_count(View),
+    Start = {StartKey, StartDocId},
+    Headers = MReq:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Accept = couch_util:get_value('Accept', Hlist),
+    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}),
+    chttpd:etag_respond(Req, CurrentEtag, fun() ->
+        % get the os process here
+        % pass it into the view fold with closures
+        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
+
+        StartListRespFun = make_map_start_resp_fun(QueryServer, Db),
+        SendListRowFun = make_map_send_row_fun(QueryServer),
+
+        FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount,
+            #view_fold_helper_funs{
+                reduce_count = fun ?COUCH:reduce_to_count/1,
+                start_response = StartListRespFun,
+                send_row = SendListRowFun
+            }),
+        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
+        {ok, FoldResult} = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit),
+        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount)
+    end);
+
+output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) ->
+    #view_query_args{
+        limit = Limit,
+        direction = Dir,
+        skip = SkipCount,
+        start_docid = StartDocId
+    } = QueryArgs,
+    {ok, RowCount} = ?COUCH:get_row_count(View),
+    Headers = MReq:get(headers),
+    Hlist = mochiweb_headers:to_list(Headers),
+    Accept = couch_util:get_value('Accept', Hlist),
+    CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}),
+    chttpd:etag_respond(Req, CurrentEtag, fun() ->
+        % get the os process here
+        % pass it into the view fold with closures
+        {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc),
+
+        StartListRespFun = make_map_start_resp_fun(QueryServer, Db),
+        SendListRowFun = make_map_send_row_fun(QueryServer),
+
+        FoldAccInit = {Limit, SkipCount, undefined, [], nil},
+        {ok, FoldResult} = lists:foldl(
+            fun(Key, {ok, FoldAcc}) ->
+                FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs#view_query_args{
+                        start_key = Key,
+                        end_key = Key
+                    }, CurrentEtag, Db, RowCount,
+                    #view_fold_helper_funs{
+                        reduce_count = fun ?COUCH:reduce_to_count/1,
+                        start_response = StartListRespFun,
+                        send_row = SendListRowFun
+                    }),
+                ?COUCH:view_fold(View, {Key, StartDocId}, Dir, FoldlFun, FoldAcc)
+            end, {ok, FoldAccInit}, Keys),
+        finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount)
+    end).
+
+make_map_start_resp_fun(QueryServer, Db) ->
+    fun(Req, Etag, TotalRows, Offset, _Acc) ->
+        Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}]},
+        start_list_resp(QueryServer, Req, Db, Head, Etag)
+    end.
+
+make_reduce_start_resp_fun(QueryServer, _Req, Db, _CurrentEtag) ->
+    fun(Req2, Etag, _Acc) ->
+        start_list_resp(QueryServer, Req2, Db, {[]}, Etag)
+    end.
+
+start_list_resp(QueryServer, Req, Db, Head, Etag) ->
+    [<<"start">>,Chunks,JsonResp] = couch_query_servers:render_list_head(QueryServer,
+        Req, Db, Head),
+    JsonResp2 = apply_etag(JsonResp, Etag),
+    #extern_resp_args{
+        code = Code,
+        ctype = CType,
+        headers = ExtHeaders
+    } = chttpd_external:parse_external_response(JsonResp2),
+    JsonHeaders = chttpd_external:default_or_content_type(CType, ExtHeaders),
+    {ok, Resp} = start_chunked_response(Req, Code, JsonHeaders),
+    {ok, Resp, ?b2l(?l2b(Chunks))}.
+
+make_map_send_row_fun(QueryServer) ->
+    fun(Resp, Db, Row, IncludeDocs, RowFront) ->
+        send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDocs)
+    end.
+
+make_reduce_send_row_fun(QueryServer, Db) ->
+    fun(Resp, Row, RowFront) ->
+        send_list_row(Resp, QueryServer, Db, Row, RowFront, false)
+    end.
+
+send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDoc) ->
+    try
+        [Go,Chunks] = couch_query_servers:render_list_row(QueryServer, Db, Row, IncludeDoc),
+        Chunk = RowFront ++ ?b2l(?l2b(Chunks)),
+        send_non_empty_chunk(Resp, Chunk),
+        case Go of
+        <<"chunks">> ->
+            {ok, ""};
+        <<"end">> ->
+            {stop, stop}
+        end
+    catch
+        throw:Error ->
+            send_chunked_error(Resp, Error),
+            throw({already_sent, Resp, Error})
+    end.
+
+send_non_empty_chunk(Resp, Chunk) ->
+    case Chunk of
+        [] -> ok;
+        _ -> send_chunk(Resp, Chunk)
+    end.
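send_list_row/6 and finish_list/6 (below) rely on the query server replying with a two-element list; summarizing the protocol as used in this module:

%% couch_query_servers:render_list_row/4 returns
%%   [<<"chunks">>, Chunks] -> emit Chunks and keep folding ({ok, ""})
%%   [<<"end">>,    Chunks] -> emit Chunks and stop the fold ({stop, stop})
%% couch_query_servers:render_list_tail/1 returns [<<"end">>, Chunks] for the trailer.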
+ +output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) -> + #view_query_args{ + limit = Limit, + direction = Dir, + skip = SkipCount, + start_key = StartKey, + start_docid = StartDocId, + end_key = EndKey, + end_docid = EndDocId, + group_level = GroupLevel + } = QueryArgs, + Headers = MReq:get(headers), + Hlist = mochiweb_headers:to_list(Headers), + Accept = couch_util:get_value('Accept', Hlist), + CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + % get the os process here + % pass it into the view fold with closures + {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), + StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag), + SendListRowFun = make_reduce_send_row_fun(QueryServer, Db), + + {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req, + GroupLevel, QueryArgs, CurrentEtag, + #reduce_fold_helper_funs{ + start_response = StartListRespFun, + send_row = SendListRowFun + }), + FoldAccInit = {Limit, SkipCount, undefined, []}, + {ok, FoldResult} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId}, + {EndKey, EndDocId}, GroupRowsFun, RespFun, + FoldAccInit), + finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null) + end); + +output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) -> + #view_query_args{ + limit = Limit, + direction = Dir, + skip = SkipCount, + start_docid = StartDocId, + end_docid = EndDocId, + group_level = GroupLevel + } = QueryArgs, + Headers = MReq:get(headers), + Hlist = mochiweb_headers:to_list(Headers), + Accept = couch_util:get_value('Accept', Hlist), + CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx, Keys}), + + chttpd:etag_respond(Req, CurrentEtag, fun() -> + % get the os process here + % pass it into the view fold with closures + {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), + StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag), + SendListRowFun = make_reduce_send_row_fun(QueryServer, Db), + + {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req, + GroupLevel, QueryArgs, CurrentEtag, + #reduce_fold_helper_funs{ + start_response = StartListRespFun, + send_row = SendListRowFun + }), + FoldAccInit = {Limit, SkipCount, undefined, []}, + {ok, FoldResult} = lists:foldl( + fun(Key, {ok, FoldAcc}) -> + ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId}, + {Key, EndDocId}, GroupRowsFun, RespFun, FoldAcc) + end, {ok, FoldAccInit}, Keys), + finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null) + end). 
+ +finish_list(Req, QueryServer, Etag, FoldResult, StartFun, TotalRows) -> + FoldResult2 = case FoldResult of + {Limit, SkipCount, Response, RowAcc} -> + {Limit, SkipCount, Response, RowAcc, nil}; + Else -> + Else + end, + case FoldResult2 of + {_, _, undefined, _, _} -> + {ok, Resp, BeginBody} = + render_head_for_empty_list(StartFun, Req, Etag, TotalRows), + [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer), + Chunk = BeginBody ++ ?b2l(?l2b(Chunks)), + send_non_empty_chunk(Resp, Chunk); + {_, _, Resp, stop, _} -> + ok; + {_, _, Resp, _, _} -> + [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer), + send_non_empty_chunk(Resp, ?b2l(?l2b(Chunks))) + end, + couch_query_servers:stop_doc_map(QueryServer), + send_chunk(Resp, []). + + +render_head_for_empty_list(StartListRespFun, Req, Etag, null) -> + StartListRespFun(Req, Etag, []); % for reduce +render_head_for_empty_list(StartListRespFun, Req, Etag, TotalRows) -> + StartListRespFun(Req, Etag, TotalRows, null, []). + +send_doc_show_response(Lang, ShowSrc, DocId, nil, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) -> + % compute etag with no doc + Headers = MReq:get(headers), + Hlist = mochiweb_headers:to_list(Headers), + Accept = couch_util:get_value('Accept', Hlist), + CurrentEtag = chttpd:make_etag({Lang, ShowSrc, nil, Accept, UserCtx}), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc, + DocId, nil, Req, Db), + JsonResp = apply_etag(ExternalResp, CurrentEtag), + chttpd_external:send_external_response(Req, JsonResp) + end); + +send_doc_show_response(Lang, ShowSrc, DocId, #doc{revs=Revs}=Doc, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) -> + % calculate the etag + Headers = MReq:get(headers), + Hlist = mochiweb_headers:to_list(Headers), + Accept = couch_util:get_value('Accept', Hlist), + CurrentEtag = chttpd:make_etag({Lang, ShowSrc, Revs, Accept, UserCtx}), + % We know our etag now + chttpd:etag_respond(Req, CurrentEtag, fun() -> + [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc, + DocId, Doc, Req, Db), + JsonResp = apply_etag(ExternalResp, CurrentEtag), + chttpd_external:send_external_response(Req, JsonResp) + end). + +send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db) -> + case couch_query_servers:render_doc_update(Lang, UpdateSrc, + DocId, Doc, Req, Db) of + [<<"up">>, {NewJsonDoc}, JsonResp] -> + Options = case chttpd:header_value(Req, "X-Couch-Full-Commit", "false") of + "true" -> + [full_commit]; + _ -> + [] + end, + NewDoc = couch_doc:from_json_obj({NewJsonDoc}), + Code = 201, + % todo set location field + {ok, _NewRev} = ?COUCH:update_doc(Db, NewDoc, Options); + [<<"up">>, _Other, JsonResp] -> + Code = 200, + ok + end, + JsonResp2 = json_apply_field({<<"code">>, Code}, JsonResp), + chttpd_external:send_external_response(Req, JsonResp2). + +% Maybe this is in the proplists API +% todo move to couch_util +json_apply_field(H, {L}) -> + json_apply_field(H, L, []). +json_apply_field({Key, NewValue}, [{Key, _OldVal} | Headers], Acc) -> + % drop matching keys + json_apply_field({Key, NewValue}, Headers, Acc); +json_apply_field({Key, NewValue}, [{OtherKey, OtherVal} | Headers], Acc) -> + % something else is next, leave it alone. + json_apply_field({Key, NewValue}, Headers, [{OtherKey, OtherVal} | Acc]); +json_apply_field({Key, NewValue}, [], Acc) -> + % end of list, add ours + {[{Key, NewValue}|Acc]}. 
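For instance, json_apply_field/2 drops any existing occurrence of the key and prepends the replacement:

%% json_apply_field({<<"code">>, 201},
%%                  {[{<<"code">>, 200}, {<<"body">>, <<"ok">>}]}).
%% => {[{<<"code">>, 201}, {<<"body">>, <<"ok">>}]}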
+ +apply_etag({ExternalResponse}, CurrentEtag) -> + % Here we embark on the delicate task of replacing or creating the + % headers on the JsonResponse object. We need to control the Etag and + % Vary headers. If the external function controls the Etag, we'd have to + % run it to check for a match, which sort of defeats the purpose. + case couch_util:get_value(<<"headers">>, ExternalResponse, nil) of + nil -> + % no JSON headers + % add our Etag and Vary headers to the response + {[{<<"headers">>, {[{<<"Etag">>, CurrentEtag}, {<<"Vary">>, <<"Accept">>}]}} | ExternalResponse]}; + JsonHeaders -> + {[case Field of + {<<"headers">>, JsonHeaders} -> % add our headers + JsonHeadersEtagged = json_apply_field({<<"Etag">>, CurrentEtag}, JsonHeaders), + JsonHeadersVaried = json_apply_field({<<"Vary">>, <<"Accept">>}, JsonHeadersEtagged), + {<<"headers">>, JsonHeadersVaried}; + _ -> % skip non-header fields + Field + end || Field <- ExternalResponse]} + end. + diff --git a/src/chttpd_stats.erl b/src/chttpd_stats.erl new file mode 100644 index 00000000..513a80f3 --- /dev/null +++ b/src/chttpd_stats.erl @@ -0,0 +1,62 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_stats). +-include("chttpd.hrl"). +-include("../../couch/src/couch_stats.hrl"). + +-export([handle_stats_req/1]). +-import(chttpd, + [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, + start_json_response/2,send_chunk/2,end_json_response/1, + start_chunked_response/3, send_error/4]). + +-define(b2a(V), list_to_atom(binary_to_list(V))). + +-record(stats_query_args, { + range='0', + flush=false +}). + +handle_stats_req(#httpd{method='GET', path_parts=[_]}=Req) -> + send_json(Req, couch_stats_aggregator:all()); + +handle_stats_req(#httpd{method='GET', path_parts=[_Stats, Module, Key]}=Req) -> + #stats_query_args{ + range=Range, + flush=Flush + } = parse_stats_query(Req), + + case Flush of + true -> + couch_stats_aggregator:time_passed(); + _ -> ok + end, + + Stats = couch_stats_aggregator:get_json({?b2a(Module), ?b2a(Key)}, Range), + Response = {[{Module, {[{Key, Stats}]}}]}, + send_json(Req, Response); + +handle_stats_req(Req) -> + send_method_not_allowed(Req, "GET"). + +parse_stats_query(Req) -> + lists:foldl(fun({Key,Value}, Args) -> + case {Key, Value} of + {"range", Range} -> + Args#stats_query_args{range=list_to_atom(Range)}; + {"flush", "true"} -> + Args#stats_query_args{flush=true}; + _Else -> % unknown key value pair, ignore. + Args + end + end, #stats_query_args{}, chttpd:qs(Req)). diff --git a/src/chttpd_sup.erl b/src/chttpd_sup.erl new file mode 100644 index 00000000..c710ec37 --- /dev/null +++ b/src/chttpd_sup.erl @@ -0,0 +1,15 @@ +-module(chttpd_sup). +-behaviour(supervisor). +-export([init/1]). + +-export([start_link/1]). + +-include("chttpd.hrl"). + +start_link(Args) -> + supervisor:start_link({local,?MODULE}, ?MODULE, Args). + +init([]) -> + Mod = chttpd, + Spec = {Mod, {Mod,start_link,[]}, permanent, 100, worker, [Mod]}, + {ok, {{one_for_one, 3, 10}, [Spec]}}. 
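A minimal sketch of the supervision tree this yields at runtime (shell output illustrative; assumes the kernel, stdlib, couch, and fabric applications listed in chttpd.app are already running):

%% 1> chttpd_sup:start_link([]).
%% {ok, <0.150.0>}
%% 2> supervisor:which_children(chttpd_sup).
%% [{chttpd, <0.151.0>, worker, [chttpd]}]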
diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl new file mode 100644 index 00000000..c9fb1c9f --- /dev/null +++ b/src/chttpd_view.erl @@ -0,0 +1,738 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_view). +-include("chttpd.hrl"). + +-export([handle_view_req/2,handle_temp_view_req/2,handle_db_view_req/2]). + +-export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]). +-export([make_view_fold_fun/6, finish_view_fold/3, view_row_obj/3]). +-export([view_group_etag/2, view_group_etag/3, make_reduce_fold_funs/5]). +-export([design_doc_view/5, parse_bool_param/1, extract_view_type/3]). + +-import(chttpd, + [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,send_chunk/2, + start_json_response/2, start_json_response/3, end_json_response/1, + send_chunked_error/2]). + +design_doc_view(Req, Db, Id, ViewName, Keys) -> + DesignId = <<"_design/", Id/binary>>, + {_ViewGroup, QueryArgs} = case ?COUCH:open_doc(Db, DesignId, []) of + {ok, Doc} -> + Group = couch_view_group:design_doc_to_view_group(Db, Doc), + IsReduce = get_reduce_type(Req), + ViewType = extract_view_type(ViewName, Group#group.views, IsReduce), + {Group, parse_view_params(Req, Keys, ViewType)}; + {not_found, Reason} -> + throw({not_found, Reason}) + end, + % this etag is wrong as current_seq == 0 right now, so no caching allowed + % Etag = view_group_etag(ViewGroup, Db, Keys), + Etag = couch_util:new_uuid(), + couch_stats_collector:increment({httpd, view_reads}), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), + case ?COUCH:design_view(Resp, Db, DesignId, ViewName, Keys, QueryArgs) of + {ok, Total, Result} -> + send_chunk(Resp, final_chunk(Total, Result)), + end_json_response(Resp); + {ok, Resp} -> + {ok, Resp} + end + end). + +extract_view_type(_ViewName, [], _IsReduce) -> + throw({not_found, missing_named_view}); +extract_view_type(ViewName, [View|Rest], IsReduce) -> + case lists:member(ViewName, [Name || {Name, _} <- View#view.reduce_funs]) of + true -> + if IsReduce -> reduce; true -> red_map end; + false -> + case lists:member(ViewName, View#view.map_names) of + true -> map; + false -> extract_view_type(ViewName, Rest, IsReduce) + end + end. + +final_chunk(Total, {_, _, undefined, _, nil}) -> + ?JSON_ENCODE({[{total_rows, Total}, {offset, Total}, {rows, []}]}); +final_chunk(Total, {_, _, undefined, _, Offset}) -> + ?JSON_ENCODE({[{total_rows, Total}, {offset, Offset}, {rows, []}]}); +final_chunk(_, {_, _, _, _, _}) -> + "\r\n]}"; +final_chunk(_, Error) -> + throw(Error). 
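final_chunk/2 closes the streamed JSON according to whether any rows were written (a still-undefined third accumulator element means no row was ever sent); roughly:

%% No rows sent, offset never established:
%%   final_chunk(10, {0, 0, undefined, [], nil})
%%     => ?JSON_ENCODE({[{total_rows, 10}, {offset, 10}, {rows, []}]})
%% Rows already streamed, so only the array and object are terminated:
%%   final_chunk(10, {0, 0, Resp, [], 5}) => "\r\n]}"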
+ +handle_view_req(#httpd{method='GET', + path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> + design_doc_view(Req, Db, DName, ViewName, nil); + +handle_view_req(#httpd{method='POST', + path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> + {Fields} = chttpd:json_body_obj(Req), + case couch_util:get_value(<<"keys">>, Fields, nil) of + nil -> + Fmt = "POST to view ~p/~p in database ~p with no keys member.", + ?LOG_DEBUG(Fmt, [DName, ViewName, Db]), + design_doc_view(Req, Db, DName, ViewName, nil); + Keys when is_list(Keys) -> + design_doc_view(Req, Db, DName, ViewName, Keys); + _ -> + throw({bad_request, "`keys` member must be a array."}) + end; + +handle_view_req(Req, _Db) -> + send_method_not_allowed(Req, "GET,POST,HEAD"). + +handle_db_view_req(#httpd{method='GET', + path_parts=[_Db, _View, DName, ViewName]}=Req, Db) -> + QueryArgs = chttpd_view:parse_view_params(Req, nil, nil), + #view_query_args{ + list = ListName + } = QueryArgs, + ?LOG_DEBUG("ici ~p", [ListName]), + case ListName of + nil -> chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil); + _ -> + chttpd_show:handle_view_list(Req, DName, ListName, DName, ViewName, Db, nil) + end; + +handle_db_view_req(#httpd{method='POST', + path_parts=[_Db, _View, DName, ViewName]}=Req, Db) -> + QueryArgs = chttpd_view:parse_view_params(Req, nil, nil), + #view_query_args{ + list = ListName + } = QueryArgs, + case ListName of + nil -> + {Fields} = chttpd:json_body_obj(Req), + case couch_util:get_value(<<"keys">>, Fields, nil) of + nil -> + Fmt = "POST to view ~p/~p in database ~p with no keys member.", + ?LOG_DEBUG(Fmt, [DName, ViewName, Db]), + chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil); + Keys when is_list(Keys) -> + chttpd_view:design_doc_view(Req, Db, DName, ViewName, Keys); + _ -> + throw({bad_request, "`keys` member must be a array."}) + end; + _ -> + ReqBody = chttpd:body(Req), + {Props2} = ?JSON_DECODE(ReqBody), + Keys = couch_util:get_value(<<"keys">>, Props2, nil), + chttpd_show:handle_view_list(Req#httpd{req_body=ReqBody}, + DName, ListName, DName, ViewName, Db, Keys) + end; + +handle_db_view_req(Req, _Db) -> + send_method_not_allowed(Req, "GET,POST,HEAD"). + +handle_temp_view_req(#httpd{method='POST'}=Req, Db) -> + throw(not_implemented), + couch_stats_collector:increment({httpd, temporary_view_reads}), + {Props} = chttpd:json_body_obj(Req), + Language = couch_util:get_value(<<"language">>, Props, <<"javascript">>), + {DesignOptions} = couch_util:get_value(<<"options">>, Props, {[]}), + MapSrc = couch_util:get_value(<<"map">>, Props), + Keys = couch_util:get_value(<<"keys">>, Props, nil), + case couch_util:get_value(<<"reduce">>, Props, null) of + null -> + QueryArgs = parse_view_params(Req, Keys, map), + {ok, View, Group} = ?COUCH:get_temp_map_view(Db, Language, + DesignOptions, MapSrc), + output_map_view(Req, View, Group, Db, QueryArgs, Keys); + RedSrc -> + QueryArgs = parse_view_params(Req, Keys, reduce), + {ok, View, Group} = ?COUCH:get_temp_reduce_view(Db, Language, + DesignOptions, MapSrc, RedSrc), + output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) + end; + +handle_temp_view_req(Req, _Db) -> + send_method_not_allowed(Req, "POST"). 
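The POST variants above accept a keys filter in the request body; for example (keys invented):

%% Request body: {"keys": ["apple", "banana"]}
%% chttpd:json_body_obj(Req) => {[{<<"keys">>, [<<"apple">>, <<"banana">>]}]}
%% design_doc_view/5 is then invoked with Keys = [<<"apple">>, <<"banana">>].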
+ +output_map_view(Req, View, Group, Db, QueryArgs, nil) -> + #view_query_args{ + limit = Limit, + direction = Dir, + skip = SkipCount, + start_key = StartKey, + start_docid = StartDocId + } = QueryArgs, + CurrentEtag = view_group_etag(Group, Db), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + {ok, RowCount} = ?COUCH:get_row_count(View), + Start = {StartKey, StartDocId}, + FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, #view_fold_helper_funs{reduce_count=fun ?COUCH:reduce_to_count/1}), + FoldAccInit = {Limit, SkipCount, undefined, [], nil}, + FoldResult = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit), + finish_view_fold(Req, RowCount, FoldResult) + end); + +output_map_view(Req, View, Group, Db, QueryArgs, Keys) -> + #view_query_args{ + limit = Limit, + direction = Dir, + skip = SkipCount, + start_docid = StartDocId + } = QueryArgs, + CurrentEtag = view_group_etag(Group, Db, Keys), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + {ok, RowCount} = ?COUCH:get_row_count(View), + FoldAccInit = {Limit, SkipCount, undefined, [], nil}, + FoldResult = lists:foldl( + fun(Key, {ok, FoldAcc}) -> + Start = {Key, StartDocId}, + FoldlFun = make_view_fold_fun(Req, + QueryArgs#view_query_args{ + start_key = Key, + end_key = Key + }, CurrentEtag, Db, RowCount, + #view_fold_helper_funs{ + reduce_count = fun ?COUCH:reduce_to_count/1 + }), + ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAcc) + end, {ok, FoldAccInit}, Keys), + finish_view_fold(Req, RowCount, FoldResult) + end). + +output_reduce_view(Req, Db, View, Group, QueryArgs, nil) -> + #view_query_args{ + start_key = StartKey, + end_key = EndKey, + limit = Limit, + skip = Skip, + direction = Dir, + start_docid = StartDocId, + end_docid = EndDocId, + group_level = GroupLevel + } = QueryArgs, + CurrentEtag = view_group_etag(Group, Db), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}), + FoldAccInit = {Limit, Skip, undefined, []}, + {ok, {_, _, Resp, _}} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId}, + {EndKey, EndDocId}, GroupRowsFun, RespFun, FoldAccInit), + finish_reduce_fold(Req, Resp) + end); + +output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) -> + #view_query_args{ + limit = Limit, + skip = Skip, + direction = Dir, + start_docid = StartDocId, + end_docid = EndDocId, + group_level = GroupLevel + } = QueryArgs, + CurrentEtag = view_group_etag(Group, Db), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}), + {Resp, _RedAcc3} = lists:foldl( + fun(Key, {Resp, RedAcc}) -> + % run the reduce once for each key in keys, with limit etc reapplied for each key + FoldAccInit = {Limit, Skip, Resp, RedAcc}, + {_, {_, _, Resp2, RedAcc2}} = ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId}, + {Key, EndDocId}, GroupRowsFun, RespFun, FoldAccInit), + % Switch to comma + {Resp2, RedAcc2} + end, + {undefined, []}, Keys), % Start with no comma + finish_reduce_fold(Req, Resp) + end). + +reverse_key_default(nil) -> {}; +reverse_key_default({}) -> nil; +reverse_key_default(Key) -> Key. + +get_stale_type(Req) -> + list_to_atom(chttpd:qs_value(Req, "stale", "nil")). + +get_reduce_type(Req) -> + list_to_atom(chttpd:qs_value(Req, "reduce", "true")). 
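When keys are supplied, the fold above is restarted once per key, so limit and skip are reapplied for every key rather than once across the whole response. A toy sketch of that behaviour over a plain sorted list standing in for a map view (everything here is illustrative):

-module(multikey_fold_sketch).
-export([rows_for_keys/3, demo/0]).

%% Each requested key is folded separately, mirroring the lists:foldl over
%% Keys in output_map_view/6 above; Limit starts over for every key.
rows_for_keys(Rows, Keys, Limit) ->
    lists:foldl(fun(Key, Acc) ->
        Matches = [{K, V} || {K, V} <- Rows, K =:= Key],
        Acc ++ lists:sublist(Matches, Limit)
    end, [], Keys).

demo() ->
    Rows = [{a,1},{a,2},{a,3},{b,4},{c,5}],
    %% limit=2 is applied per key, not across the whole response
    [{a,1},{a,2},{b,4}] = rows_for_keys(Rows, [a,b], 2),
    ok.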
+ +parse_view_params(Req, Keys, ViewType) -> + QueryList = chttpd:qs(Req), + QueryParams = + lists:foldl(fun({K, V}, Acc) -> + parse_view_param(K, V) ++ Acc + end, [], QueryList), + IsMultiGet = case Keys of + nil -> false; + _ -> true + end, + Args = #view_query_args{ + view_type=ViewType, + multi_get=IsMultiGet + }, + QueryArgs = lists:foldl(fun({K, V}, Args2) -> + validate_view_query(K, V, Args2) + end, Args, lists:reverse(QueryParams)), % Reverse to match QS order. + + GroupLevel = QueryArgs#view_query_args.group_level, + case {ViewType, GroupLevel, IsMultiGet} of + {reduce, exact, true} -> + QueryArgs; + {reduce, _, false} -> + QueryArgs; + {reduce, _, _} -> + Msg = <<"Multi-key fetchs for reduce " + "view must include `group=true`">>, + throw({query_parse_error, Msg}); + _ -> + QueryArgs + end, + QueryArgs. + +parse_view_param("", _) -> + []; +parse_view_param("key", Value) -> + JsonKey = ?JSON_DECODE(Value), + [{start_key, JsonKey}, {end_key, JsonKey}]; +parse_view_param("startkey_docid", Value) -> + [{start_docid, ?l2b(Value)}]; +parse_view_param("endkey_docid", Value) -> + [{end_docid, ?l2b(Value)}]; +parse_view_param("startkey", Value) -> + [{start_key, ?JSON_DECODE(Value)}]; +parse_view_param("endkey", Value) -> + [{end_key, ?JSON_DECODE(Value)}]; +parse_view_param("limit", Value) -> + [{limit, parse_positive_int_param(Value)}]; +parse_view_param("count", _Value) -> + throw({query_parse_error, <<"Query parameter 'count' is now 'limit'.">>}); +parse_view_param("stale", "ok") -> + [{stale, ok}]; +parse_view_param("stale", _Value) -> + throw({query_parse_error, <<"stale only available as stale=ok">>}); +parse_view_param("update", _Value) -> + throw({query_parse_error, <<"update=false is now stale=ok">>}); +parse_view_param("descending", Value) -> + [{descending, parse_bool_param(Value)}]; +parse_view_param("skip", Value) -> + [{skip, parse_int_param(Value)}]; +parse_view_param("group", Value) -> + case parse_bool_param(Value) of + true -> [{group_level, exact}]; + false -> [{group_level, 0}] + end; +parse_view_param("group_level", Value) -> + [{group_level, parse_positive_int_param(Value)}]; +parse_view_param("inclusive_end", Value) -> + [{inclusive_end, parse_bool_param(Value)}]; +parse_view_param("reduce", Value) -> + [{reduce, parse_bool_param(Value)}]; +parse_view_param("include_docs", Value) -> + [{include_docs, parse_bool_param(Value)}]; +parse_view_param("list", Value) -> + [{list, ?l2b(Value)}]; +parse_view_param("callback", _) -> + []; % Verified in the JSON response functions +parse_view_param("show_total_rows", Value) -> + [{show_total_rows, parse_bool_param(Value)}]; +parse_view_param(Key, Value) -> + [{extra, {Key, Value}}]. 
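parse_view_params/3 works in two passes: each query-string pair is first expanded into zero or more typed options, and those options are then folded into a #view_query_args{} record by validate_view_query/3 below. A cut-down sketch of the first pass, covering only a few parameters and using a fake JSON decoder in place of ?JSON_DECODE; none of these names are part of the patch:

-module(view_qs_sketch).
-export([parse/1, demo/0]).

%% Simplified version of parse_view_param/2: each pair expands to zero or
%% more typed options; unknown keys are kept under 'extra'.
param("key", V)        -> K = decode(V), [{start_key, K}, {end_key, K}];
param("limit", V)      -> [{limit, list_to_integer(V)}];
param("descending", V) -> [{descending, V =:= "true"}];
param("group", "true") -> [{group_level, exact}];
param("group", _)      -> [{group_level, 0}];
param(K, V)            -> [{extra, {K, V}}].

%% stand-in for ?JSON_DECODE; the real code decodes arbitrary JSON keys
decode(V) -> list_to_binary(V).

parse(QueryList) ->
    lists:foldl(fun({K, V}, Acc) -> param(K, V) ++ Acc end, [], QueryList).

demo() ->
    Opts = parse([{"key", "\"abc\""}, {"limit", "10"}, {"group", "true"}]),
    10 = proplists:get_value(limit, Opts),
    exact = proplists:get_value(group_level, Opts),
    true = proplists:get_value(start_key, Opts) =:= proplists:get_value(end_key, Opts),
    ok.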
+
+validate_view_query(start_key, Value, Args) ->
+    case Args#view_query_args.multi_get of
+    true ->
+        Msg = <<"Query parameter `start_key` is "
+            "not compatible with multi-get">>,
+        throw({query_parse_error, Msg});
+    _ ->
+        Args#view_query_args{start_key=Value}
+    end;
+validate_view_query(start_docid, Value, Args) ->
+    Args#view_query_args{start_docid=Value};
+validate_view_query(end_key, Value, Args) ->
+    case Args#view_query_args.multi_get of
+    true ->
+        Msg = <<"Query parameter `end_key` is "
+            "not compatible with multi-get">>,
+        throw({query_parse_error, Msg});
+    _ ->
+        Args#view_query_args{end_key=Value}
+    end;
+validate_view_query(end_docid, Value, Args) ->
+    Args#view_query_args{end_docid=Value};
+validate_view_query(limit, Value, Args) ->
+    Args#view_query_args{limit=Value};
+validate_view_query(list, Value, Args) ->
+    Args#view_query_args{list=Value};
+validate_view_query(stale, Value, Args) ->
+    Args#view_query_args{stale=Value};
+validate_view_query(descending, true, Args) ->
+    case Args#view_query_args.direction of
+    rev -> Args; % Already reversed
+    fwd ->
+        Args#view_query_args{
+            direction = rev,
+            start_key =
+                reverse_key_default(Args#view_query_args.start_key),
+            start_docid =
+                reverse_key_default(Args#view_query_args.start_docid),
+            end_key =
+                reverse_key_default(Args#view_query_args.end_key),
+            end_docid =
+                reverse_key_default(Args#view_query_args.end_docid)
+        }
+    end;
+validate_view_query(descending, false, Args) ->
+    Args; % Ignore default condition
+validate_view_query(skip, Value, Args) ->
+    Args#view_query_args{skip=Value};
+validate_view_query(group_level, Value, Args) ->
+    case Args#view_query_args.view_type of
+    reduce ->
+        Args#view_query_args{group_level=Value};
+    _ ->
+        Msg = <<"Invalid URL parameter 'group' or "
+            "'group_level' for non-reduce view.">>,
+        throw({query_parse_error, Msg})
+    end;
+validate_view_query(inclusive_end, Value, Args) ->
+    Args#view_query_args{inclusive_end=Value};
+validate_view_query(reduce, _, Args) ->
+    case Args#view_query_args.view_type of
+    map ->
+        Msg = <<"Invalid URL parameter `reduce` for map view.">>,
+        throw({query_parse_error, Msg});
+    _ ->
+        Args
+    end;
+validate_view_query(include_docs, true, Args) ->
+    case Args#view_query_args.view_type of
+    reduce ->
+        Msg = <<"Query parameter `include_docs` "
+            "is invalid for reduce views.">>,
+        throw({query_parse_error, Msg});
+    _ ->
+        Args#view_query_args{include_docs=true}
+    end;
+validate_view_query(include_docs, _Value, Args) ->
+    Args;
+validate_view_query(show_total_rows, false, Args) ->
+    Args#view_query_args{show_total_rows=false};
+validate_view_query(show_total_rows, _Value, Args) ->
+    Args;
+validate_view_query(extra, _Value, Args) ->
+    Args.
+
+make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
+    #view_query_args{
+        end_key = EndKey,
+        end_docid = EndDocId,
+        inclusive_end = InclusiveEnd,
+        direction = Dir
+    } = QueryArgs,
+
+    #view_fold_helper_funs{
+        passed_end = PassedEndFun,
+        start_response = StartRespFun,
+        send_row = SendRowFun,
+        reduce_count = ReduceCountFun
+    } = apply_default_helper_funs(HelperFuns,
+        {Dir, EndKey, EndDocId, InclusiveEnd}),
+
+    #view_query_args{
+        include_docs = IncludeDocs
+    } = QueryArgs,
+
+    fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc,
+            OffsetAcc}) ->
+        PassedEnd = PassedEndFun(Key, DocId),
+        case {PassedEnd, AccLimit, AccSkip, Resp} of
+        {true, _, _, _} ->
+            % The stop key has been passed, stop looping.
+            % We may need offset so calculate it here.
+            % Checking Resp is an optimization that tells
+            % us it's already been calculated (and sent).
+            NewOffset = case Resp of
+            undefined -> ReduceCountFun(OffsetReds);
+            _ -> nil
+            end,
+            {stop, {AccLimit, AccSkip, Resp, RowFunAcc, NewOffset}};
+        {_, 0, _, _} ->
+            % we've done "limit" rows, stop folding
+            {stop, {0, 0, Resp, RowFunAcc, OffsetAcc}};
+        {_, _, AccSkip, _} when AccSkip > 0 ->
+            % just keep skipping
+            {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc, OffsetAcc}};
+        {_, _, _, undefined} ->
+            % rendering the first row, first we start the response
+            Offset = ReduceCountFun(OffsetReds),
+            {ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag,
+                TotalViewCount, Offset, RowFunAcc),
+            {Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value},
+                IncludeDocs, RowFunAcc0),
+            {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2, Offset}};
+        {_, AccLimit, _, Resp} when (AccLimit > 0) ->
+            % rendering all other rows
+            {Go, RowFunAcc2} = SendRowFun(Resp, Db, {{Key, DocId}, Value},
+                IncludeDocs, RowFunAcc),
+            {Go, {AccLimit - 1, 0, Resp, RowFunAcc2, OffsetAcc}}
+        end
+    end.
+
+make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
+    #reduce_fold_helper_funs{
+        start_response = StartRespFun,
+        send_row = SendRowFun
+    } = apply_default_helper_funs(HelperFuns),
+
+    GroupRowsFun =
+        fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
+            true;
+        ({Key1,_}, {Key2,_})
+                when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
+            lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
+        ({Key1,_}, {Key2,_}) ->
+            Key1 == Key2
+        end,
+
+    RespFun = fun
+    (_Key, _Red, {AccLimit, AccSkip, Resp, RowAcc}) when AccSkip > 0 ->
+        % keep skipping
+        {ok, {AccLimit, AccSkip - 1, Resp, RowAcc}};
+    (_Key, _Red, {0, _AccSkip, Resp, RowAcc}) ->
+        % we've exhausted limit rows, stop
+        {stop, {0, _AccSkip, Resp, RowAcc}};
+
+    (_Key, Red, {AccLimit, 0, undefined, RowAcc0}) when GroupLevel == 0 ->
+        % we haven't started responding yet and group=false
+        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
+        {Go, RowAcc2} = SendRowFun(Resp2, {null, Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
+    (_Key, Red, {AccLimit, 0, Resp, RowAcc}) when GroupLevel == 0 ->
+        % group=false but we've already started the response
+        {Go, RowAcc2} = SendRowFun(Resp, {null, Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp, RowAcc2}};
+
+    (Key, Red, {AccLimit, 0, undefined, RowAcc0})
+            when is_integer(GroupLevel), is_list(Key) ->
+        % group_level and we haven't responded yet
+        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
+        {Go, RowAcc2} = SendRowFun(Resp2, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
+    (Key, Red, {AccLimit, 0, Resp, RowAcc})
+            when is_integer(GroupLevel), is_list(Key) ->
+        % group_level and we've already started the response
+        {Go, RowAcc2} = SendRowFun(Resp, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp, RowAcc2}};
+
+    (Key, Red, {AccLimit, 0, undefined, RowAcc0}) ->
+        % group=true and we haven't responded yet
+        {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
+        {Go, RowAcc2} = SendRowFun(Resp2, {Key, Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
+    (Key, Red, {AccLimit, 0, Resp, RowAcc}) ->
+        % group=true and we've already started the response
+        {Go, RowAcc2} = SendRowFun(Resp, {Key, Red}, RowAcc),
+        {Go, {AccLimit - 1, 0, Resp, RowAcc2}}
+    end,
+    {ok, GroupRowsFun, RespFun}.
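The GroupRowsFun above decides whether two rows fall into the same reduce group: group=false (GroupLevel 0) collapses everything into one group, an integer group_level compares key prefixes, and group=true compares whole keys. A standalone restatement of just that decision, with an illustrative module name:

-module(group_rows_sketch).
-export([same_group/3, demo/0]).

%% Mirrors GroupRowsFun: are two view keys in the same reduce group?
same_group(_K1, _K2, 0) ->
    true;                                   % group=false: one global group
same_group(K1, K2, N) when is_integer(N), is_list(K1), is_list(K2) ->
    lists:sublist(K1, N) =:= lists:sublist(K2, N);
same_group(K1, K2, _Exact) ->
    K1 =:= K2.                              % group=true: exact key match

demo() ->
    true  = same_group([2010, 6, 9], [2010, 6, 10], 2),   % same year and month
    false = same_group([2010, 6, 9], [2010, 7, 1], 2),
    true  = same_group(anything, anything_else, 0),
    false = same_group([1], [2], exact),
    ok.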
+ +apply_default_helper_funs(#view_fold_helper_funs{ + passed_end = PassedEnd, + start_response = StartResp, + send_row = SendRow +}=Helpers, {Dir, EndKey, EndDocId, InclusiveEnd}) -> + PassedEnd2 = case PassedEnd of + undefined -> make_passed_end_fun(Dir, EndKey, EndDocId, InclusiveEnd); + _ -> PassedEnd + end, + + StartResp2 = case StartResp of + undefined -> fun json_view_start_resp/5; + _ -> StartResp + end, + + SendRow2 = case SendRow of + undefined -> fun send_json_view_row/5; + _ -> SendRow + end, + + Helpers#view_fold_helper_funs{ + passed_end = PassedEnd2, + start_response = StartResp2, + send_row = SendRow2 + }. + +apply_default_helper_funs(#reduce_fold_helper_funs{ + start_response = StartResp, + send_row = SendRow +}=Helpers) -> + StartResp2 = case StartResp of + undefined -> fun json_reduce_start_resp/3; + _ -> StartResp + end, + + SendRow2 = case SendRow of + undefined -> fun send_json_reduce_row/3; + _ -> SendRow + end, + + Helpers#reduce_fold_helper_funs{ + start_response = StartResp2, + send_row = SendRow2 + }. + +make_passed_end_fun(fwd, EndKey, EndDocId, InclusiveEnd) -> + case InclusiveEnd of + true -> + fun(ViewKey, ViewId) -> + couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) + end; + false -> + fun + (ViewKey, _ViewId) when ViewKey == EndKey -> + true; + (ViewKey, ViewId) -> + couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) + end + end; + +make_passed_end_fun(rev, EndKey, EndDocId, InclusiveEnd) -> + case InclusiveEnd of + true -> + fun(ViewKey, ViewId) -> + couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) + end; + false-> + fun + (ViewKey, _ViewId) when ViewKey == EndKey -> + true; + (ViewKey, ViewId) -> + couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) + end + end. + +json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc) -> + {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]), + BeginBody = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n", + [TotalViewCount, Offset]), + {ok, Resp, BeginBody}. + +send_json_view_row(Resp, Db, {{Key, DocId}, Value}, IncludeDocs, RowFront) -> + JsonObj = view_row_obj(Db, {{Key, DocId}, Value}, IncludeDocs), + send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonObj)), + {ok, ",\r\n"}. + +json_reduce_start_resp(Req, Etag, _Acc0) -> + {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]), + {ok, Resp, "{\"rows\":[\r\n"}. + +send_json_reduce_row(Resp, {Key, Value}, RowFront) -> + send_chunk(Resp, RowFront ++ ?JSON_ENCODE({[{key, Key}, {value, Value}]})), + {ok, ",\r\n"}. + +view_group_etag(Group, Db) -> + view_group_etag(Group, Db, nil). + +view_group_etag(#group{sig=Sig,current_seq=CurrentSeq}, _Db, Extra) -> + % ?LOG_ERROR("Group ~p",[Group]), + % This is not as granular as it could be. + % If there are updates to the db that do not effect the view index, + % they will change the Etag. For more granular Etags we'd need to keep + % track of the last Db seq that caused an index change. + chttpd:make_etag({Sig, CurrentSeq, Extra}). 
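view_group_etag/3 builds the ETag from the view group signature, its update sequence, and any extra term (such as the requested keys); chttpd:make_etag/1, whose body appears later in this series, hashes that term and renders it as a quoted base-36 string. A rough self-contained equivalent, purely for illustration:

-module(etag_sketch).
-export([make_etag/1, demo/0]).

%% Hash an arbitrary term and render it as a quoted base-36 string,
%% along the lines of chttpd:make_etag/1.
make_etag(Term) ->
    <<SigInt:128/integer>> = erlang:md5(term_to_binary(Term)),
    list_to_binary(io_lib:format("\"~.36B\"", [SigInt])).

demo() ->
    Sig = erlang:md5(<<"function(doc) { emit(doc._id, 1); }">>),
    CurrentSeq = 42,
    Keys = nil,
    Etag = make_etag({Sig, CurrentSeq, Keys}),
    %% same inputs give the same ETag; changing the group signature,
    %% update seq, or keys yields a different one
    Etag = make_etag({Sig, CurrentSeq, Keys}),
    true = Etag =/= make_etag({Sig, CurrentSeq + 1, Keys}),
    ok.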
+ +% the view row has an error +view_row_obj(_Db, {{Key, error}, Value}, _IncludeDocs) -> + {[{key, Key}, {error, Value}]}; +% include docs in the view output +view_row_obj(Db, {{Key, DocId}, {Props}}, true) -> + Rev = case couch_util:get_value(<<"_rev">>, Props) of + undefined -> + nil; + Rev0 -> + couch_doc:parse_rev(Rev0) + end, + view_row_with_doc(Db, {{Key, DocId}, {Props}}, Rev); +view_row_obj(Db, {{Key, DocId}, Value}, true) -> + view_row_with_doc(Db, {{Key, DocId}, Value}, nil); +% the normal case for rendering a view row +view_row_obj(_Db, {{Key, DocId}, Value}, _IncludeDocs) -> + {[{id, DocId}, {key, Key}, {value, Value}]}. + +view_row_with_doc(Db, {{Key, DocId}, Value}, Rev) -> + ?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]), + case (catch chttpd_db:couch_doc_open(Db, DocId, Rev, [])) of + {{not_found, missing}, _RevId} -> + {[{id, DocId}, {key, Key}, {value, Value}, {error, missing}]}; + {not_found, missing} -> + {[{id, DocId}, {key, Key}, {value, Value}, {error, missing}]}; + {not_found, deleted} -> + {[{id, DocId}, {key, Key}, {value, Value}]}; + Doc -> + JsonDoc = couch_doc:to_json_obj(Doc, []), + {[{id, DocId}, {key, Key}, {value, Value}, {doc, JsonDoc}]} + end. + +finish_view_fold(Req, TotalRows, FoldResult) -> + case FoldResult of + {ok, {_, _, undefined, _, Offset}} -> + % nothing found in the view, nothing has been returned + % send empty view + NewOffset = case Offset of + nil -> TotalRows; + _ -> Offset + end, + send_json(Req, 200, {[ + {total_rows, TotalRows}, + {offset, NewOffset}, + {rows, []} + ]}); + {ok, {_, _, Resp, _, _}} -> + % end the view + send_chunk(Resp, "\r\n]}"), + end_json_response(Resp); + Error -> + throw(Error) + end. + +finish_reduce_fold(Req, Resp) -> + case Resp of + undefined -> + send_json(Req, 200, {[ + {rows, []} + ]}); + Resp -> + send_chunk(Resp, "\r\n]}"), + end_json_response(Resp) + end. + +parse_bool_param("true") -> true; +parse_bool_param("false") -> false; +parse_bool_param(Val) -> + Msg = io_lib:format("Invalid value for boolean paramter: ~p", [Val]), + throw({query_parse_error, ?l2b(Msg)}). + +parse_int_param(Val) -> + case (catch list_to_integer(Val)) of + IntVal when is_integer(IntVal) -> + IntVal; + _ -> + Msg = io_lib:format("Invalid value for integer parameter: ~p", [Val]), + throw({query_parse_error, ?l2b(Msg)}) + end. + +parse_positive_int_param(Val) -> + case parse_int_param(Val) of + IntVal when IntVal >= 0 -> + IntVal; + _ -> + Fmt = "Invalid value for positive integer parameter: ~p", + Msg = io_lib:format(Fmt, [Val]), + throw({query_parse_error, ?l2b(Msg)}) + end. -- cgit v1.2.3 From 6fe520088f7f1d4b8823b8f8c22209244e68969b Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 12:28:15 -0400 Subject: simpler startup, rely on constant pools for handler tables --- src/chttpd.erl | 123 +++++++++++++++++++++++++++------------------------------ 1 file changed, 58 insertions(+), 65 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index a062f89a..dbaea5bb 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -13,7 +13,7 @@ -module(chttpd). -include("chttpd.hrl"). --export([start_link/0, stop/0, handle_request/5, config_change/2]). +-export([start_link/0, stop/0, handle_request/1, config_change/2]). -export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2,body_length/1]). -export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]). @@ -26,67 +26,21 @@ -export([send_json/2,send_json/3,send_json/4]). 
start_link() -> - BindAddress = couch_config:get("chttpd", "bind_address", any), - Port = couch_config:get("chttpd", "port", "5984"), - Backlog = list_to_integer(couch_config:get("chttpd", "backlog", "128")), - - Default = fun chttpd_db:handle_request/1, - - UrlHandlers = [ - {<<"/">>, fun chttpd_misc:handle_welcome_req/1}, - {<<"favicon.ico">>, fun chttpd_misc:handle_favicon_req/1}, - {<<"_utils">>, fun chttpd_misc:handle_utils_dir_req/1}, - {<<"_all_dbs">>, fun chttpd_misc:handle_all_dbs_req/1}, - {<<"_active_tasks">>, fun chttpd_misc:handle_task_status_req/1}, - {<<"_config">>, fun chttpd_misc:handle_config_req/1}, - {<<"_replicate">>, fun chttpd_misc:handle_replicate_req/1}, - {<<"_uuids">>, fun chttpd_misc:handle_uuids_req/1}, - {<<"_log">>, fun chttpd_misc:handle_log_req/1}, - {<<"_sleep">>, fun chttpd_misc:handle_sleep_req/1}, - {<<"_session">>, fun chttpd_auth:handle_session_req/1}, - {<<"_user">>, fun chttpd_auth:handle_user_req/1}, - {<<"_oauth">>, fun chttpd_oauth:handle_oauth_req/1}, - {<<"_stats">>, fun chttpd_stats:handle_stats_req/1}, - {<<"_restart">>, fun showroom_http:handle_restart_req/1}, - {<<"_cloudant">>, fun showroom_httpd_admin:handle_cloudant_req/1} - ], - - DbHandlers = [ - {<<"_view_cleanup">>, fun chttpd_view:handle_view_cleanup_req/2}, - {<<"_compact">>, fun chttpd_db:handle_compact_req/2}, - {<<"_design">>, fun chttpd_db:handle_design_req/2}, - {<<"_view">>, fun chttpd_db:handle_db_view_req/2}, - {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2}, - {<<"_changes">>, fun chttpd_db:handle_changes_req/2} - ], - - DesignHandlers = [ - {<<"_view">>, fun chttpd_view:handle_view_req/2}, - {<<"_show">>, fun chttpd_show:handle_doc_show_req/2}, - {<<"_list">>, fun chttpd_show:handle_view_list_req/2}, - {<<"_update">>, fun chttpd_show:handle_doc_update_req/2}, - {<<"_info">>, fun chttpd_db:handle_design_info_req/2} - ], - - Loop = fun(Req)-> ?MODULE:handle_request(Req, Default, UrlHandlers, - DbHandlers, DesignHandlers) end, - - {ok, Pid} = case mochiweb_http:start([ - {loop, Loop}, + Options = [ + {loop, fun ?MODULE:handle_request/1}, {name, ?MODULE}, - {ip, BindAddress}, - {port, Port}, - {backlog, Backlog} - ]) of - {ok, MochiPid} -> {ok, MochiPid}; + {ip, couch_config:get("chttpd", "bind_address", any)}, + {port, couch_config:get("chttpd", "port", "5984")}, + {backlog, list_to_integer(couch_config:get("chttpd", "backlog", "128"))} + ], + case mochiweb_http:start(Options) of + {ok, Pid} -> + ok = couch_config:register(fun ?MODULE:config_change/2, Pid), + {ok, Pid}; {error, Reason} -> - io:format("Failure to start Mochiweb: ~s~n",[Reason]), - throw({error, Reason}) - end, - - ok = couch_config:register(fun ?MODULE:config_change/2, Pid), - - {ok, Pid}. + io:format("Failure to start Mochiweb: ~s~n", [Reason]), + {error, Reason} + end. config_change("chttpd", "bind_address") -> ?MODULE:stop(); @@ -98,10 +52,11 @@ config_change("chttpd", "backlog") -> stop() -> mochiweb_http:stop(?MODULE). 
-handle_request(MochiReq, DefaultFun, - UrlHandlers, DbUrlHandlers, DesignUrlHandlers) -> +handle_request(MochiReq) -> Begin = now(), + DefaultFun = fun chttpd_db:handle_request/1, + AuthenticationFuns = [ fun chttpd_auth:cookie_authentication_handler/1, fun chttpd_auth:default_authentication_handler/1 @@ -147,11 +102,11 @@ handle_request(MochiReq, DefaultFun, method = Method, path_parts = [list_to_binary(chttpd:unquote(Part)) || Part <- string:tokens(Path, "/")], - db_url_handlers = DbUrlHandlers, - design_url_handlers = DesignUrlHandlers + db_url_handlers = db_url_handlers(), + design_url_handlers = design_url_handlers() }, - HandlerFun = couch_util:get_value(HandlerKey, UrlHandlers, DefaultFun), + HandlerFun = couch_util:get_value(HandlerKey, url_handlers(), DefaultFun), {ok, Resp} = try erase(cookie_auth_failed), @@ -226,6 +181,44 @@ authenticate_request(Response, _AuthFuns) -> increment_method_stats(Method) -> couch_stats_collector:increment({httpd_request_methods, Method}). +url_handlers() -> + [ + {<<"/">>, fun chttpd_misc:handle_welcome_req/1}, + {<<"favicon.ico">>, fun chttpd_misc:handle_favicon_req/1}, + {<<"_utils">>, fun chttpd_misc:handle_utils_dir_req/1}, + {<<"_all_dbs">>, fun chttpd_misc:handle_all_dbs_req/1}, + {<<"_active_tasks">>, fun chttpd_misc:handle_task_status_req/1}, + {<<"_config">>, fun chttpd_misc:handle_config_req/1}, + {<<"_replicate">>, fun chttpd_misc:handle_replicate_req/1}, + {<<"_uuids">>, fun chttpd_misc:handle_uuids_req/1}, + {<<"_log">>, fun chttpd_misc:handle_log_req/1}, + {<<"_sleep">>, fun chttpd_misc:handle_sleep_req/1}, + {<<"_session">>, fun chttpd_auth:handle_session_req/1}, + {<<"_user">>, fun chttpd_auth:handle_user_req/1}, + {<<"_oauth">>, fun chttpd_oauth:handle_oauth_req/1}, + {<<"_stats">>, fun chttpd_stats:handle_stats_req/1}, + {<<"_restart">>, fun showroom_http:handle_restart_req/1}, + {<<"_cloudant">>, fun showroom_httpd_admin:handle_cloudant_req/1} + ]. + +db_url_handlers() -> + [ + {<<"_view_cleanup">>, fun chttpd_view:handle_view_cleanup_req/2}, + {<<"_compact">>, fun chttpd_db:handle_compact_req/2}, + {<<"_design">>, fun chttpd_db:handle_design_req/2}, + {<<"_view">>, fun chttpd_db:handle_db_view_req/2}, + {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2}, + {<<"_changes">>, fun chttpd_db:handle_changes_req/2} + ]. + +design_url_handlers() -> + [ + {<<"_view">>, fun chttpd_view:handle_view_req/2}, + {<<"_show">>, fun chttpd_show:handle_doc_show_req/2}, + {<<"_list">>, fun chttpd_show:handle_view_list_req/2}, + {<<"_update">>, fun chttpd_show:handle_doc_update_req/2}, + {<<"_info">>, fun chttpd_db:handle_design_info_req/2} + ]. 
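Returning the handler tables from functions that yield literal lists, as above, means the lists live in the module's constant pool and are not rebuilt on every request; dispatch is then a proplist lookup with chttpd_db:handle_request/1 as the default. A toy sketch of that lookup (module name and handler atoms are placeholders, not the real funs):

-module(dispatch_sketch).
-export([handler_for/1, demo/0]).

handlers() ->
    %% literal list: the compiler keeps it in the module's constant pool
    [
        {<<"_all_dbs">>, all_dbs},
        {<<"_uuids">>, uuids},
        {<<"_session">>, session}
    ].

%% Anything that is not a special endpoint falls through to the database
%% handler, which in chttpd is chttpd_db:handle_request/1.
handler_for(FirstPathSegment) ->
    proplists:get_value(FirstPathSegment, handlers(), db_request).

demo() ->
    uuids = handler_for(<<"_uuids">>),
    db_request = handler_for(<<"mydb">>),
    ok.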
% Utilities -- cgit v1.2.3 From 5e712bdcfd887769e473e973bdcc4949036c9799 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 12:49:38 -0400 Subject: yay, faster and simpler --- src/chttpd.erl | 48 +++++++++++++++++++----------------------------- 1 file changed, 19 insertions(+), 29 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index dbaea5bb..b78a2d22 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -55,8 +55,6 @@ stop() -> handle_request(MochiReq) -> Begin = now(), - DefaultFun = fun chttpd_db:handle_request/1, - AuthenticationFuns = [ fun chttpd_auth:cookie_authentication_handler/1, fun chttpd_auth:default_authentication_handler/1 @@ -67,14 +65,8 @@ handle_request(MochiReq) -> RawUri = MochiReq:get(raw_path), Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), MochiReq:get_header_value("Host")), Path = ?COUCH:db_path(RawUri, Customer), + {HandlerKey, _, _} = mochiweb_util:partition(Path, "/"), - HandlerKey = - case mochiweb_util:partition(Path, "/") of - {"", "", ""} -> - <<"/">>; % Special case the root url handler - {FirstPart, _, _} -> - list_to_binary(FirstPart) - end, LogForClosedSocket = io_lib:format("mochiweb_recv_error for ~s - ~p ~s", [ MochiReq:get(peer), MochiReq:get(method), @@ -106,12 +98,12 @@ handle_request(MochiReq) -> design_url_handlers = design_url_handlers() }, - HandlerFun = couch_util:get_value(HandlerKey, url_handlers(), DefaultFun), {ok, Resp} = try erase(cookie_auth_failed), case authenticate_request(HttpReq, AuthenticationFuns) of #httpd{} = Req -> + HandlerFun = url_handler(HandlerKey), HandlerFun(cloudant_auth:authorize_request(Req)); Response -> Response @@ -181,25 +173,23 @@ authenticate_request(Response, _AuthFuns) -> increment_method_stats(Method) -> couch_stats_collector:increment({httpd_request_methods, Method}). -url_handlers() -> - [ - {<<"/">>, fun chttpd_misc:handle_welcome_req/1}, - {<<"favicon.ico">>, fun chttpd_misc:handle_favicon_req/1}, - {<<"_utils">>, fun chttpd_misc:handle_utils_dir_req/1}, - {<<"_all_dbs">>, fun chttpd_misc:handle_all_dbs_req/1}, - {<<"_active_tasks">>, fun chttpd_misc:handle_task_status_req/1}, - {<<"_config">>, fun chttpd_misc:handle_config_req/1}, - {<<"_replicate">>, fun chttpd_misc:handle_replicate_req/1}, - {<<"_uuids">>, fun chttpd_misc:handle_uuids_req/1}, - {<<"_log">>, fun chttpd_misc:handle_log_req/1}, - {<<"_sleep">>, fun chttpd_misc:handle_sleep_req/1}, - {<<"_session">>, fun chttpd_auth:handle_session_req/1}, - {<<"_user">>, fun chttpd_auth:handle_user_req/1}, - {<<"_oauth">>, fun chttpd_oauth:handle_oauth_req/1}, - {<<"_stats">>, fun chttpd_stats:handle_stats_req/1}, - {<<"_restart">>, fun showroom_http:handle_restart_req/1}, - {<<"_cloudant">>, fun showroom_httpd_admin:handle_cloudant_req/1} - ]. 
+url_handler("") -> fun chttpd_misc:handle_welcome_req/1; +url_handler("favicon.ico") -> fun chttpd_misc:handle_favicon_req/1; +url_handler("_utils") -> fun chttpd_misc:handle_utils_dir_req/1; +url_handler("_all_dbs") -> fun chttpd_misc:handle_all_dbs_req/1; +url_handler("_active_tasks") -> fun chttpd_misc:handle_task_status_req/1; +url_handler("_config") -> fun chttpd_misc:handle_config_req/1; +url_handler("_replicate") -> fun chttpd_misc:handle_replicate_req/1; +url_handler("_uuids") -> fun chttpd_misc:handle_uuids_req/1; +url_handler("_log") -> fun chttpd_misc:handle_log_req/1; +url_handler("_sleep") -> fun chttpd_misc:handle_sleep_req/1; +url_handler("_session") -> fun chttpd_auth:handle_session_req/1; +url_handler("_user") -> fun chttpd_auth:handle_user_req/1; +url_handler("_oauth") -> fun chttpd_oauth:handle_oauth_req/1; +url_handler("_stats") -> fun chttpd_stats:handle_stats_req/1; +url_handler("_restart") -> fun showroom_http:handle_restart_req/1; +url_handler("_cloudant") -> fun showroom_httpd_admin:handle_cloudant_req/1; +url_handler(_) -> fun chttpd_db:handle_request/1. db_url_handlers() -> [ -- cgit v1.2.3 From 4da7e07ff67f9a0e29cd09c7c91ed3606ac01bdf Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 13:17:53 -0400 Subject: cosmetics --- src/chttpd.erl | 50 +++++++++++++++++++++++++++++--------------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index b78a2d22..daf9b4da 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -13,17 +13,17 @@ -module(chttpd). -include("chttpd.hrl"). --export([start_link/0, stop/0, handle_request/1, config_change/2]). - --export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2,body_length/1]). --export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]). --export([parse_form/1,json_body/1,json_body_obj/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]). --export([primary_header_value/2,partition/1,serve_file/3, server_header/0]). --export([start_chunked_response/3,send_chunk/2]). --export([start_response_length/4, send/2]). --export([start_json_response/2, start_json_response/3, end_json_response/1]). --export([send_response/4,send_method_not_allowed/2,send_error/4, send_redirect/2,send_chunked_error/2]). --export([send_json/2,send_json/3,send_json/4]). +-export([start_link/0, stop/0, handle_request/1, config_change/2, + primary_header_value/2, header_value/2, header_value/3, qs_value/2, + qs_value/3, qs/1, path/1, absolute_uri/2, body_length/1, + verify_is_server_admin/1, unquote/1, quote/1, recv/2,recv_chunked/4, + error_info/1, parse_form/1, json_body/1, json_body_obj/1, body/1, + doc_etag/1, make_etag/1, etag_respond/3, partition/1, serve_file/3, + server_header/0, start_chunked_response/3,send_chunk/2, + start_response_length/4, send/2, start_json_response/2, + start_json_response/3, end_json_response/1, send_response/4, + send_method_not_allowed/2, send_error/4, send_redirect/2, + send_chunked_error/2, send_json/2,send_json/3,send_json/4]). 
start_link() -> Options = [ @@ -63,7 +63,9 @@ handle_request(MochiReq) -> % for the path, use the raw path with the query string and fragment % removed, but URL quoting left intact RawUri = MochiReq:get(raw_path), - Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), MochiReq:get_header_value("Host")), + Customer = cloudant_util:customer_name( + MochiReq:get_header_value("X-Cloudant-User"), + MochiReq:get_header_value("Host")), Path = ?COUCH:db_path(RawUri, Customer), {HandlerKey, _, _} = mochiweb_util:partition(Path, "/"), @@ -155,7 +157,7 @@ authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthFuns) -> authenticate_request(#httpd{} = Req, [AuthFun|Rest]) -> authenticate_request(AuthFun(Req), Rest); authenticate_request(#httpd{} = Req, []) -> - case couch_config:get("chttpd_auth", "require_valid_user", "false") of + case couch_config:get("chttpd", "require_valid_user", "false") of "true" -> throw({unauthorized, <<"Authentication required.">>}); "false" -> @@ -260,15 +262,17 @@ absolute_uri(#httpd{mochi_req=MochiReq}, Path) -> Scheme = case MochiReq:get_header_value(XSsl) of "on" -> "https"; _ -> - XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"), + XProto = couch_config:get("httpd", "x_forwarded_proto", + "X-Forwarded-Proto"), case MochiReq:get_header_value(XProto) of % Restrict to "https" and "http" schemes only "https" -> "https"; _ -> "http" end end, - Customer = cloudant_util:customer_name(MochiReq:get_header_value("X-Cloudant-User"), - Host), + Customer = cloudant_util:customer_name( + MochiReq:get_header_value("X-Cloudant-User"), + Host), CustomerRegex = ["^/", Customer, "[/%2F]+"], NewPath = re:replace(Path, CustomerRegex, "/"), Scheme ++ "://" ++ Host ++ NewPath. @@ -329,7 +333,7 @@ doc_etag(#doc{revs={Start, [DiskRev|_]}}) -> make_etag(Term) -> <> = erlang:md5(term_to_binary(Term)), - list_to_binary("\"" ++ lists:flatten(io_lib:format("~.36B",[SigInt])) ++ "\""). + list_to_binary(io_lib:format("\"~.36B\"",[SigInt])). etag_match(Req, CurrentEtag) when is_binary(CurrentEtag) -> etag_match(Req, binary_to_list(CurrentEtag)); @@ -357,7 +361,8 @@ verify_is_server_admin(#httpd{user_ctx=#user_ctx{roles=Roles}}) -> start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) -> couch_stats_collector:increment({httpd_status_codes, Code}), - Resp = MochiReq:start_response_length({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Length}), + Resp = MochiReq:start_response_length({Code, Headers ++ server_header() ++ + chttpd_auth:cookie_auth_header(Req, Headers), Length}), case MochiReq:get(method) of 'HEAD' -> throw({http_head_abort, Resp}); _ -> ok @@ -370,7 +375,8 @@ send(Resp, Data) -> start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) -> couch_stats_collector:increment({httpd_status_codes, Code}), - Resp = MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), chunked}), + Resp = MochiReq:respond({Code, Headers ++ server_header() ++ + chttpd_auth:cookie_auth_header(Req, Headers), chunked}), case MochiReq:get(method) of 'HEAD' -> throw({http_head_abort, Resp}); _ -> ok @@ -387,10 +393,12 @@ send_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) -> ?LOG_DEBUG("httpd ~p error response:~n ~s", [Code, Body]); true -> ok end, - {ok, MochiReq:respond({Code, Headers ++ server_header() ++ chttpd_auth:cookie_auth_header(Req, Headers), Body})}. 
+ {ok, MochiReq:respond({Code, Headers ++ server_header() ++ + chttpd_auth:cookie_auth_header(Req, Headers), Body})}. send_method_not_allowed(Req, Methods) -> - send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, ?l2b("Only " ++ Methods ++ " allowed")). + send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, + ?l2b("Only " ++ Methods ++ " allowed")). send_json(Req, Value) -> send_json(Req, 200, Value). -- cgit v1.2.3 From cb6e67e0dffe925794a700e6e5149ededaa8bf7a Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 14:42:57 -0400 Subject: cleanup of customer_name code --- src/chttpd.erl | 11 +++-------- src/chttpd_db.erl | 10 +++------- src/chttpd_external.erl | 10 ++++------ src/chttpd_misc.erl | 3 +-- 4 files changed, 11 insertions(+), 23 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index daf9b4da..1cc15d7e 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -63,9 +63,7 @@ handle_request(MochiReq) -> % for the path, use the raw path with the query string and fragment % removed, but URL quoting left intact RawUri = MochiReq:get(raw_path), - Customer = cloudant_util:customer_name( - MochiReq:get_header_value("X-Cloudant-User"), - MochiReq:get_header_value("Host")), + Customer = cloudant_util:customer_name(#httpd{mochi_req=MochiReq}), Path = ?COUCH:db_path(RawUri, Customer), {HandlerKey, _, _} = mochiweb_util:partition(Path, "/"), @@ -245,7 +243,7 @@ qs(#httpd{mochi_req=MochiReq}) -> path(#httpd{mochi_req=MochiReq}) -> MochiReq:get(path). -absolute_uri(#httpd{mochi_req=MochiReq}, Path) -> +absolute_uri(#httpd{mochi_req=MochiReq} = Req, Path) -> XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"), Host = case MochiReq:get_header_value(XHost) of undefined -> @@ -270,10 +268,7 @@ absolute_uri(#httpd{mochi_req=MochiReq}, Path) -> _ -> "http" end end, - Customer = cloudant_util:customer_name( - MochiReq:get_header_value("X-Cloudant-User"), - Host), - CustomerRegex = ["^/", Customer, "[/%2F]+"], + CustomerRegex = ["^/", cloudant_util:customer_name(Req), "[/%2F]+"], NewPath = re:replace(Path, CustomerRegex, "/"), Scheme ++ "://" ++ Host ++ NewPath. diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 94a43e20..1ae5629d 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -165,10 +165,8 @@ do_db_req(#httpd{path_parts=[DbName|_]}=Req, Fun) -> Fun(Req, #db{name=DbName}). 
db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> - Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), - chttpd:header_value(Req, "Host")), - {ok, DbInfo} = ?COUCH:get_db_info(DbName, Customer), - send_json(Req, {DbInfo}); + {ok, DbInfo} = fabric:get_db_info(DbName), + send_json(Req, {cloudant_util:customer_db_info(Req, DbInfo)}); db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) -> Doc = couch_doc:from_json_obj(chttpd:json_body(Req)), @@ -332,9 +330,7 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> direction = Dir } = QueryArgs = chttpd_view:parse_view_params(Req, nil, map), - Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), - chttpd:header_value(Req, "Host")), - {ok, Info} = ?COUCH:get_db_info(Db, Customer), + {ok, Info} = fabric:get_db_info(Db), CurrentEtag = chttpd:make_etag(Info), chttpd:etag_respond(Req, CurrentEtag, fun() -> TotalRowCount = couch_util:get_value(doc_count, Info), diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index d096bff9..9566ec28 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -58,7 +58,7 @@ json_req_obj(#httpd{mochi_req=Req, method=Verb, path_parts=Path, req_body=ReqBody - }, Db) -> + } = HttpReq, Db) -> Body = case ReqBody of undefined -> Req:recv_body(); Else -> Else @@ -71,12 +71,10 @@ json_req_obj(#httpd{mochi_req=Req, end, Headers = Req:get(headers), Hlist = mochiweb_headers:to_list(Headers), - Customer = cloudant_util:customer_name( - Req:get_header_value("X-Cloudant-User"), Req:get_header_value("Host")), - {ok, Info} = ?COUCH:get_db_info(Db, Customer), + {ok, Info} = fabric:get_db_info(Db), % send correct path to customer - BugzID 6849 - CustomerBin = list_to_binary(Customer), + CustomerBin = list_to_binary(cloudant_util:customer_name(HttpReq)), Len = byte_size(CustomerBin), FixedPath = case Path of [<> | Rest] -> @@ -86,7 +84,7 @@ json_req_obj(#httpd{mochi_req=Req, end, % add headers... - {[{<<"info">>, {Info}}, + {[{<<"info">>, {cloudant_util:customer_db_info(HttpReq, Info)}}, {<<"verb">>, Verb}, {<<"path">>, FixedPath}, {<<"query">>, to_json_terms(Req:parse_qs())}, diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 8867dfbe..f67ebf22 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -87,8 +87,7 @@ handle_sleep_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). 
handle_all_dbs_req(#httpd{method='GET'}=Req) -> - Customer = cloudant_util:customer_name(chttpd:header_value(Req, "X-Cloudant-User"), - chttpd:header_value(Req, "Host")), + Customer = cloudant_util:customer_name(Req), {ok, DbNames} = fabric:all_dbs(Customer), send_json(Req, DbNames); handle_all_dbs_req(Req) -> -- cgit v1.2.3 From fc1012727de6cdfa3e82bfbdc3a4d87e9f064783 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 14:56:42 -0400 Subject: some ?COUCH cleanup, and replicated_changes updates --- src/chttpd_db.erl | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 1ae5629d..408df4c3 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -145,7 +145,7 @@ handle_design_info_req(Req, _Db) -> create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> N = chttpd:qs_value(Req, "n"), Q = chttpd:qs_value(Req, "q"), - case ?COUCH:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of + case fabric:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of ok -> DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), send_json(Req, 201, [{"Location", DocUrl}], {[{ok, true}]}); @@ -154,7 +154,7 @@ create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> end. delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> - case ?COUCH:delete_db(DbName, [{user_ctx, UserCtx}]) of + case fabric:delete_db(DbName, [{user_ctx, UserCtx}]) of ok -> send_json(Req, 200, {[{ok, true}]}); Error -> @@ -188,7 +188,7 @@ db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) -> ]}); _Normal -> % normal - {ok, NewRev} = ?COUCH:update_doc(Db, Doc2, []), + {ok, NewRev} = fabric:update_doc(Db, Doc2, []), DocUrl = absolute_uri( Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)), send_json(Req, 201, [{"Location", DocUrl}], {[ @@ -269,7 +269,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) -> true -> [all_or_nothing|Options]; _ -> Options end, - case ?COUCH:update_docs(Db, Docs, Options2) of + case fabric:update_docs(Db, Docs, Options2) of {ok, Results} -> % output the results DocResults = lists:zipwith(fun update_doc_result_to_json/2, @@ -282,7 +282,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) -> end; false -> Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray], - {ok, Errors} = ?COUCH:update_docs(Db, Docs, Options, replicated_changes), + {ok, Errors} = fabric:update_docs(Db, Docs, [replicated_changes|Options]), ErrorsJson = lists:map(fun update_doc_result_to_json/1, Errors), send_json(Req, 201, ErrorsJson) @@ -381,7 +381,7 @@ db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) -> db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) -> {JsonDocIdRevs} = chttpd:json_body_obj(Req), JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs], - {ok, Results} = ?COUCH:get_missing_revs(Db, JsonDocIdRevs2), + {ok, Results} = fabric:get_missing_revs(Db, JsonDocIdRevs2), Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results], send_json(Req, {[ {missing_revs, {Results2}} @@ -499,7 +499,7 @@ db_doc_req(#httpd{method='GET'}=Req, Db, DocId) -> send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options)) end; _ -> - {ok, Results} = ?COUCH:open_revs(Db, DocId, Revs, Options), + {ok, Results} = fabric:open_revs(Db, DocId, Revs, Options), {ok, Resp} = start_json_response(Req, 200), send_chunk(Resp, "["), % We loop through the docs. 
The first time through the separator @@ -536,7 +536,7 @@ db_doc_req(#httpd{method='POST'}=Req, Db, DocId) -> end, Form = chttpd:parse_form(Req), Rev = couch_doc:parse_rev(list_to_binary(couch_util:get_value("_rev", Form))), - {ok, [{ok, Doc}]} = ?COUCH:open_revs(Db, DocId, [Rev], []), + {ok, [{ok, Doc}]} = fabric:open_revs(Db, DocId, [Rev], []), UpdatedAtts = [ #att{name=validate_attachment_name(Name), @@ -556,7 +556,7 @@ db_doc_req(#httpd{method='POST'}=Req, Db, DocId) -> NewDoc = Doc#doc{ atts = UpdatedAtts ++ OldAtts2 }, - {ok, NewRev} = ?COUCH:update_doc(Db, NewDoc, []), + {ok, NewRev} = fabric:update_doc(Db, NewDoc, []), send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewRev)) ++ "\""}], {[ {ok, true}, @@ -593,7 +593,7 @@ db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) -> % open old doc Doc = couch_doc_open(Db, SourceDocId, SourceRev, []), % save new doc - {ok, NewTargetRev} = ?COUCH:update_doc(Db, + {ok, NewTargetRev} = fabric:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRevs}, []), % respond send_json(Req, 201, @@ -632,7 +632,7 @@ update_doc(Req, Db, DocId, Json, Headers) -> _ -> Options = [] end, - {Status, NewRev} = case ?COUCH:update_doc(Db, Doc, Options) of + {Status, NewRev} = case fabric:update_doc(Db, Doc, Options) of {ok, NewRev1} -> {201, NewRev1}; {accepted, NewRev1} -> {202, NewRev1} end, @@ -668,14 +668,14 @@ couch_doc_from_req(Req, DocId, Json) -> couch_doc_open(Db, DocId, Rev, Options) -> case Rev of nil -> % open most recent rev - case ?COUCH:open_doc(Db, DocId, Options) of + case fabric:open_doc(Db, DocId, Options) of {ok, Doc} -> Doc; Error -> throw(Error) end; _ -> % open a specific rev (deletions come back as stubs) - case ?COUCH:open_revs(Db, DocId, [Rev], Options) of + case fabric:open_revs(Db, DocId, [Rev], Options) of {ok, [{ok, Doc}]} -> Doc; {ok, [Else]} -> @@ -748,7 +748,7 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts) couch_doc:validate_docid(DocId), #doc{id=DocId}; Rev -> - case ?COUCH:open_revs(Db, DocId, [Rev], []) of + case fabric:open_revs(Db, DocId, [Rev], []) of {ok, [{ok, Doc0}]} -> Doc0; {ok, [Error]} -> throw(Error) end @@ -758,7 +758,7 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts) DocEdited = Doc#doc{ atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName] }, - {ok, UpdatedRev} = ?COUCH:update_doc(Db, DocEdited, []), + {ok, UpdatedRev} = fabric:update_doc(Db, DocEdited, []), DbName = couch_db:name(Db), {Status, Headers} = case Method of -- cgit v1.2.3 From 49cf1370b6b46b79f93b09868272d1ffec0a2879 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 15:36:58 -0400 Subject: bunch o' updates - fabric view group info - send user_ctx to fabric on update_docs requests - send 403s for compaction and view cleanup --- src/chttpd_db.erl | 65 ++++++++++++++++++++++--------------------------------- 1 file changed, 26 insertions(+), 39 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 408df4c3..7dba3827 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -101,22 +101,13 @@ handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) -> ok = ?COUCH:compact_view_group(DbName, Id), send_json(Req, 202, {[{ok, true}]}); -handle_compact_req(#httpd{method='POST'}=Req, Db) -> - StartSeq = chttpd:qs_value(Req, "start_seq", "0"), - ok = ?COUCH:compact_db(Db, list_to_integer(StartSeq)), - send_json(Req, 202, {[{ok, true}]}); - -handle_compact_req(Req, _Db) -> - send_method_not_allowed(Req, "POST"). 
- -handle_view_cleanup_req(#httpd{method='POST'}=Req, _Db) -> - % delete unreferenced index files - % ok = ?COUCH:cleanup_view_index_files(Db), - send_json(Req, 202, {[{ok, true}]}); - -handle_view_cleanup_req(Req, _Db) -> - send_method_not_allowed(Req, "POST"). +handle_compact_req(Req, _) -> + Msg = <<"Compaction is handled automatically by Cloudant">>, + chttpd:send_error(Req, 403, Msg). +handle_view_cleanup_req(Req, _) -> + Msg = <<"Old view indices are purged automatically by Cloudant">>, + chttpd:send_error(Req, 403, Msg). handle_design_req(#httpd{ path_parts=[_DbName,_Design,_DesName, <<"_",_/binary>> = Action | _Rest], @@ -128,14 +119,10 @@ handle_design_req(#httpd{ handle_design_req(Req, Db) -> db_req(Req, Db). -handle_design_info_req(#httpd{ - method='GET', - path_parts=[_DbName, _Design, DesignName, _] - }=Req, Db) -> - DesignId = <<"_design/", DesignName/binary>>, - {ok, GroupInfoList} = ?COUCH:get_view_group_info(Db, DesignId), +handle_design_info_req(#httpd{method='GET', path_parts=[_,_,Name,_]}=Req, Db) -> + {ok, GroupInfoList} = fabric:get_view_group_info(Db, Name), send_json(Req, 200, {[ - {name, DesignName}, + {name, <<"_design/", Name/binary>>}, {view_index, {GroupInfoList}} ]}); @@ -168,7 +155,7 @@ db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> {ok, DbInfo} = fabric:get_db_info(DbName), send_json(Req, {cloudant_util:customer_db_info(Req, DbInfo)}); -db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) -> +db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> Doc = couch_doc:from_json_obj(chttpd:json_body(Req)), Doc2 = case Doc#doc.id of <<"">> -> @@ -188,7 +175,7 @@ db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) -> ]}); _Normal -> % normal - {ok, NewRev} = fabric:update_doc(Db, Doc2, []), + {ok, NewRev} = fabric:update_doc(Db, Doc2, [{user_ctx,Ctx}]), DocUrl = absolute_uri( Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)), send_json(Req, 201, [{"Location", DocUrl}], {[ @@ -232,17 +219,17 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) - db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) -> send_method_not_allowed(Req, "POST"); -db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) -> +db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, Db) -> couch_stats_collector:increment({httpd, bulk_requests}), {JsonProps} = chttpd:json_body_obj(Req), DocsArray = couch_util:get_value(<<"docs">>, JsonProps), case chttpd:header_value(Req, "X-Couch-Full-Commit") of "true" -> - Options = [full_commit]; + Options = [full_commit, {user_ctx,Ctx}]; "false" -> - Options = [delay_commit]; + Options = [delay_commit, {user_ctx,Ctx}]; _ -> - Options = [] + Options = [{user_ctx,Ctx}] end, case couch_util:get_value(<<"new_edits">>, JsonProps, true) of true -> @@ -269,7 +256,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) -> true -> [all_or_nothing|Options]; _ -> Options end, - case fabric:update_docs(Db, Docs, Options2) of + case fabric:update_docs(Db, Docs, [Options2]) of {ok, Results} -> % output the results DocResults = lists:zipwith(fun update_doc_result_to_json/2, @@ -526,7 +513,7 @@ db_doc_req(#httpd{method='GET'}=Req, Db, DocId) -> chttpd_show:handle_doc_show(Req, DesignName, ShowName, DocId, Db) end; -db_doc_req(#httpd{method='POST'}=Req, Db, DocId) -> +db_doc_req(#httpd{method='POST', user_ctx=Ctx}=Req, Db, DocId) -> couch_doc:validate_docid(DocId), case chttpd:header_value(Req, "content-type") of 
"multipart/form-data" ++ _Rest -> @@ -556,7 +543,7 @@ db_doc_req(#httpd{method='POST'}=Req, Db, DocId) -> NewDoc = Doc#doc{ atts = UpdatedAtts ++ OldAtts2 }, - {ok, NewRev} = fabric:update_doc(Db, NewDoc, []), + {ok, NewRev} = fabric:update_doc(Db, NewDoc, [{user_ctx,Ctx}]), send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewRev)) ++ "\""}], {[ {ok, true}, @@ -583,7 +570,7 @@ db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) -> update_doc(Req, Db, DocId, Json, [{"Location", Location}]) end; -db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) -> +db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> SourceRev = case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of missing_rev -> nil; @@ -594,7 +581,7 @@ db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) -> Doc = couch_doc_open(Db, SourceDocId, SourceRev, []), % save new doc {ok, NewTargetRev} = fabric:update_doc(Db, - Doc#doc{id=TargetDocId, revs=TargetRevs}, []), + Doc#doc{id=TargetDocId, revs=TargetRevs}, [{user_ctx,Ctx}]), % respond send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}], @@ -621,16 +608,16 @@ update_doc_result_to_json(DocId, Error) -> update_doc(Req, Db, DocId, Json) -> update_doc(Req, Db, DocId, Json, []). -update_doc(Req, Db, DocId, Json, Headers) -> +update_doc(#httpd{user_ctx=Ctx} = Req, Db, DocId, Json, Headers) -> #doc{deleted=Deleted} = Doc = couch_doc_from_req(Req, DocId, Json), case chttpd:header_value(Req, "X-Couch-Full-Commit") of "true" -> - Options = [full_commit]; + Options = [full_commit, {user_ctx,Ctx}]; "false" -> - Options = [delay_commit]; + Options = [delay_commit, {user_ctx,Ctx}]; _ -> - Options = [] + Options = [{user_ctx,Ctx}] end, {Status, NewRev} = case fabric:update_doc(Db, Doc, Options) of {ok, NewRev1} -> {201, NewRev1}; @@ -712,7 +699,7 @@ db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) -> end; -db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts) +db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNameParts) when (Method == 'PUT') or (Method == 'DELETE') -> FileName = validate_attachment_name( mochiweb_util:join( @@ -758,7 +745,7 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts) DocEdited = Doc#doc{ atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName] }, - {ok, UpdatedRev} = fabric:update_doc(Db, DocEdited, []), + {ok, UpdatedRev} = fabric:update_doc(Db, DocEdited, [{user_ctx,Ctx}]), DbName = couch_db:name(Db), {Status, Headers} = case Method of -- cgit v1.2.3 From 056d208ad378fbb1416e79db317ed89d07bf2af6 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 16:33:33 -0400 Subject: _all_docs served over http via fabric, woot --- src/chttpd_db.erl | 92 ++++++++++++----------------------------------------- src/chttpd_misc.erl | 5 ++- 2 files changed, 24 insertions(+), 73 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 7dba3827..50adc276 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -308,67 +308,9 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) -> send_method_not_allowed(Req, "GET,HEAD,POST"); -db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs_by_seq">>]}=Req, Db) -> - throw(not_implemented), - #view_query_args{ - start_key = StartKey, - limit = Limit, - skip = SkipCount, - direction = Dir - } = QueryArgs = chttpd_view:parse_view_params(Req, nil, map), - - {ok, Info} = fabric:get_db_info(Db), 
- CurrentEtag = chttpd:make_etag(Info), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - TotalRowCount = couch_util:get_value(doc_count, Info), - FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, - TotalRowCount, #view_fold_helper_funs{ - reduce_count = fun ?COUCH:enum_docs_since_reduce_to_count/1 - }), - StartKey2 = case StartKey of - nil -> 0; - <<>> -> 100000000000; - {} -> 100000000000; - StartKey when is_integer(StartKey) -> StartKey - end, - {ok, FoldResult} = ?COUCH:enum_docs_since(Db, StartKey2, Dir, - fun(DocInfo, Offset, Acc) -> - #doc_info{ - id=Id, - high_seq=Seq, - revs=[#rev_info{rev=Rev,deleted=Deleted} | RestInfo] - } = DocInfo, - ConflictRevs = couch_doc:rev_to_strs( - [Rev1 || #rev_info{deleted=false, rev=Rev1} <- RestInfo]), - DelConflictRevs = couch_doc:rev_to_strs( - [Rev1 || #rev_info{deleted=true, rev=Rev1} <- RestInfo]), - Json = { - [{<<"rev">>, couch_doc:rev_to_str(Rev)}] ++ - case ConflictRevs of - [] -> []; - _ -> [{<<"conflicts">>, ConflictRevs}] - end ++ - case DelConflictRevs of - [] -> []; - _ -> [{<<"deleted_conflicts">>, DelConflictRevs}] - end ++ - case Deleted of - true -> [{<<"deleted">>, true}]; - false -> [] - end - }, - FoldlFun({{Seq, Id}, Json}, Offset, Acc) - end, {Limit, SkipCount, undefined, [], nil}), - chttpd_view:finish_view_fold(Req, TotalRowCount, {ok, FoldResult}) - end); - -db_req(#httpd{path_parts=[_,<<"_all_docs_by_seq">>]}=Req, _Db) -> - send_method_not_allowed(Req, "GET,HEAD"); - db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) -> {JsonDocIdRevs} = chttpd:json_body_obj(Req), - JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs], - {ok, Results} = fabric:get_missing_revs(Db, JsonDocIdRevs2), + {ok, Results} = fabric:get_missing_revs(Db, JsonDocIdRevs), Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results], send_json(Req, {[ {missing_revs, {Results2}} @@ -435,23 +377,29 @@ db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) -> db_attachment_req(Req, Db, DocId, FileNameParts). all_docs_view(Req, Db, Keys) -> - Etag = couch_util:new_uuid(), - QueryArgs = chttpd_view:parse_view_params(Req, nil, map), + T0 = now(), + {ok, Info} = fabric:get_db_info(Db), + Etag = couch_httpd:make_etag(Info), + ?LOG_INFO("_all_docs etag - ~p ms", [timer:now_diff(now(),T0) / 1000]), + QueryArgs = chttpd_view:parse_view_params(Req, Keys, map), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), - {ok, Total, Result} = ?COUCH:all_docs_view(Resp, Db, Keys, QueryArgs), - send_chunk(Resp, all_docs_final_chunk(Total, Result)), - end_json_response(Resp) + fabric:all_docs(Db, QueryArgs, fun all_docs_callback/2, {nil, Resp}) end). -all_docs_final_chunk(Total, {_, _, undefined, _, nil}) -> - ?JSON_ENCODE({[{total_rows, Total}, {offset, Total}, {rows, []}]}); -all_docs_final_chunk(Total, {_, _, undefined, _, Offset}) -> - ?JSON_ENCODE({[{total_rows, Total}, {offset, Offset}, {rows, []}]}); -all_docs_final_chunk(_, {_, _, _, _, _}) -> - "\r\n]}"; -all_docs_final_chunk(_, Error) -> - throw(Error). 
+all_docs_callback({total_and_offset, Total, Offset}, {_, Resp}) -> + Chunk = "{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n", + send_chunk(Resp, io_lib:format(Chunk, [Total, Offset])), + {ok, {"", Resp}}; +all_docs_callback({row, Row}, {Prepend, Resp}) -> + send_chunk(Resp, [Prepend, ?JSON_ENCODE(Row)]), + {ok, {",\r\n", Resp}}; +all_docs_callback(complete, {_, Resp}) -> + send_chunk(Resp, "\r\n]}"), + end_json_response(Resp), + {stop, Resp}; +all_docs_callback({error, Reason}, Resp) -> + chttpd:send_chunked_error(Resp, {error, Reason}). db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) -> % check for the existence of the doc to handle the 404 case. diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index f67ebf22..e6d3c76a 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -16,7 +16,7 @@ handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1, handle_uuids_req/1,handle_config_req/1,handle_log_req/1, handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, - handle_utils_dir_req/1]). + handle_utils_dir_req/1, handle_favicon_req/1]). -export([increment_update_seq_req/2]). @@ -57,6 +57,9 @@ get_version() -> end, list_to_binary(Version). +handle_favicon_req(Req) -> + handle_favicon_req(Req, couch_config:get("chttpd", "docroot")). + handle_favicon_req(#httpd{method='GET'}=Req, DocumentRoot) -> chttpd:serve_file(Req, "favicon.ico", DocumentRoot); handle_favicon_req(Req, _) -> -- cgit v1.2.3 From e7183bfda1bd88baec05f3461b42a098641c1f2d Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 16:37:22 -0400 Subject: allow _all_docs with keys, too --- src/chttpd_view.erl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index c9fb1c9f..b222246a 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -273,7 +273,8 @@ parse_view_params(Req, Keys, ViewType) -> end, Args = #view_query_args{ view_type=ViewType, - multi_get=IsMultiGet + multi_get=IsMultiGet, + keys=Keys }, QueryArgs = lists:foldl(fun({K, V}, Args2) -> validate_view_query(K, V, Args2) -- cgit v1.2.3 From e9c2571947b21f8a5ba4da5ed52ab4f7d0a3b395 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 16:46:22 -0400 Subject: minor fixes to _all_docs with keys --- src/chttpd_db.erl | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 50adc276..dead8a9b 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -297,13 +297,12 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> {Fields} = chttpd:json_body_obj(Req), - Keys = couch_util:get_value(<<"keys">>, Fields, nil), - case Keys of - Keys when is_list(Keys) -> ok; - nil -> ?LOG_DEBUG("POST to _all_docs with no keys member.", []); - _ -> throw({bad_request, "`keys` member must be a array."}) - end, - all_docs_view(Req, Db, Keys); + case couch_util:get_value(<<"keys">>, Fields) of + Keys when is_list(Keys) -> + all_docs_view(Req, Db, Keys); + _ -> + throw({bad_request, "`keys` body member must be an array."}) + end; db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) -> send_method_not_allowed(Req, "GET,HEAD,POST"); @@ -397,7 +396,7 @@ all_docs_callback({row, Row}, {Prepend, Resp}) -> all_docs_callback(complete, {_, Resp}) -> send_chunk(Resp, "\r\n]}"), end_json_response(Resp), - {stop, Resp}; + {ok, Resp}; all_docs_callback({error, Reason}, Resp) -> chttpd:send_chunked_error(Resp, {error, Reason}). 
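As the clauses above suggest, fabric drives this callback with a small event sequence: one total_and_offset, then zero or more row events, then complete (or error), and the accumulator threads the row separator so the first row is emitted without a leading comma. A toy driver that exercises the same shape of callback but collects chunks in a list instead of writing to a mochiweb response; everything below is illustrative:

-module(stream_cb_sketch).
-export([demo/0]).

%% The callback state carries {Prepend, Chunks} instead of {Prepend, Resp};
%% Chunks accumulates what send_chunk/2 would have written.
callback({total_and_offset, Total, Offset}, {_, Chunks}) ->
    Head = io_lib:format("{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n",
        [Total, Offset]),
    {ok, {"", [Head | Chunks]}};
callback({row, Row}, {Prepend, Chunks}) ->
    {ok, {",\r\n", [[Prepend, Row] | Chunks]}};
callback(complete, {_, Chunks}) ->
    {ok, lists:reverse(["\r\n]}" | Chunks])}.

demo() ->
    Events = [{total_and_offset, 3, 0},
              {row, "{\"id\":\"a\"}"},
              {row, "{\"id\":\"b\"}"},
              complete],
    {ok, Chunks} = lists:foldl(fun(Ev, {ok, Acc}) -> callback(Ev, Acc) end,
        {ok, {"", []}}, Events),
    io:format("~s~n", [Chunks]),
    ok.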
-- cgit v1.2.3 From 2ca97804ed5e3dc00d4da78c37b8755c57952b9e Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 16:59:55 -0400 Subject: remove unused module --- ebin/chttpd.app | 1 - src/chttpd_server.erl | 3 --- 2 files changed, 4 deletions(-) delete mode 100644 src/chttpd_server.erl diff --git a/ebin/chttpd.app b/ebin/chttpd.app index 445b0969..679f61cc 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -9,7 +9,6 @@ chttpd_external, chttpd_misc, chttpd_oauth, - chttpd_server, chttpd_show, chttpd_stats, chttpd_sup, diff --git a/src/chttpd_server.erl b/src/chttpd_server.erl deleted file mode 100644 index 77fb5410..00000000 --- a/src/chttpd_server.erl +++ /dev/null @@ -1,3 +0,0 @@ --module(chttpd_server). - --include("chttpd.hrl"). \ No newline at end of file -- cgit v1.2.3 From a1fb7dcdfe46f32cda9e03ba351754c3c81b5cc0 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 17:07:05 -0400 Subject: updates to .app resource files --- ebin/chttpd.app | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ebin/chttpd.app b/ebin/chttpd.app index 679f61cc..c33b3112 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -15,8 +15,7 @@ chttpd_view, cloudant_auth ]}, - {registered, []}, + {registered, [chttpd_sup, chttpd]}, {applications, [kernel, stdlib, couch, fabric]}, - {mod, {chttpd_app,[]}}, - {start_phases, []} + {mod, {chttpd_app,[]}} ]}. \ No newline at end of file -- cgit v1.2.3 From 38efda2a71412ebeeadfc9097cb827afa8734c7c Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 21:18:47 -0400 Subject: map and reduce views working over HTTP now, too --- src/chttpd_db.erl | 4 ++- src/chttpd_view.erl | 73 +++++++++++++++++++++++++++-------------------------- 2 files changed, 40 insertions(+), 37 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index dead8a9b..306d6fea 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -376,10 +376,12 @@ db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) -> db_attachment_req(Req, Db, DocId, FileNameParts). all_docs_view(Req, Db, Keys) -> + % measure the time required to generate the etag, see if it's worth it T0 = now(), {ok, Info} = fabric:get_db_info(Db), Etag = couch_httpd:make_etag(Info), - ?LOG_INFO("_all_docs etag - ~p ms", [timer:now_diff(now(),T0) / 1000]), + DeltaT = timer:now_diff(now(), T0) / 1000, + couch_stats_collector:record({couchdb, all_docs_etag}, DeltaT), QueryArgs = chttpd_view:parse_view_params(Req, Keys, map), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index b222246a..9ca0c3a1 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -25,31 +25,45 @@ start_json_response/2, start_json_response/3, end_json_response/1, send_chunked_error/2]). 
-design_doc_view(Req, Db, Id, ViewName, Keys) -> - DesignId = <<"_design/", Id/binary>>, - {_ViewGroup, QueryArgs} = case ?COUCH:open_doc(Db, DesignId, []) of - {ok, Doc} -> - Group = couch_view_group:design_doc_to_view_group(Db, Doc), +design_doc_view(Req, Db, GroupId, ViewName, Keys) -> + % TODO open the ddoc once, not twice (here and fabric) + DesignId = <<"_design/", GroupId/binary>>, + case fabric:open_doc(Db, DesignId, []) of + {ok, DDoc} -> + Group = couch_view_group:design_doc_to_view_group(#db{name=Db}, DDoc), IsReduce = get_reduce_type(Req), ViewType = extract_view_type(ViewName, Group#group.views, IsReduce), - {Group, parse_view_params(Req, Keys, ViewType)}; + QueryArgs = parse_view_params(Req, Keys, ViewType), + % TODO proper calculation of etag + % Etag = view_group_etag(ViewGroup, Db, Keys), + Etag = couch_util:new_uuid(), + couch_stats_collector:increment({httpd, view_reads}), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), + CB = fun view_callback/2, + fabric:query_view(Db, DDoc, ViewName, QueryArgs, CB, {nil, Resp}) + end); {not_found, Reason} -> throw({not_found, Reason}) - end, - % this etag is wrong as current_seq == 0 right now, so no caching allowed - % Etag = view_group_etag(ViewGroup, Db, Keys), - Etag = couch_util:new_uuid(), - couch_stats_collector:increment({httpd, view_reads}), - chttpd:etag_respond(Req, Etag, fun() -> - {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), - case ?COUCH:design_view(Resp, Db, DesignId, ViewName, Keys, QueryArgs) of - {ok, Total, Result} -> - send_chunk(Resp, final_chunk(Total, Result)), - end_json_response(Resp); - {ok, Resp} -> - {ok, Resp} - end - end). + end. + +view_callback({total_and_offset, Total, Offset}, {nil, Resp}) -> + Chunk = "{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n", + send_chunk(Resp, io_lib:format(Chunk, [Total, Offset])), + {ok, {"", Resp}}; +view_callback({row, Row}, {nil, Resp}) -> + % first row of a reduce view + send_chunk(Resp, ["{\"rows\":[\r\n", ?JSON_ENCODE(Row)]), + {ok, {",\r\n", Resp}}; +view_callback({row, Row}, {Prepend, Resp}) -> + send_chunk(Resp, [Prepend, ?JSON_ENCODE(Row)]), + {ok, {",\r\n", Resp}}; +view_callback(complete, {_, Resp}) -> + send_chunk(Resp, "\r\n]}"), + end_json_response(Resp), + {ok, Resp}; +view_callback({error, Reason}, Resp) -> + chttpd:send_chunked_error(Resp, {error, Reason}). extract_view_type(_ViewName, [], _IsReduce) -> throw({not_found, missing_named_view}); @@ -64,15 +78,6 @@ extract_view_type(ViewName, [View|Rest], IsReduce) -> end end. -final_chunk(Total, {_, _, undefined, _, nil}) -> - ?JSON_ENCODE({[{total_rows, Total}, {offset, Total}, {rows, []}]}); -final_chunk(Total, {_, _, undefined, _, Offset}) -> - ?JSON_ENCODE({[{total_rows, Total}, {offset, Offset}, {rows, []}]}); -final_chunk(_, {_, _, _, _, _}) -> - "\r\n]}"; -final_chunk(_, Error) -> - throw(Error). 
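In the rewritten design_doc_view above, the {nil, Resp} accumulator doubles as a state flag: a map view sends total_and_offset first, which writes the JSON header and switches the state to a prepend string, while a reduce view goes straight to rows, so the first row clause emits the header itself. A sketch of the header each case produces (format strings copied from the callback; the two-way split is the only assumed difference):

    % map view:    {total_and_offset, T, O}, {row, R1}, ..., complete
    % reduce view: {row, R1}, {row, R2}, ..., complete
    first_chunk(map, Total, Offset) ->
        io_lib:format("{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n",
            [Total, Offset]);
    first_chunk(reduce, _Total, _Offset) ->
        "{\"rows\":[\r\n".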
- handle_view_req(#httpd{method='GET', path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> design_doc_view(Req, Db, DName, ViewName, nil); @@ -80,15 +85,11 @@ handle_view_req(#httpd{method='GET', handle_view_req(#httpd{method='POST', path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> {Fields} = chttpd:json_body_obj(Req), - case couch_util:get_value(<<"keys">>, Fields, nil) of - nil -> - Fmt = "POST to view ~p/~p in database ~p with no keys member.", - ?LOG_DEBUG(Fmt, [DName, ViewName, Db]), - design_doc_view(Req, Db, DName, ViewName, nil); + case couch_util:get_value(<<"keys">>, Fields) of Keys when is_list(Keys) -> design_doc_view(Req, Db, DName, ViewName, Keys); _ -> - throw({bad_request, "`keys` member must be a array."}) + throw({bad_request, "`keys` body member must be an array."}) end; handle_view_req(Req, _Db) -> -- cgit v1.2.3 From 5069e69317219a6dd4a93aecd79647f3237feca7 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 9 Jun 2010 21:25:19 -0400 Subject: two more small ?COUCH -> fabric updates --- src/chttpd_show.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index bba05ec1..de10463f 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -451,7 +451,7 @@ send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db) -> NewDoc = couch_doc:from_json_obj({NewJsonDoc}), Code = 201, % todo set location field - {ok, _NewRev} = ?COUCH:update_doc(Db, NewDoc, Options); + {ok, _NewRev} = fabric:update_doc(Db, NewDoc, Options); [<<"up">>, _Other, JsonResp] -> Code = 200, ok -- cgit v1.2.3 From 7b0cef7802587e13df8c67f77fa591d869c14fc5 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 11 Jun 2010 13:59:17 -0400 Subject: starting to move metrics hooks into chttpd --- src/chttpd.erl | 7 +++++-- src/chttpd_db.erl | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 1cc15d7e..649b23bc 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -137,14 +137,17 @@ handle_request(MochiReq) -> end, RequestTime = round(timer:now_diff(now(), Begin)/1000), - showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", [ + RequestInfo = [ MochiReq:get(peer), MochiReq:get_header_value("Host"), atom_to_list(Method1), RawUri, Resp:get(code), RequestTime - ]), + ], + Customer = cloudant_util:customer_name(HttpReq), + couch_metrics_req:notify({request, [Customer|RequestInfo]}), + showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", RequestInfo), couch_stats_collector:record({couchdb, request_time}, RequestTime), couch_stats_collector:increment({httpd, requests}), {ok, Resp}. 
diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 306d6fea..9e04ffa1 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -143,6 +143,7 @@ create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> case fabric:delete_db(DbName, [{user_ctx, UserCtx}]) of ok -> + couch_metrics_disk:notify({db_delete, {UserCtx#user_ctx.name, DbName}}), send_json(Req, 200, {[{ok, true}]}); Error -> throw(Error) -- cgit v1.2.3 From 37431f6686b263b467aa2ad8c0e391e6cb74ce07 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 11 Jun 2010 14:39:53 -0400 Subject: temporarily disable metrics --- src/chttpd.erl | 4 ++-- src/chttpd_db.erl | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 649b23bc..f3c4a9da 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -145,8 +145,8 @@ handle_request(MochiReq) -> Resp:get(code), RequestTime ], - Customer = cloudant_util:customer_name(HttpReq), - couch_metrics_req:notify({request, [Customer|RequestInfo]}), + % Customer = cloudant_util:customer_name(HttpReq), + % couch_metrics_req:notify({request, [Customer|RequestInfo]}), showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", RequestInfo), couch_stats_collector:record({couchdb, request_time}, RequestTime), couch_stats_collector:increment({httpd, requests}), diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 9e04ffa1..3b35220a 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -143,7 +143,7 @@ create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> case fabric:delete_db(DbName, [{user_ctx, UserCtx}]) of ok -> - couch_metrics_disk:notify({db_delete, {UserCtx#user_ctx.name, DbName}}), + % couch_metrics_disk:notify({db_delete, {UserCtx#user_ctx.name, DbName}}), send_json(Req, 200, {[{ok, true}]}); Error -> throw(Error) -- cgit v1.2.3 From f4d7191a3c0c37fe7b2ba290ba3e1afda0269863 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 11 Jun 2010 16:08:05 -0400 Subject: _changes feed via fabric --- src/chttpd_db.erl | 102 ++++++++++++++++++++++++++++++------------------------ 1 file changed, 57 insertions(+), 45 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 3b35220a..10388ff5 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -47,56 +47,69 @@ handle_request(#httpd{path_parts=[DbName|RestParts],method=Method, end. 
handle_changes_req(#httpd{method='GET'}=Req, Db) -> - MakeCallback = fun(Resp) -> - fun({change, Change, _}, "continuous") -> - send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]); - ({change, Change, Prepend}, _) -> - send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]); - (start, "continuous") -> - ok; - (start, _) -> - send_chunk(Resp, "{\"results\":[\n"); - ({stop, EndSeq}, "continuous") -> - send_chunk( - Resp, - [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"] - ), - end_json_response(Resp); - ({stop, EndSeq}, _) -> - send_chunk( - Resp, - io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq]) - ), - end_json_response(Resp); - (timeout, _) -> - send_chunk(Resp, "\n") - end - end, ChangesArgs = parse_changes_query(Req), - ChangesFun = couch_changes:handle_changes(ChangesArgs, Req, Db), case ChangesArgs#changes_args.feed of "normal" -> - {ok, Info} = couch_db:get_db_info(Db), - CurrentEtag = chttpd:make_etag(Info), - chttpd:etag_respond( - Req, - CurrentEtag, - fun() -> - {ok, Resp} = chttpd:start_json_response( - Req, 200, [{"Etag", CurrentEtag}] - ), - ChangesFun(MakeCallback(Resp)) - end - ); - _ -> + T0 = now(), + {ok, Info} = fabric:get_db_info(Db), + Etag = chttpd:make_etag(Info), + DeltaT = timer:now_diff(now(), T0) / 1000, + couch_stats_collector:record({couchdb, dbinfo}, DeltaT), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), + fabric:changes(Db, ChangesArgs, fun changes_callback/2, + {"normal", Resp}) + end); + Feed -> % "longpoll" or "continuous" {ok, Resp} = chttpd:start_json_response(Req, 200), - ChangesFun(MakeCallback(Resp)) + fabric:changes(Db, ChangesArgs, fun changes_callback/2, {Feed, Resp}) end; - handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) -> send_method_not_allowed(Req, "GET,HEAD"). +% callbacks for continuous feed (newline-delimited JSON Objects) +changes_callback(start, {"continuous", _} = Acc) -> + {ok, Acc}; +changes_callback({change, Change}, {"continuous", Resp} = Acc) -> + send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]), + {ok, Acc}; +changes_callback({stop, EndSeq0}, {"continuous", Resp}) -> + EndSeq = case is_old_couch(Resp) of true -> 0; false -> EndSeq0 end, + send_chunk(Resp, [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"]), + end_json_response(Resp); + +% callbacks for longpoll and normal (single JSON Object) +changes_callback(start, {_, Resp}) -> + send_chunk(Resp, "{\"results\":[\n"), + {ok, {"", Resp}}; +changes_callback({change, Change}, {Prepend, Resp}) -> + send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]), + {ok, {",\r\n", Resp}}; +changes_callback({stop, EndSeq}, {_, Resp}) -> + case is_old_couch(Resp) of + true -> + send_chunk(Resp, "\n],\n\"last_seq\":0}\n"); + false -> + send_chunk(Resp, io_lib:format("\n],\n\"last_seq\":\"~s\"}\n",[EndSeq])) + end, + end_json_response(Resp); + +changes_callback(timeout, {Prepend, Resp}) -> + send_chunk(Resp, "\n"), + {ok, {Prepend, Resp}}; +changes_callback({error, Reason}, Resp) -> + chttpd:send_chunked_error(Resp, {error, Reason}). + +is_old_couch(Resp) -> + MochiReq = Resp:get(request), + case MochiReq:get_header_value("user-agent") of + undefined -> + false; + UserAgent -> + string:str(UserAgent, "CouchDB/0") > 0 + end. 
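The clauses above give each feed type its own framing: continuous emits one JSON object per line and finishes with a bare last_seq object, while normal and longpoll wrap everything in a single results object. A condensed sketch of just the closing chunk, assuming the same macros and sequence formats used above:

    % continuous:        {"last_seq":<seq>}\n
    % normal / longpoll: \n],\n"last_seq":"<seq>"}\n
    last_seq_chunk("continuous", EndSeq) ->
        [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"];
    last_seq_chunk(_NormalOrLongpoll, EndSeq) ->
        io_lib:format("\n],\n\"last_seq\":\"~s\"}\n", [EndSeq]).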
+ handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) -> ok = ?COUCH:compact_view_group(DbName, Id), send_json(Req, 202, {[{ok, true}]}); @@ -382,7 +395,7 @@ all_docs_view(Req, Db, Keys) -> {ok, Info} = fabric:get_db_info(Db), Etag = couch_httpd:make_etag(Info), DeltaT = timer:now_diff(now(), T0) / 1000, - couch_stats_collector:record({couchdb, all_docs_etag}, DeltaT), + couch_stats_collector:record({couchdb, dbinfo}, DeltaT), QueryArgs = chttpd_view:parse_view_params(Req, Keys, map), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), @@ -398,8 +411,7 @@ all_docs_callback({row, Row}, {Prepend, Resp}) -> {ok, {",\r\n", Resp}}; all_docs_callback(complete, {_, Resp}) -> send_chunk(Resp, "\r\n]}"), - end_json_response(Resp), - {ok, Resp}; + end_json_response(Resp); all_docs_callback({error, Reason}, Resp) -> chttpd:send_chunked_error(Resp, {error, Reason}). @@ -780,7 +792,7 @@ parse_changes_query(Req) -> {"descending", "true"} -> Args#changes_args{dir=rev}; {"since", _} -> - Args#changes_args{since=list_to_integer(Value)}; + Args#changes_args{since=Value}; {"limit", _} -> Args#changes_args{limit=list_to_integer(Value)}; {"style", _} -> -- cgit v1.2.3 From 35bc59279c9d6de45e27ce87573eeeacfda9289e Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 11 Jun 2010 17:06:30 -0400 Subject: support for sorted=false map views over HTTP. BugzID 10073 --- src/chttpd_view.erl | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 9ca0c3a1..f49e232d 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -51,8 +51,11 @@ view_callback({total_and_offset, Total, Offset}, {nil, Resp}) -> Chunk = "{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n", send_chunk(Resp, io_lib:format(Chunk, [Total, Offset])), {ok, {"", Resp}}; +view_callback({total_and_offset, _, _}, Acc) -> + % a sorted=false view where the message came in late. Ignore. + {ok, Acc}; view_callback({row, Row}, {nil, Resp}) -> - % first row of a reduce view + % first row of a reduce view, or a sorted=false view send_chunk(Resp, ["{\"rows\":[\r\n", ?JSON_ENCODE(Row)]), {ok, {",\r\n", Resp}}; view_callback({row, Row}, {Prepend, Resp}) -> @@ -342,6 +345,8 @@ parse_view_param("callback", _) -> []; % Verified in the JSON response functions parse_view_param("show_total_rows", Value) -> [{show_total_rows, parse_bool_param(Value)}]; +parse_view_param("sorted", Value) -> + [{sorted, parse_bool_param(Value)}]; parse_view_param(Key, Value) -> [{extra, {Key, Value}}]. @@ -427,6 +432,10 @@ validate_view_query(show_total_rows, false, Args) -> Args#view_query_args{show_total_rows=false}; validate_view_query(show_total_rows, _Value, Args) -> Args; +validate_view_query(sorted, false, Args) -> + Args#view_query_args{sorted=false}; +validate_view_query(sorted, _Value, Args) -> + Args; validate_view_query(extra, _Value, Args) -> Args. 
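The sorted=false flag appears to let a map view stream rows as they arrive from the shards instead of in merged key order, which is why view_callback above tolerates a late total_and_offset message. A small illustration of the resulting query args, assuming record defaults for the untouched fields (the URL is an example):

    % GET /db/_design/ddoc/_view/by_tag?sorted=false
    example_unsorted_args() ->
        #view_query_args{view_type = map, sorted = false}.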
-- cgit v1.2.3 From b61d25f5d15922cd36fd3768cada11ab42d87897 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 14 Jun 2010 11:46:14 -0400 Subject: more obvious file_exists create error handling --- src/chttpd.erl | 2 +- src/chttpd_db.erl | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index f3c4a9da..8a5af130 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -22,7 +22,7 @@ server_header/0, start_chunked_response/3,send_chunk/2, start_response_length/4, send/2, start_json_response/2, start_json_response/3, end_json_response/1, send_response/4, - send_method_not_allowed/2, send_error/4, send_redirect/2, + send_method_not_allowed/2, send_error/2, send_error/4, send_redirect/2, send_chunked_error/2, send_json/2,send_json/3,send_json/4]). start_link() -> diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 10388ff5..dc1f6df3 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -149,6 +149,8 @@ create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> ok -> DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), send_json(Req, 201, [{"Location", DocUrl}], {[{ok, true}]}); + {error, file_exists} -> + chttpd:send_error(Req, file_exists); Error -> throw(Error) end. -- cgit v1.2.3 From c3ff956cfa00e71e652062231991b5a22ec520f5 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 14 Jun 2010 15:18:14 -0400 Subject: move ?b2a into couch_db.hrl --- src/chttpd_stats.erl | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/chttpd_stats.erl b/src/chttpd_stats.erl index 513a80f3..1e30c466 100644 --- a/src/chttpd_stats.erl +++ b/src/chttpd_stats.erl @@ -20,8 +20,6 @@ start_json_response/2,send_chunk/2,end_json_response/1, start_chunked_response/3, send_error/4]). --define(b2a(V), list_to_atom(binary_to_list(V))). - -record(stats_query_args, { range='0', flush=false -- cgit v1.2.3 From 6f4a995150978e67539546c3da804126e3a36389 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 14 Jun 2010 18:57:31 -0400 Subject: fix error clause of view callbacks --- src/chttpd_db.erl | 2 +- src/chttpd_view.erl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index dc1f6df3..1d558a19 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -414,7 +414,7 @@ all_docs_callback({row, Row}, {Prepend, Resp}) -> all_docs_callback(complete, {_, Resp}) -> send_chunk(Resp, "\r\n]}"), end_json_response(Resp); -all_docs_callback({error, Reason}, Resp) -> +all_docs_callback({error, Reason}, {_, Resp}) -> chttpd:send_chunked_error(Resp, {error, Reason}). db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) -> diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index f49e232d..8d071cac 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -65,7 +65,7 @@ view_callback(complete, {_, Resp}) -> send_chunk(Resp, "\r\n]}"), end_json_response(Resp), {ok, Resp}; -view_callback({error, Reason}, Resp) -> +view_callback({error, Reason}, {_, Resp}) -> chttpd:send_chunked_error(Resp, {error, Reason}). 
extract_view_type(_ViewName, [], _IsReduce) -> -- cgit v1.2.3 From 6b0e1f6da54c61714b6aef1a7a7991ab8c13b631 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 15 Jun 2010 11:29:08 -0400 Subject: use 0.11 couch_uuids module --- src/chttpd_auth.erl | 2 +- src/chttpd_db.erl | 4 ++-- src/chttpd_misc.erl | 19 +------------------ src/chttpd_show.erl | 2 +- src/chttpd_view.erl | 2 +- 5 files changed, 6 insertions(+), 23 deletions(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 3916f7cf..4159b764 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -312,7 +312,7 @@ create_user(#httpd{method='POST', mochi_req=MochiReq}=Req, Db) -> true -> ok end, Active = chttpd_view:parse_bool_param(couch_util:get_value("active", Form, "true")), - UserSalt = couch_util:new_uuid(), + UserSalt = couch_uuids:random(), UserDoc = #doc{ id = UserName, body = {[ diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 1d558a19..277a33bc 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -175,7 +175,7 @@ db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> Doc = couch_doc:from_json_obj(chttpd:json_body(Req)), Doc2 = case Doc#doc.id of <<"">> -> - Doc#doc{id=couch_util:new_uuid(), revs={0, []}}; + Doc#doc{id=couch_uuids:new(), revs={0, []}}; _ -> Doc end, @@ -254,7 +254,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, Doc = couch_doc:from_json_obj(JsonObj), validate_attachment_names(Doc), Id = case Doc#doc.id of - <<>> -> couch_util:new_uuid(); + <<>> -> couch_uuids:new(); Id0 -> Id0 end, case couch_util:get_value(<<"_rev">>, ObjProps) of diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index e6d3c76a..26297d34 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -131,25 +131,8 @@ handle_restart_req(Req) -> send_method_not_allowed(Req, "POST"). -handle_uuids_req(#httpd{method='GET'}=Req) -> - Count = list_to_integer(chttpd:qs_value(Req, "count", "1")), - % generate the uuids - UUIDs = [ couch_util:new_uuid() || _ <- lists:seq(1,Count)], - % send a JSON response - Etag = chttpd:make_etag(UUIDs), - chttpd:etag_respond(Req, Etag, fun() -> - CacheBustingHeaders = [ - {"Date", httpd_util:rfc1123_date()}, - {"Cache-Control", "no-cache"}, - % Past date, ON PURPOSE! - {"Expires", "Fri, 01 Jan 1990 00:00:00 GMT"}, - {"Pragma", "no-cache"}, - {"ETag", Etag} - ], - send_json(Req, 200, CacheBustingHeaders, {[{<<"uuids">>, UUIDs}]}) - end); handle_uuids_req(Req) -> - send_method_not_allowed(Req, "GET"). + couch_httpd_misc_handlers:handle_uuids_req(Req). 
% Config request handler diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index de10463f..9f4e0d02 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -148,7 +148,7 @@ send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Group, Keys) QueryArgs = chttpd_view:parse_view_params(Req, Keys, ViewType), {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), StartListRespFun = make_map_start_resp_fun(QueryServer, Db), - Etag = couch_util:new_uuid(), + Etag = couch_uuids:new(), chttpd:etag_respond(Req, Etag, fun() -> {ok, Total, Result} = ?COUCH:list_view(Req, Db, DesignId, ViewName, Keys, QueryArgs, QueryServer), diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 8d071cac..a4d909a1 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -36,7 +36,7 @@ design_doc_view(Req, Db, GroupId, ViewName, Keys) -> QueryArgs = parse_view_params(Req, Keys, ViewType), % TODO proper calculation of etag % Etag = view_group_etag(ViewGroup, Db, Keys), - Etag = couch_util:new_uuid(), + Etag = couch_uuids:new(), couch_stats_collector:increment({httpd, view_reads}), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), -- cgit v1.2.3 From daf255771e50f720bad558a2218decd0d1a26bb1 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 15 Jun 2010 11:58:01 -0400 Subject: return 409 on update conflict --- src/chttpd.erl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 8a5af130..51edbef2 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -484,7 +484,7 @@ validate_callback([Char | Rest]) -> error_info({Error, Reason}) when is_list(Reason) -> - error_info({Error, ?l2b(Reason)}); + error_info({Error, couch_util:to_binary(Reason)}); error_info(bad_request) -> {400, <<"bad_request">>, <<>>}; error_info({bad_request, Reason}) -> @@ -499,6 +499,8 @@ error_info({not_acceptable, Reason}) -> {406, <<"not_acceptable">>, Reason}; error_info(conflict) -> {409, <<"conflict">>, <<"Document update conflict.">>}; +error_info({conflict, _}) -> + {409, <<"conflict">>, <<"Document update conflict.">>}; error_info({forbidden, Msg}) -> {403, <<"forbidden">>, Msg}; error_info({credentials_expired, Msg}) -> -- cgit v1.2.3 From 49e2051daeb274d7c919e64946d2e6c50e083a93 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 15 Jun 2010 13:34:34 -0400 Subject: use proper startkey and endkey for 0.11 --- src/chttpd_view.erl | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index a4d909a1..6984f3e6 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -255,8 +255,8 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) -> finish_reduce_fold(Req, Resp) end). -reverse_key_default(nil) -> {}; -reverse_key_default({}) -> nil; +reverse_key_default(?MIN_STR) -> ?MAX_STR; +reverse_key_default(?MAX_STR) -> ?MIN_STR; reverse_key_default(Key) -> Key. 
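error_info/1 above now normalizes any {conflict, _} tuple, as returned by clustered writes, to the same 409 response as a bare conflict, and list reasons go through couch_util:to_binary/1 so non-string terms don't crash the error path. A small illustration (the inputs are examples):

    % error_info({conflict, ignored})      -> {409, <<"conflict">>, <<"Document update conflict.">>}
    % error_info({bad_request, "no good"}) -> {400, <<"bad_request">>, <<"no good">>}
    conflict_status_code() ->
        {Code, _Error, _Reason} = chttpd:error_info(conflict),
        Code.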
get_stale_type(Req) -> @@ -384,12 +384,8 @@ validate_view_query(descending, true, Args) -> fwd -> Args#view_query_args{ direction = rev, - start_key = - reverse_key_default(Args#view_query_args.start_key), start_docid = reverse_key_default(Args#view_query_args.start_docid), - end_key = - reverse_key_default(Args#view_query_args.end_key), end_docid = reverse_key_default(Args#view_query_args.end_docid) } -- cgit v1.2.3 From 50d87f96770fb54f2b2ea1728742b443c66134a3 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 15 Jun 2010 20:59:56 -0400 Subject: refactored couch metrics interface --- src/chttpd.erl | 28 ++++++++++++++++------------ src/chttpd_db.erl | 1 - src/chttpd_misc.erl | 13 ++++++++++++- 3 files changed, 28 insertions(+), 14 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 51edbef2..122a98a2 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -136,18 +136,21 @@ handle_request(MochiReq) -> send_error(HttpReq, Error) end, - RequestTime = round(timer:now_diff(now(), Begin)/1000), - RequestInfo = [ - MochiReq:get(peer), - MochiReq:get_header_value("Host"), - atom_to_list(Method1), - RawUri, - Resp:get(code), - RequestTime - ], - % Customer = cloudant_util:customer_name(HttpReq), - % couch_metrics_req:notify({request, [Customer|RequestInfo]}), - showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", RequestInfo), + RequestTime = timer:now_diff(now(), Begin)/1000, + Peer = MochiReq:get(peer), + Code = Resp:get(code), + Host = MochiReq:get_header_value("Host"), + couch_metrics_event:notify(#response{ + peer = Peer, + host = Host, + customer = Customer, + code = Code, + time = RequestTime, + method = Method1, + uri = RawUri + }), + showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", [Peer, Host, + atom_to_list(Method1), RawUri, Code, round(RequestTime)]), couch_stats_collector:record({couchdb, request_time}, RequestTime), couch_stats_collector:increment({httpd, requests}), {ok, Resp}. @@ -190,6 +193,7 @@ url_handler("_session") -> fun chttpd_auth:handle_session_req/1; url_handler("_user") -> fun chttpd_auth:handle_user_req/1; url_handler("_oauth") -> fun chttpd_oauth:handle_oauth_req/1; url_handler("_stats") -> fun chttpd_stats:handle_stats_req/1; +url_handler("_metrics") -> fun chttpd_misc:handle_metrics_req/1; url_handler("_restart") -> fun showroom_http:handle_restart_req/1; url_handler("_cloudant") -> fun showroom_httpd_admin:handle_cloudant_req/1; url_handler(_) -> fun chttpd_db:handle_request/1. diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 277a33bc..9aae4c6e 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -158,7 +158,6 @@ create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> case fabric:delete_db(DbName, [{user_ctx, UserCtx}]) of ok -> - % couch_metrics_disk:notify({db_delete, {UserCtx#user_ctx.name, DbName}}), send_json(Req, 200, {[{ok, true}]}); Error -> throw(Error) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 26297d34..3ed70bea 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -16,7 +16,7 @@ handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1, handle_uuids_req/1,handle_config_req/1,handle_log_req/1, handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, - handle_utils_dir_req/1, handle_favicon_req/1]). + handle_utils_dir_req/1, handle_favicon_req/1, handle_metrics_req/1]). -export([increment_update_seq_req/2]). @@ -214,4 +214,15 @@ handle_log_req(#httpd{method='GET'}=Req) -> handle_log_req(Req) -> send_method_not_allowed(Req, "GET"). 
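The notify call above fills a #response record whose definition lives in the metrics application, not in this patch. A plausible shape, inferred only from the fields set in handle_request (an assumption, not the actual couch_metrics_event header):

    -record(response, {
        peer,      % client address reported by mochiweb
        host,      % Host header
        customer,  % cloudant_util:customer_name/1 result
        code,      % HTTP status code
        time,      % request time in milliseconds
        method,    % request method
        uri        % raw request URI
    }).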
+handle_metrics_req(#httpd{method='GET', path_parts=[_, Id]}=Req) -> + case chttpd:qs_value(Req, "slice") of + undefined -> + Data = couch_metrics_event:get_global_metrics(?b2a(Id)); + SliceStr -> + Slice = list_to_integer(SliceStr), + Data = couch_metrics_event:get_global_metrics(?b2a(Id), Slice) + end, + send_json(Req, Data); +handle_metrics_req(Req) -> + send_method_not_allowed(Req, "GET,HEAD"). -- cgit v1.2.3 From b229c6c4ec1e72a1ec7593d5626983e1d2065f67 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 16 Jun 2010 09:32:08 -0400 Subject: re-add GET /_metrics to see list of installed handlers --- src/chttpd_misc.erl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 3ed70bea..6a3261c6 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -214,6 +214,9 @@ handle_log_req(#httpd{method='GET'}=Req) -> handle_log_req(Req) -> send_method_not_allowed(Req, "GET"). +handle_metrics_req(#httpd{method='GET', path_parts=[_]}=Req) -> + {_, L} = lists:unzip(gen_event:which_handlers(couch_metrics_event_manager)), + send_json(Req, L); handle_metrics_req(#httpd{method='GET', path_parts=[_, Id]}=Req) -> case chttpd:qs_value(Req, "slice") of undefined -> -- cgit v1.2.3 From b83ece9a18cacddbd98cdf1038a2b40ca9378f16 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 16 Jun 2010 15:17:36 -0400 Subject: better cookie auth failures. BugzID 1522, BugzID 10157 --- src/chttpd.erl | 5 ++--- src/chttpd_auth.erl | 33 ++++++++++++++------------------- 2 files changed, 16 insertions(+), 22 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 122a98a2..ee55f3e8 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -100,7 +100,6 @@ handle_request(MochiReq) -> {ok, Resp} = try - erase(cookie_auth_failed), case authenticate_request(HttpReq, AuthenticationFuns) of #httpd{} = Req -> HandlerFun = url_handler(HandlerKey), @@ -507,8 +506,8 @@ error_info({conflict, _}) -> {409, <<"conflict">>, <<"Document update conflict.">>}; error_info({forbidden, Msg}) -> {403, <<"forbidden">>, Msg}; -error_info({credentials_expired, Msg}) -> - {403, <<"credentials_expired">>, Msg}; +error_info({forbidden, Error, Msg}) -> + {403, Error, Msg}; error_info({unauthorized, Msg}) -> {401, <<"unauthorized">>, Msg}; error_info(file_exists) -> diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 4159b764..b13e12d1 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -76,17 +76,18 @@ cookie_authentication_handler(#httpd{path_parts=[<<"_session">>], % ignore any cookies sent with login request Req; cookie_authentication_handler(Req) -> - case cookie_auth_user(Req) of + try cookie_auth_user(Req) of nil -> Req; - cookie_auth_failed -> - put(cookie_auth_failed, true), - Req#httpd{auth=cookie_auth_failed}; + {cookie_auth_failed, _} = X -> + Req#httpd{auth=X}; Req2 -> Req2 + catch error:_ -> + Req#httpd{auth={cookie_auth_failed, {invalid_cookie, null}}} end. -cookie_auth_header(#httpd{auth=cookie_auth_failed}, Headers) -> +cookie_auth_header(#httpd{auth={cookie_auth_failed, _}}, Headers) -> % check for an AuthSession cookie from login handler CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""), Cookies = mochiweb_cookies:parse_cookie(CookieHeader), @@ -113,13 +114,8 @@ cookie_auth_header(#httpd{user_ctx=Ctx, auth={Secret,true}}, Headers) -> true -> [] end; -cookie_auth_header(Req, Headers) -> - case get(cookie_auth_failed) of - true -> - cookie_auth_header(Req#httpd{auth=cookie_auth_failed}, Headers); - _ -> - [] - end. 
+cookie_auth_header(_Req, _Headers) -> + []. handle_session_req(#httpd{method='POST', mochi_req=MochiReq, user_ctx=Ctx}=Req) -> % login @@ -263,12 +259,12 @@ cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> case couch_config:get("chttpd_auth", "secret") of undefined -> ?LOG_DEBUG("AuthSession cookie, but no secret in config!", []), - cookie_auth_failed; + {cookie_auth_failed, {internal_server_error, null}}; SecretStr -> case get_user(User) of nil -> - ?LOG_DEBUG("no record of user ~s", [User]), - cookie_auth_failed; + Msg = io_lib:format("no record of user ~s", [User]), + {cookie_auth_failed, {bad_user, ?l2b(Msg)}}; Result -> Secret = ?l2b(SecretStr), UserSalt = couch_util:get_value(<<"salt">>, Result), @@ -288,14 +284,13 @@ cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> }, auth={FullSecret, TimeLeft < Timeout*0.9}}; true -> ?LOG_DEBUG("cookie for ~s was expired", [User]), - put(cookie_auth_failed, true), Msg = lists:concat(["Your session has expired after ", Timeout div 60, " minutes of inactivity"]), - throw({credentials_expired, ?l2b(Msg)}) + {cookie_auth_failed, {credentials_expired, ?l2b(Msg)}} end; _Else -> - ?LOG_DEBUG("cookie password hash was incorrect", []), - cookie_auth_failed + Msg = <<"cookie password hash was incorrect">>, + {cookie_auth_failed, {bad_password, Msg}} end end end -- cgit v1.2.3 From 17e63f456d7c62178a6536ad2004f6d5c632ef53 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 16 Jun 2010 16:46:35 -0400 Subject: nicer error message when querying a non-existent DB --- src/chttpd.erl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index ee55f3e8..d178604d 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -118,9 +118,9 @@ handle_request(MochiReq) -> showroom_log:message(notice, LogForClosedSocket, []), exit(normal); throw:Error -> - % ?LOG_DEBUG("Minor error in HTTP request: ~p",[Error]), - % ?LOG_DEBUG("Stacktrace: ~p",[erlang:get_stacktrace()]), send_error(HttpReq, Error); + error:database_does_not_exist -> + send_error(HttpReq, database_does_not_exist); error:badarg -> ?LOG_ERROR("Badarg error in HTTP request",[]), ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]), @@ -494,6 +494,8 @@ error_info({bad_request, Reason}) -> {400, <<"bad_request">>, Reason}; error_info({query_parse_error, Reason}) -> {400, <<"query_parse_error">>, Reason}; +error_info(database_does_not_exist) -> + {404, <<"not_found">>, <<"database_does_not_exist">>}; error_info(not_found) -> {404, <<"not_found">>, <<"missing">>}; error_info({not_found, Reason}) -> -- cgit v1.2.3 From ce41b0d7fd15f6cc20b19147f88003f7ea61c728 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 22 Jun 2010 07:46:15 -0400 Subject: fix passing options to fabric in _bulk_docs --- src/chttpd_db.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 9aae4c6e..2d2fadc2 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -271,7 +271,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, true -> [all_or_nothing|Options]; _ -> Options end, - case fabric:update_docs(Db, Docs, [Options2]) of + case fabric:update_docs(Db, Docs, Options2) of {ok, Results} -> % output the results DocResults = lists:zipwith(fun update_doc_result_to_json/2, -- cgit v1.2.3 From 89ccaa276043a979ab38e7c90e581d2b220261ea Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 22 Jun 2010 08:11:21 -0400 Subject: add json_req_obj/3, which accepts a docid as last argument --- 
src/chttpd_external.erl | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index 9566ec28..fa9e7025 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -13,7 +13,7 @@ -module(chttpd_external). -export([handle_external_req/2, handle_external_req/3]). --export([send_external_response/2, json_req_obj/2]). +-export([send_external_response/2, json_req_obj/2, json_req_obj/3]). -export([default_or_content_type/2, parse_external_response/1]). -import(chttpd,[send_error/4]). @@ -54,11 +54,12 @@ process_external_req(HttpReq, Db, Name) -> send_external_response(HttpReq, Response) end. +json_req_obj(Req, Db) -> json_req_obj(Req, Db, null). json_req_obj(#httpd{mochi_req=Req, - method=Verb, + method=Method, path_parts=Path, req_body=ReqBody - } = HttpReq, Db) -> + } = HttpReq, Db, DocId) -> Body = case ReqBody of undefined -> Req:recv_body(); Else -> Else @@ -82,17 +83,18 @@ json_req_obj(#httpd{mochi_req=Req, NoCustomer -> NoCustomer end, - % add headers... {[{<<"info">>, {cloudant_util:customer_db_info(HttpReq, Info)}}, - {<<"verb">>, Verb}, + {<<"id">>, DocId}, + {<<"method">>, Method}, {<<"path">>, FixedPath}, - {<<"query">>, to_json_terms(Req:parse_qs())}, + {<<"query">>, json_query_keys(to_json_terms(Req:parse_qs()))}, {<<"headers">>, to_json_terms(Hlist)}, {<<"body">>, Body}, + {<<"peer">>, ?l2b(Req:get(peer))}, {<<"form">>, to_json_terms(ParsedForm)}, {<<"cookie">>, to_json_terms(Req:parse_cookie())}, - {<<"userCtx">>, couch_util:json_user_ctx(Db)}]}. + {<<"userCtx">>, couch_util:json_user_ctx(Db#db{name=hd(FixedPath)})}]}. to_json_terms(Data) -> to_json_terms(Data, []). @@ -103,6 +105,18 @@ to_json_terms([{Key, Value} | Rest], Acc) when is_atom(Key) -> to_json_terms([{Key, Value} | Rest], Acc) -> to_json_terms(Rest, [{list_to_binary(Key), list_to_binary(Value)} | Acc]). +json_query_keys({Json}) -> + json_query_keys(Json, []). +json_query_keys([], Acc) -> + {lists:reverse(Acc)}; +json_query_keys([{<<"startkey">>, Value} | Rest], Acc) -> + json_query_keys(Rest, [{<<"startkey">>, couch_util:json_decode(Value)}|Acc]); +json_query_keys([{<<"endkey">>, Value} | Rest], Acc) -> + json_query_keys(Rest, [{<<"endkey">>, couch_util:json_decode(Value)}|Acc]); +json_query_keys([{<<"key">>, Value} | Rest], Acc) -> + json_query_keys(Rest, [{<<"key">>, couch_util:json_decode(Value)}|Acc]); +json_query_keys([Term | Rest], Acc) -> + json_query_keys(Rest, [Term|Acc]). 
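json_query_keys/1 decodes the key-range parameters so an external process sees JSON terms instead of the raw query-string text, while every other parameter passes through untouched. A rough usage example (values are illustrative only):

    % In:  {[{<<"startkey">>, <<"[\"a\",1]">>}, {<<"limit">>, <<"10">>}]}
    % Out: {[{<<"startkey">>, [<<"a">>, 1]}, {<<"limit">>, <<"10">>}]}
    example_query_keys() ->
        json_query_keys({[{<<"startkey">>, <<"[\"a\",1]">>},
                          {<<"limit">>, <<"10">>}]}).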
send_external_response(#httpd{mochi_req=MochiReq}, Response) -> #extern_resp_args{ -- cgit v1.2.3 From 4a4c4edd323db00d360db3355c5876b4f14a69a4 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 22 Jun 2010 09:56:54 -0400 Subject: switch to 0.11-style (3-arity) design handlers, fix _show _list and _update still need to be fixed, and _view reopens the ddoc --- src/chttpd.erl | 11 ++--- src/chttpd_db.erl | 23 ++++++---- src/chttpd_show.erl | 129 ++++++++++++++++++++++++++++++++++------------------ src/chttpd_view.erl | 51 ++------------------- 4 files changed, 109 insertions(+), 105 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index d178604d..a4bb1293 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -202,18 +202,17 @@ db_url_handlers() -> {<<"_view_cleanup">>, fun chttpd_view:handle_view_cleanup_req/2}, {<<"_compact">>, fun chttpd_db:handle_compact_req/2}, {<<"_design">>, fun chttpd_db:handle_design_req/2}, - {<<"_view">>, fun chttpd_db:handle_db_view_req/2}, {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2}, {<<"_changes">>, fun chttpd_db:handle_changes_req/2} ]. design_url_handlers() -> [ - {<<"_view">>, fun chttpd_view:handle_view_req/2}, - {<<"_show">>, fun chttpd_show:handle_doc_show_req/2}, - {<<"_list">>, fun chttpd_show:handle_view_list_req/2}, - {<<"_update">>, fun chttpd_show:handle_doc_update_req/2}, - {<<"_info">>, fun chttpd_db:handle_design_info_req/2} + {<<"_view">>, fun chttpd_view:handle_view_req/3}, + {<<"_show">>, fun chttpd_show:handle_doc_show_req/3}, + {<<"_list">>, fun chttpd_show:handle_view_list_req/3}, + {<<"_update">>, fun chttpd_show:handle_doc_update_req/3}, + {<<"_info">>, fun chttpd_db:handle_design_info_req/3} ]. % Utilities diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 2d2fadc2..0317aed4 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -16,7 +16,7 @@ -export([handle_request/1, handle_compact_req/2, handle_design_req/2, db_req/2, couch_doc_open/4,handle_changes_req/2, update_doc_result_to_json/1, update_doc_result_to_json/2, - handle_design_info_req/2, handle_view_cleanup_req/2]). + handle_design_info_req/3, handle_view_cleanup_req/2]). -import(chttpd, [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, @@ -123,23 +123,30 @@ handle_view_cleanup_req(Req, _) -> chttpd:send_error(Req, 403, Msg). handle_design_req(#httpd{ - path_parts=[_DbName,_Design,_DesName, <<"_",_/binary>> = Action | _Rest], + path_parts=[_DbName, _Design, Name, <<"_",_/binary>> = Action | _Rest], design_url_handlers = DesignUrlHandlers }=Req, Db) -> - Handler = couch_util:get_value(Action, DesignUrlHandlers, fun db_req/2), - Handler(Req, Db); + case fabric:open_doc(Db, <<"_design/", Name/binary>>, []) of + {ok, DDoc} -> + % TODO we'll trigger a badarity here if ddoc attachment starts with "_", + % or if user tries an unknown Action + Handler = couch_util:get_value(Action, DesignUrlHandlers, fun db_req/2), + Handler(Req, Db, DDoc); + Error -> + throw(Error) + end; handle_design_req(Req, Db) -> db_req(Req, Db). -handle_design_info_req(#httpd{method='GET', path_parts=[_,_,Name,_]}=Req, Db) -> - {ok, GroupInfoList} = fabric:get_view_group_info(Db, Name), +handle_design_info_req(#httpd{method='GET'}=Req, Db, #doc{id=Id} = DDoc) -> + {ok, GroupInfoList} = fabric:get_view_group_info(Db, DDoc), send_json(Req, 200, {[ - {name, <<"_design/", Name/binary>>}, + {name, Id}, {view_index, {GroupInfoList}} ]}); -handle_design_info_req(Req, _Db) -> +handle_design_info_req(Req, _Db, _DDoc) -> send_method_not_allowed(Req, "GET"). 
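With the 3-arity dispatch, handle_design_req opens the design doc once and hands it to whichever _info, _view, _show, _list, or _update handler matches, so individual handlers no longer re-open it. A minimal sketch of a handler written against the new contract (hypothetical, not part of the patch):

    handle_example_req(#httpd{method='GET'}=Req, _Db, #doc{id=DDocId}) ->
        chttpd:send_json(Req, 200, {[{<<"ddoc">>, DDocId}]});
    handle_example_req(Req, _Db, _DDoc) ->
        chttpd:send_method_not_allowed(Req, "GET").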
create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index 9f4e0d02..f74b9dd6 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -12,38 +12,98 @@ -module(chttpd_show). --export([handle_doc_show_req/2, handle_doc_update_req/2, handle_view_list_req/2, - handle_doc_show/5, handle_view_list/7, start_list_resp/5, - send_list_row/6]). +-export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3, + handle_view_list/7, get_fun_key/3]). -include("chttpd.hrl"). -import(chttpd, [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, - start_json_response/2,send_chunk/2,send_chunked_error/2, + start_json_response/2,send_chunk/2,last_chunk/1,send_chunked_error/2, start_chunked_response/3, send_error/4]). +% /db/_design/foo/_show/bar/docid +% show converts a json doc to a response of any content-type. +% it looks up the doc an then passes it to the query server. +% then it sends the response from the query server to the http client. + +maybe_open_doc(Db, DocId) -> + case fabric:open_doc(Db, DocId, [conflicts]) of + {ok, Doc} -> + Doc; + {not_found, _} -> + nil + end. + handle_doc_show_req(#httpd{ - method='GET', - path_parts=[_DbName, _Design, DesignName, _Show, ShowName, DocId] - }=Req, Db) -> - handle_doc_show(Req, DesignName, ShowName, DocId, Db); + path_parts=[_, _, _, _, ShowName, DocId] + }=Req, Db, DDoc) -> + + % open the doc + Doc = maybe_open_doc(Db, DocId), + + % we don't handle revs here b/c they are an internal api + % returns 404 if there is no doc with DocId + handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId); handle_doc_show_req(#httpd{ - path_parts=[_DbName, _Design, DesignName, _Show, ShowName] - }=Req, Db) -> - handle_doc_show(Req, DesignName, ShowName, nil, Db); + path_parts=[_, _, _, _, ShowName, DocId|Rest] + }=Req, Db, DDoc) -> + + DocParts = [DocId|Rest], + DocId1 = ?l2b(string:join([?b2l(P)|| P <- DocParts], "/")), -handle_doc_show_req(#httpd{method='GET'}=Req, _Db) -> - send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>); + % open the doc + Doc = maybe_open_doc(Db, DocId1), -handle_doc_show_req(Req, _Db) -> - send_method_not_allowed(Req, "GET,POST,HEAD"). + % we don't handle revs here b/c they are an internal api + % pass 404 docs to the show function + handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId1); + +handle_doc_show_req(#httpd{ + path_parts=[_, _, _, _, ShowName] + }=Req, Db, DDoc) -> + % with no docid the doc is nil + handle_doc_show(Req, Db, DDoc, ShowName, nil); + +handle_doc_show_req(Req, _Db, _DDoc) -> + send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>). + +handle_doc_show(Req, Db, DDoc, ShowName, Doc) -> + handle_doc_show(Req, Db, DDoc, ShowName, Doc, null). + +handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId) -> + % get responder for ddoc/showname + CurrentEtag = show_etag(Req, Doc, DDoc, []), + chttpd:etag_respond(Req, CurrentEtag, fun() -> + JsonReq = chttpd_external:json_req_obj(Req, Db, DocId), + JsonDoc = couch_query_servers:json_doc(Doc), + [<<"resp">>, ExternalResp] = + couch_query_servers:ddoc_prompt(DDoc, [<<"shows">>, ShowName], + [JsonDoc, JsonReq]), + JsonResp = apply_etag(ExternalResp, CurrentEtag), + chttpd_external:send_external_response(Req, JsonResp) + end). 
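handle_doc_show above now goes through couch_query_servers:ddoc_prompt/3, which hands the design doc, the JSON-encoded doc, and the JSON request object to the query server and gets back [<<"resp">>, ExternalResp]. A sketch of the kind of external response object send_external_response/2 then renders (field names follow the usual external-response convention; the values are examples):

    example_show_resp() ->
        {[{<<"code">>, 200},
          {<<"headers">>, {[{<<"Content-Type">>, <<"text/html">>}]}},
          {<<"body">>, <<"<h1>hello</h1>">>}]}.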
+ + +show_etag(#httpd{user_ctx=UserCtx}=Req, Doc, DDoc, More) -> + Accept = chttpd:header_value(Req, "Accept"), + DocPart = case Doc of + nil -> nil; + Doc -> chttpd:doc_etag(Doc) + end, + couch_httpd:make_etag({couch_httpd:doc_etag(DDoc), DocPart, Accept, + UserCtx#user_ctx.roles, More}). + +get_fun_key(#doc{body={Props}}, Type, Name) -> + Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), + Src = couch_util:get_nested_json_value({Props}, [Type, Name]), + {Lang, Src}. handle_doc_update_req(#httpd{ method = 'PUT', path_parts=[_DbName, _Design, DesignName, _Update, UpdateName, DocId] - }=Req, Db) -> + }=Req, Db, _) -> DesignId = <<"_design/", DesignName/binary>>, #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), @@ -58,7 +118,7 @@ handle_doc_update_req(#httpd{ handle_doc_update_req(#httpd{ method = 'POST', path_parts=[_DbName, _Design, DesignName, _Update, UpdateName] - }=Req, Db) -> + }=Req, Db, _) -> DesignId = <<"_design/", DesignName/binary>>, #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), @@ -67,57 +127,38 @@ handle_doc_update_req(#httpd{ handle_doc_update_req(#httpd{ path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName, _DocId] - }=Req, _Db) -> + }=Req, _Db, _) -> send_method_not_allowed(Req, "PUT"); handle_doc_update_req(#httpd{ path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName] - }=Req, _Db) -> + }=Req, _Db, _) -> send_method_not_allowed(Req, "POST"); -handle_doc_update_req(Req, _Db) -> +handle_doc_update_req(Req, _Db, _) -> send_error(Req, 404, <<"update_error">>, <<"Invalid path.">>). - - - -handle_doc_show(Req, DesignName, ShowName, DocId, Db) -> - DesignId = <<"_design/", DesignName/binary>>, - #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), - Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), - ShowSrc = couch_util:get_nested_json_value({Props}, [<<"shows">>, ShowName]), - Doc = case DocId of - nil -> nil; - _ -> - try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of - FoundDoc -> FoundDoc - catch - _ -> nil - end - end, - send_doc_show_response(Lang, ShowSrc, DocId, Doc, Req, Db). - % view-list request with view and list from same design doc. handle_view_list_req(#httpd{method='GET', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) -> + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db, _) -> handle_view_list(Req, DesignName, ListName, DesignName, ViewName, Db, nil); % view-list request with view and list from different design docs. 
handle_view_list_req(#httpd{method='GET', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewDesignName, ViewName]}=Req, Db) -> + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewDesignName, ViewName]}=Req, Db, _) -> handle_view_list(Req, DesignName, ListName, ViewDesignName, ViewName, Db, nil); -handle_view_list_req(#httpd{method='GET'}=Req, _Db) -> +handle_view_list_req(#httpd{method='GET'}=Req, _Db, _) -> send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>); handle_view_list_req(#httpd{method='POST', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db) -> + path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db, _) -> ReqBody = chttpd:body(Req), {Props2} = ?JSON_DECODE(ReqBody), Keys = couch_util:get_value(<<"keys">>, Props2, nil), handle_view_list(Req#httpd{req_body=ReqBody}, DesignName, ListName, DesignName, ViewName, Db, Keys); -handle_view_list_req(Req, _Db) -> +handle_view_list_req(Req, _Db, _) -> send_method_not_allowed(Req, "GET,POST,HEAD"). handle_view_list(Req, ListDesignName, ListName, ViewDesignName, ViewName, Db, Keys) -> diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 6984f3e6..6d93a5b9 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -13,7 +13,7 @@ -module(chttpd_view). -include("chttpd.hrl"). --export([handle_view_req/2,handle_temp_view_req/2,handle_db_view_req/2]). +-export([handle_view_req/3,handle_temp_view_req/2]). -export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]). -export([make_view_fold_fun/6, finish_view_fold/3, view_row_obj/3]). @@ -82,11 +82,11 @@ extract_view_type(ViewName, [View|Rest], IsReduce) -> end. handle_view_req(#httpd{method='GET', - path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> + path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db, DDoc) -> design_doc_view(Req, Db, DName, ViewName, nil); handle_view_req(#httpd{method='POST', - path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db) -> + path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db, DDoc) -> {Fields} = chttpd:json_body_obj(Req), case couch_util:get_value(<<"keys">>, Fields) of Keys when is_list(Keys) -> @@ -95,50 +95,7 @@ handle_view_req(#httpd{method='POST', throw({bad_request, "`keys` body member must be an array."}) end; -handle_view_req(Req, _Db) -> - send_method_not_allowed(Req, "GET,POST,HEAD"). 
- -handle_db_view_req(#httpd{method='GET', - path_parts=[_Db, _View, DName, ViewName]}=Req, Db) -> - QueryArgs = chttpd_view:parse_view_params(Req, nil, nil), - #view_query_args{ - list = ListName - } = QueryArgs, - ?LOG_DEBUG("ici ~p", [ListName]), - case ListName of - nil -> chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil); - _ -> - chttpd_show:handle_view_list(Req, DName, ListName, DName, ViewName, Db, nil) - end; - -handle_db_view_req(#httpd{method='POST', - path_parts=[_Db, _View, DName, ViewName]}=Req, Db) -> - QueryArgs = chttpd_view:parse_view_params(Req, nil, nil), - #view_query_args{ - list = ListName - } = QueryArgs, - case ListName of - nil -> - {Fields} = chttpd:json_body_obj(Req), - case couch_util:get_value(<<"keys">>, Fields, nil) of - nil -> - Fmt = "POST to view ~p/~p in database ~p with no keys member.", - ?LOG_DEBUG(Fmt, [DName, ViewName, Db]), - chttpd_view:design_doc_view(Req, Db, DName, ViewName, nil); - Keys when is_list(Keys) -> - chttpd_view:design_doc_view(Req, Db, DName, ViewName, Keys); - _ -> - throw({bad_request, "`keys` member must be a array."}) - end; - _ -> - ReqBody = chttpd:body(Req), - {Props2} = ?JSON_DECODE(ReqBody), - Keys = couch_util:get_value(<<"keys">>, Props2, nil), - chttpd_show:handle_view_list(Req#httpd{req_body=ReqBody}, - DName, ListName, DName, ViewName, Db, Keys) - end; - -handle_db_view_req(Req, _Db) -> +handle_view_req(Req, _Db, _DDoc) -> send_method_not_allowed(Req, "GET,POST,HEAD"). handle_temp_view_req(#httpd{method='POST'}=Req, Db) -> -- cgit v1.2.3 From af93b3f4b32f95d7a915540a2e6f49607bea6082 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 22 Jun 2010 10:05:51 -0400 Subject: ddoc is already opened for us now --- src/chttpd_view.erl | 43 ++++++++++++++++++------------------------- 1 file changed, 18 insertions(+), 25 deletions(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 6d93a5b9..c87d5a73 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -25,27 +25,20 @@ start_json_response/2, start_json_response/3, end_json_response/1, send_chunked_error/2]). -design_doc_view(Req, Db, GroupId, ViewName, Keys) -> - % TODO open the ddoc once, not twice (here and fabric) - DesignId = <<"_design/", GroupId/binary>>, - case fabric:open_doc(Db, DesignId, []) of - {ok, DDoc} -> - Group = couch_view_group:design_doc_to_view_group(#db{name=Db}, DDoc), - IsReduce = get_reduce_type(Req), - ViewType = extract_view_type(ViewName, Group#group.views, IsReduce), - QueryArgs = parse_view_params(Req, Keys, ViewType), - % TODO proper calculation of etag - % Etag = view_group_etag(ViewGroup, Db, Keys), - Etag = couch_uuids:new(), - couch_stats_collector:increment({httpd, view_reads}), - chttpd:etag_respond(Req, Etag, fun() -> - {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), - CB = fun view_callback/2, - fabric:query_view(Db, DDoc, ViewName, QueryArgs, CB, {nil, Resp}) - end); - {not_found, Reason} -> - throw({not_found, Reason}) - end. 
+design_doc_view(Req, Db, DDoc, ViewName, Keys) -> + Group = couch_view_group:design_doc_to_view_group(#db{name=Db}, DDoc), + IsReduce = get_reduce_type(Req), + ViewType = extract_view_type(ViewName, Group#group.views, IsReduce), + QueryArgs = parse_view_params(Req, Keys, ViewType), + % TODO proper calculation of etag + % Etag = view_group_etag(ViewGroup, Db, Keys), + Etag = couch_uuids:new(), + couch_stats_collector:increment({httpd, view_reads}), + chttpd:etag_respond(Req, Etag, fun() -> + {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), + CB = fun view_callback/2, + fabric:query_view(Db, DDoc, ViewName, QueryArgs, CB, {nil, Resp}) + end). view_callback({total_and_offset, Total, Offset}, {nil, Resp}) -> Chunk = "{\"total_rows\":~p,\"offset\":~p,\"rows\":[\r\n", @@ -82,15 +75,15 @@ extract_view_type(ViewName, [View|Rest], IsReduce) -> end. handle_view_req(#httpd{method='GET', - path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db, DDoc) -> - design_doc_view(Req, Db, DName, ViewName, nil); + path_parts=[_, _, _, _, ViewName]}=Req, Db, DDoc) -> + design_doc_view(Req, Db, DDoc, ViewName, nil); handle_view_req(#httpd{method='POST', - path_parts=[_Db, _Design, DName, _View, ViewName]}=Req, Db, DDoc) -> + path_parts=[_, _, _, _, ViewName]}=Req, Db, DDoc) -> {Fields} = chttpd:json_body_obj(Req), case couch_util:get_value(<<"keys">>, Fields) of Keys when is_list(Keys) -> - design_doc_view(Req, Db, DName, ViewName, Keys); + design_doc_view(Req, Db, DDoc, ViewName, Keys); _ -> throw({bad_request, "`keys` body member must be an array."}) end; -- cgit v1.2.3 From 1a7f56db98becf1bed2f7a91c18184f68fce68ef Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 22 Jun 2010 10:40:31 -0400 Subject: fix _update handlers --- src/chttpd_show.erl | 115 ++++++++++++++++------------------------------------ 1 file changed, 36 insertions(+), 79 deletions(-) diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index f74b9dd6..dbb99437 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -100,44 +100,49 @@ get_fun_key(#doc{body={Props}}, Type, Name) -> Src = couch_util:get_nested_json_value({Props}, [Type, Name]), {Lang, Src}. 
-handle_doc_update_req(#httpd{ - method = 'PUT', - path_parts=[_DbName, _Design, DesignName, _Update, UpdateName, DocId] - }=Req, Db, _) -> - DesignId = <<"_design/", DesignName/binary>>, - #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), - Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), - UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]), - Doc = try chttpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of - FoundDoc -> FoundDoc - catch - _ -> nil - end, - send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db); - -handle_doc_update_req(#httpd{ - method = 'POST', - path_parts=[_DbName, _Design, DesignName, _Update, UpdateName] - }=Req, Db, _) -> - DesignId = <<"_design/", DesignName/binary>>, - #doc{body={Props}} = chttpd_db:couch_doc_open(Db, DesignId, nil, []), - Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), - UpdateSrc = couch_util:get_nested_json_value({Props}, [<<"updates">>, UpdateName]), - send_doc_update_response(Lang, UpdateSrc, nil, nil, Req, Db); +% /db/_design/foo/update/bar/docid +% updates a doc based on a request +% handle_doc_update_req(#httpd{method = 'GET'}=Req, _Db, _DDoc) -> +% % anything but GET +% send_method_not_allowed(Req, "POST,PUT,DELETE,ETC"); handle_doc_update_req(#httpd{ - path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName, _DocId] - }=Req, _Db, _) -> - send_method_not_allowed(Req, "PUT"); + path_parts=[_, _, _, _, UpdateName, DocId] + }=Req, Db, DDoc) -> + Doc = maybe_open_doc(Db, DocId), + send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId); handle_doc_update_req(#httpd{ - path_parts=[_DbName, _Design, _DesignName, _Update, _UpdateName] - }=Req, _Db, _) -> - send_method_not_allowed(Req, "POST"); + path_parts=[_, _, _, _, UpdateName] + }=Req, Db, DDoc) -> + send_doc_update_response(Req, Db, DDoc, UpdateName, nil, null); -handle_doc_update_req(Req, _Db, _) -> +handle_doc_update_req(Req, _Db, _DDoc) -> send_error(Req, 404, <<"update_error">>, <<"Invalid path.">>). +send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) -> + JsonReq = chttpd_external:json_req_obj(Req, Db, DocId), + JsonDoc = couch_query_servers:json_doc(Doc), + Cmd = [<<"updates">>, UpdateName], + case couch_query_servers:ddoc_prompt(DDoc, Cmd, [JsonDoc, JsonReq]) of + [<<"up">>, {NewJsonDoc}, JsonResp] -> + case chttpd:header_value(Req, "X-Couch-Full-Commit", "false") of + "true" -> + Options = [full_commit, {user_ctx, Req#httpd.user_ctx}]; + _ -> + Options = [{user_ctx, Req#httpd.user_ctx}] + end, + NewDoc = couch_doc:from_json_obj({NewJsonDoc}), + Code = 201, + {ok, _NewRev} = fabric:update_doc(Db, NewDoc, Options); + [<<"up">>, _Other, JsonResp] -> + Code = 200 + end, + JsonResp2 = json_apply_field({<<"code">>, Code}, JsonResp), + % todo set location field + chttpd_external:send_external_response(Req, JsonResp2). + + % view-list request with view and list from same design doc. handle_view_list_req(#httpd{method='GET', path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db, _) -> @@ -452,54 +457,6 @@ render_head_for_empty_list(StartListRespFun, Req, Etag, null) -> render_head_for_empty_list(StartListRespFun, Req, Etag, TotalRows) -> StartListRespFun(Req, Etag, TotalRows, null, []). 
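The update prompt above mirrors the show prompt but returns two payloads: the document to write (or something other than a doc when nothing should be saved) and the response object. The status code follows from which shape came back, roughly:

    % [<<"up">>, {NewJsonDoc}, JsonResp] -> doc written via fabric, 201
    % [<<"up">>, _Other, JsonResp]       -> nothing written, 200
    update_status([<<"up">>, {_NewJsonDoc}, _JsonResp]) -> 201;
    update_status([<<"up">>, _Other, _JsonResp])        -> 200.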
-send_doc_show_response(Lang, ShowSrc, DocId, nil, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) -> - % compute etag with no doc - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd:make_etag({Lang, ShowSrc, nil, Accept, UserCtx}), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc, - DocId, nil, Req, Db), - JsonResp = apply_etag(ExternalResp, CurrentEtag), - chttpd_external:send_external_response(Req, JsonResp) - end); - -send_doc_show_response(Lang, ShowSrc, DocId, #doc{revs=Revs}=Doc, #httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Db) -> - % calculate the etag - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd:make_etag({Lang, ShowSrc, Revs, Accept, UserCtx}), - % We know our etag now - chttpd:etag_respond(Req, CurrentEtag, fun() -> - [<<"resp">>, ExternalResp] = couch_query_servers:render_doc_show(Lang, ShowSrc, - DocId, Doc, Req, Db), - JsonResp = apply_etag(ExternalResp, CurrentEtag), - chttpd_external:send_external_response(Req, JsonResp) - end). - -send_doc_update_response(Lang, UpdateSrc, DocId, Doc, Req, Db) -> - case couch_query_servers:render_doc_update(Lang, UpdateSrc, - DocId, Doc, Req, Db) of - [<<"up">>, {NewJsonDoc}, JsonResp] -> - Options = case chttpd:header_value(Req, "X-Couch-Full-Commit", "false") of - "true" -> - [full_commit]; - _ -> - [] - end, - NewDoc = couch_doc:from_json_obj({NewJsonDoc}), - Code = 201, - % todo set location field - {ok, _NewRev} = fabric:update_doc(Db, NewDoc, Options); - [<<"up">>, _Other, JsonResp] -> - Code = 200, - ok - end, - JsonResp2 = json_apply_field({<<"code">>, Code}, JsonResp), - chttpd_external:send_external_response(Req, JsonResp2). - % Maybe this is in the proplists API % todo move to couch_util json_apply_field(H, {L}) -> -- cgit v1.2.3 From a73edef9c693bb2fd29ba2b13a7a52b81afbed95 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 24 Jun 2010 17:38:08 -0700 Subject: finally implemented _list --- src/chttpd_external.erl | 6 +- src/chttpd_show.erl | 391 +++++++++++++----------------------------------- 2 files changed, 107 insertions(+), 290 deletions(-) diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index fa9e7025..53420211 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -110,11 +110,11 @@ json_query_keys({Json}) -> json_query_keys([], Acc) -> {lists:reverse(Acc)}; json_query_keys([{<<"startkey">>, Value} | Rest], Acc) -> - json_query_keys(Rest, [{<<"startkey">>, couch_util:json_decode(Value)}|Acc]); + json_query_keys(Rest, [{<<"startkey">>, ?JSON_DECODE(Value)}|Acc]); json_query_keys([{<<"endkey">>, Value} | Rest], Acc) -> - json_query_keys(Rest, [{<<"endkey">>, couch_util:json_decode(Value)}|Acc]); + json_query_keys(Rest, [{<<"endkey">>, ?JSON_DECODE(Value)}|Acc]); json_query_keys([{<<"key">>, Value} | Rest], Acc) -> - json_query_keys(Rest, [{<<"key">>, couch_util:json_decode(Value)}|Acc]); + json_query_keys(Rest, [{<<"key">>, ?JSON_DECODE(Value)}|Acc]); json_query_keys([Term | Rest], Acc) -> json_query_keys(Rest, [Term|Acc]). diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index dbb99437..37143386 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -12,8 +12,7 @@ -module(chttpd_show). 
--export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3, - handle_view_list/7, get_fun_key/3]). +-export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3]). -include("chttpd.hrl"). @@ -22,6 +21,15 @@ start_json_response/2,send_chunk/2,last_chunk/1,send_chunked_error/2, start_chunked_response/3, send_error/4]). +-record(lacc, { + req, + resp = nil, + qserver, + lname, + db, + etag +}). + % /db/_design/foo/_show/bar/docid % show converts a json doc to a response of any content-type. % it looks up the doc an then passes it to the query server. @@ -95,11 +103,6 @@ show_etag(#httpd{user_ctx=UserCtx}=Req, Doc, DDoc, More) -> couch_httpd:make_etag({couch_httpd:doc_etag(DDoc), DocPart, Accept, UserCtx#user_ctx.roles, More}). -get_fun_key(#doc{body={Props}}, Type, Name) -> - Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>), - Src = couch_util:get_nested_json_value({Props}, [Type, Name]), - {Lang, Src}. - % /db/_design/foo/update/bar/docid % updates a doc based on a request % handle_doc_update_req(#httpd{method = 'GET'}=Req, _Db, _DDoc) -> @@ -145,317 +148,131 @@ send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) -> % view-list request with view and list from same design doc. handle_view_list_req(#httpd{method='GET', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db, _) -> - handle_view_list(Req, DesignName, ListName, DesignName, ViewName, Db, nil); + path_parts=[_, _, DesignName, _, ListName, ViewName]}=Req, Db, DDoc) -> + handle_view_list(Req, Db, DDoc, ListName, {DesignName, ViewName}, nil); % view-list request with view and list from different design docs. handle_view_list_req(#httpd{method='GET', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewDesignName, ViewName]}=Req, Db, _) -> - handle_view_list(Req, DesignName, ListName, ViewDesignName, ViewName, Db, nil); + path_parts=[_, _, _, _, ListName, DesignName, ViewName]}=Req, Db, DDoc) -> + handle_view_list(Req, Db, DDoc, ListName, {DesignName, ViewName}, nil); -handle_view_list_req(#httpd{method='GET'}=Req, _Db, _) -> +handle_view_list_req(#httpd{method='GET'}=Req, _Db, _DDoc) -> send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>); handle_view_list_req(#httpd{method='POST', - path_parts=[_DbName, _Design, DesignName, _List, ListName, ViewName]}=Req, Db, _) -> - ReqBody = chttpd:body(Req), + path_parts=[_, _, DesignName, _, ListName, ViewName]}=Req, Db, DDoc) -> + ReqBody = couch_httpd:body(Req), {Props2} = ?JSON_DECODE(ReqBody), - Keys = couch_util:get_value(<<"keys">>, Props2, nil), - handle_view_list(Req#httpd{req_body=ReqBody}, DesignName, ListName, DesignName, ViewName, Db, Keys); + Keys = proplists:get_value(<<"keys">>, Props2, nil), + handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, + {DesignName, ViewName}, Keys); -handle_view_list_req(Req, _Db, _) -> - send_method_not_allowed(Req, "GET,POST,HEAD"). 
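handle_view_list/6 above hands fabric:query_view/6 the list_callback/2 fun defined further down in this diff. For orientation only (not part of the patch): the callback normally sees a total_and_offset message, then row messages, then complete, although the sorted=false comment below shows total_and_offset can also arrive after rows have started. A toy driver with invented row data:

example_message_order() ->
    % Stand-in callback that just records the messages it was given.
    Callback = fun(Msg, Acc) -> {ok, [Msg | Acc]} end,
    Msgs = [{total_and_offset, 2, 0},
            {row, {[{id, <<"doc1">>}, {key, 1}, {value, null}]}},
            {row, {[{id, <<"doc2">>}, {key, 2}, {value, null}]}},
            complete],
    {ok, Seen} = lists:foldl(fun(Msg, {ok, Acc}) -> Callback(Msg, Acc) end,
                             {ok, []}, Msgs),
    lists:reverse(Seen).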
- -handle_view_list(Req, ListDesignName, ListName, ViewDesignName, ViewName, Db, Keys) -> - ListDesignId = <<"_design/", ListDesignName/binary>>, - #doc{body={ListProps}} = chttpd_db:couch_doc_open(Db, ListDesignId, nil, []), - if - ViewDesignName == ListDesignName -> - ViewProps = ListProps, - ViewDesignId = ListDesignId; - true -> - ViewDesignId = <<"_design/", ViewDesignName/binary>>, - #doc{body={ViewProps}} = chttpd_db:couch_doc_open(Db, ViewDesignId, nil, []) - end, +handle_view_list_req(#httpd{method='POST', + path_parts=[_, _, _, _, ListName, DesignName, ViewName]}=Req, Db, DDoc) -> + ReqBody = couch_httpd:body(Req), + {Props2} = ?JSON_DECODE(ReqBody), + Keys = proplists:get_value(<<"keys">>, Props2, nil), + handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, + {DesignName, ViewName}, Keys); - ViewLang = couch_util:get_value(<<"language">>, ViewProps, <<"javascript">>), - ListSrc = couch_util:get_nested_json_value({ListProps}, [<<"lists">>, ListName]), - Group = couch_view_group:design_doc_to_view_group(Db, #doc{id=ViewDesignId, - body={ViewProps}}), - send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db, - Group, Keys). - % send_view_list_response(ViewLang, ListSrc, ViewName, ViewDesignId, Req, Db, Keys). +handle_view_list_req(#httpd{method='POST'}=Req, _Db, _DDoc) -> + send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>); +handle_view_list_req(Req, _Db, _DDoc) -> + send_method_not_allowed(Req, "GET,POST,HEAD"). -send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Group, Keys) -> +handle_view_list(Req, Db, DDoc, LName, {ViewDesignName, ViewName}, Keys) -> + {ok, VDoc} = fabric:open_doc(Db, <<"_design/", ViewDesignName/binary>>, []), + Group = couch_view_group:design_doc_to_view_group(Db, VDoc), IsReduce = chttpd_view:get_reduce_type(Req), ViewType = chttpd_view:extract_view_type(ViewName, Group#group.views, IsReduce), QueryArgs = chttpd_view:parse_view_params(Req, Keys, ViewType), - {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), - StartListRespFun = make_map_start_resp_fun(QueryServer, Db), + CB = fun list_callback/2, Etag = couch_uuids:new(), chttpd:etag_respond(Req, Etag, fun() -> - {ok, Total, Result} = ?COUCH:list_view(Req, Db, DesignId, ViewName, - Keys, QueryArgs, QueryServer), - finish_list(Req, QueryServer, Etag, Result, StartListRespFun, Total) + couch_query_servers:with_ddoc_proc(DDoc, fun(QServer) -> + Acc0 = #lacc{ + lname = LName, + req = Req, + qserver = QServer, + db = Db, + etag = Etag + }, + fabric:query_view(Db, VDoc, ViewName, QueryArgs, CB, Acc0) + end) end). 
-send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db, Keys) -> - Stale = chttpd_view:get_stale_type(Req), - Reduce = chttpd_view:get_reduce_type(Req), - case ?COUCH:get_map_view(Db, DesignId, ViewName, Stale) of - {ok, View, Group} -> - QueryArgs = chttpd_view:parse_view_params(Req, Keys, map), - output_map_list(Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys); - {not_found, _Reason} -> - case ?COUCH:get_reduce_view(Db, DesignId, ViewName, Stale) of - {ok, ReduceView, Group} -> - case Reduce of - false -> - QueryArgs = chttpd_view:parse_view_params( - Req, Keys, map_red - ), - MapView = ?COUCH:extract_map_view(ReduceView), - output_map_list(Req, Lang, ListSrc, MapView, Group, Db, QueryArgs, Keys); - _ -> - QueryArgs = chttpd_view:parse_view_params( - Req, Keys, reduce - ), - output_reduce_list(Req, Lang, ListSrc, ReduceView, Group, Db, QueryArgs, Keys) - end; - {not_found, Reason} -> - throw({not_found, Reason}) - end - end. - - -output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_key = StartKey, - start_docid = StartDocId - } = QueryArgs, - {ok, RowCount} = ?COUCH:get_row_count(View), - Start = {StartKey, StartDocId}, - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - % get the os process here - % pass it into the view fold with closures - {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), - - StartListRespFun = make_map_start_resp_fun(QueryServer, Db), - SendListRowFun = make_map_send_row_fun(QueryServer), - - FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, - #view_fold_helper_funs{ - reduce_count = fun ?COUCH:reduce_to_count/1, - start_response = StartListRespFun, - send_row = SendListRowFun - }), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - {ok, FoldResult} = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit), - finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount) - end); - -output_map_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_docid = StartDocId - } = QueryArgs, - {ok, RowCount} = ?COUCH:get_row_count(View), - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - % get the os process here - % pass it into the view fold with closures - {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), - - StartListRespFun = make_map_start_resp_fun(QueryServer, Db), - SendListRowFun = make_map_send_row_fun(QueryServer), - - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - {ok, FoldResult} = lists:foldl( - fun(Key, {ok, FoldAcc}) -> - FoldlFun = chttpd_view:make_view_fold_fun(Req, QueryArgs#view_query_args{ - start_key = Key, - end_key = Key - }, CurrentEtag, Db, RowCount, - #view_fold_helper_funs{ - reduce_count = fun ?COUCH:reduce_to_count/1, - start_response = StartListRespFun, - send_row = SendListRowFun - }), - ?COUCH:view_fold(View, {Key, 
StartDocId}, Dir, FoldlFun, FoldAcc) - end, {ok, FoldAccInit}, Keys), - finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, RowCount) - end). - -make_map_start_resp_fun(QueryServer, Db) -> - fun(Req, Etag, TotalRows, Offset, _Acc) -> - Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}]}, - start_list_resp(QueryServer, Req, Db, Head, Etag) - end. - -make_reduce_start_resp_fun(QueryServer, _Req, Db, _CurrentEtag) -> - fun(Req2, Etag, _Acc) -> - start_list_resp(QueryServer, Req2, Db, {[]}, Etag) - end. - -start_list_resp(QueryServer, Req, Db, Head, Etag) -> - [<<"start">>,Chunks,JsonResp] = couch_query_servers:render_list_head(QueryServer, - Req, Db, Head), +list_callback({total_and_offset, Total, Offset}, #lacc{resp=nil} = Acc) -> + start_list_resp({[{<<"total_rows">>, Total}, {<<"offset">>, Offset}]}, Acc); +list_callback({total_and_offset, _, _}, Acc) -> + % a sorted=false view where the message came in late. Ignore. + {ok, Acc}; +list_callback({row, Row}, #lacc{resp=nil} = Acc) -> + % first row of a reduce view, or a sorted=false view + {ok, NewAcc} = start_list_resp({[]}, Acc), + send_list_row(Row, NewAcc); +list_callback({row, Row}, Acc) -> + send_list_row(Row, Acc); +list_callback(complete, Acc) -> + #lacc{qserver = {Proc, _}, resp = Resp0} = Acc, + if Resp0 =:= nil -> + {ok, #lacc{resp = Resp}} = start_list_resp({[]}, Acc); + true -> + Resp = Resp0 + end, + [<<"end">>, Chunk] = couch_query_servers:proc_prompt(Proc, [<<"list_end">>]), + send_non_empty_chunk(Resp, Chunk), + couch_httpd:last_chunk(Resp), + {ok, Resp}; +list_callback({error, Reason}, {_, Resp}) -> + chttpd:send_chunked_error(Resp, {error, Reason}). + +start_list_resp(Head, Acc) -> + #lacc{ + req = Req, + db = Db, + qserver = QServer, + lname = LName, + etag = Etag + } = Acc, + + % use a separate process because we're already in a receive loop, and + % json_req_obj calls fabric:get_db_info() + spawn_monitor(fun() -> exit(chttpd_external:json_req_obj(Req, Db)) end), + receive {'DOWN', _, _, _, JsonReq} -> ok end, + + [<<"start">>,Chunk,JsonResp] = couch_query_servers:ddoc_proc_prompt(QServer, + [<<"lists">>, LName], [Head, JsonReq]), JsonResp2 = apply_etag(JsonResp, Etag), #extern_resp_args{ code = Code, ctype = CType, headers = ExtHeaders - } = chttpd_external:parse_external_response(JsonResp2), - JsonHeaders = chttpd_external:default_or_content_type(CType, ExtHeaders), + } = couch_httpd_external:parse_external_response(JsonResp2), + JsonHeaders = couch_httpd_external:default_or_content_type(CType, ExtHeaders), {ok, Resp} = start_chunked_response(Req, Code, JsonHeaders), - {ok, Resp, ?b2l(?l2b(Chunks))}. - -make_map_send_row_fun(QueryServer) -> - fun(Resp, Db, Row, IncludeDocs, RowFront) -> - send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDocs) - end. + send_non_empty_chunk(Resp, Chunk), + {ok, Acc#lacc{resp=Resp}}. -make_reduce_send_row_fun(QueryServer, Db) -> - fun(Resp, Row, RowFront) -> - send_list_row(Resp, QueryServer, Db, Row, RowFront, false) - end. 
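One detail of start_list_resp/2 earlier in this hunk deserves a note: json_req_obj is evaluated in a short-lived helper process and its result is collected from the monitor's 'DOWN' message, because the caller is already inside fabric's receive loop. The generic shape of that trick, as a hedged sketch:

call_in_fresh_process(Fun) when is_function(Fun, 0) ->
    % Run Fun in a throwaway process and recover its return value from the
    % exit reason carried by the 'DOWN' message. A crash in Fun would surface
    % here as the crash reason instead of a normal result.
    {_Pid, Ref} = spawn_monitor(fun() -> exit(Fun()) end),
    receive
        {'DOWN', Ref, process, _, Result} -> Result
    end.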
- -send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDoc) -> - try - [Go,Chunks] = couch_query_servers:render_list_row(QueryServer, Db, Row, IncludeDoc), - Chunk = RowFront ++ ?b2l(?l2b(Chunks)), +send_list_row(Row, #lacc{qserver = {Proc, _}, resp = Resp} = Acc) -> + try couch_query_servers:proc_prompt(Proc, [<<"list_row">>, Row]) of + [<<"chunks">>, Chunk] -> + send_non_empty_chunk(Resp, Chunk), + {ok, Acc}; + [<<"end">>, Chunk] -> send_non_empty_chunk(Resp, Chunk), - case Go of - <<"chunks">> -> - {ok, ""}; - <<"end">> -> - {stop, stop} - end - catch - throw:Error -> - send_chunked_error(Resp, Error), - throw({already_sent, Resp, Error}) + couch_httpd:last_chunk(Resp), + {stop, Resp} + catch Error -> + chttpd:send_chunked_error(Resp, Error), + {stop, Resp} end. +send_non_empty_chunk(_, []) -> + ok; send_non_empty_chunk(Resp, Chunk) -> - case Chunk of - [] -> ok; - _ -> send_chunk(Resp, Chunk) - end. - -output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, nil) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_key = StartKey, - start_docid = StartDocId, - end_key = EndKey, - end_docid = EndDocId, - group_level = GroupLevel - } = QueryArgs, - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx}), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - % get the os process here - % pass it into the view fold with closures - {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), - StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag), - SendListRowFun = make_reduce_send_row_fun(QueryServer, Db), - - {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req, - GroupLevel, QueryArgs, CurrentEtag, - #reduce_fold_helper_funs{ - start_response = StartListRespFun, - send_row = SendListRowFun - }), - FoldAccInit = {Limit, SkipCount, undefined, []}, - {ok, FoldResult} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId}, - {EndKey, EndDocId}, GroupRowsFun, RespFun, - FoldAccInit), - finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null) - end); - -output_reduce_list(#httpd{mochi_req=MReq, user_ctx=UserCtx}=Req, Lang, ListSrc, View, Group, Db, QueryArgs, Keys) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_docid = StartDocId, - end_docid = EndDocId, - group_level = GroupLevel - } = QueryArgs, - Headers = MReq:get(headers), - Hlist = mochiweb_headers:to_list(Headers), - Accept = couch_util:get_value('Accept', Hlist), - CurrentEtag = chttpd_view:view_group_etag(Group, Db, {Lang, ListSrc, Accept, UserCtx, Keys}), - - chttpd:etag_respond(Req, CurrentEtag, fun() -> - % get the os process here - % pass it into the view fold with closures - {ok, QueryServer} = couch_query_servers:start_view_list(Lang, ListSrc), - StartListRespFun = make_reduce_start_resp_fun(QueryServer, Req, Db, CurrentEtag), - SendListRowFun = make_reduce_send_row_fun(QueryServer, Db), - - {ok, GroupRowsFun, RespFun} = chttpd_view:make_reduce_fold_funs(Req, - GroupLevel, QueryArgs, CurrentEtag, - #reduce_fold_helper_funs{ - start_response = StartListRespFun, - send_row = SendListRowFun - }), - FoldAccInit = {Limit, SkipCount, undefined, []}, - {ok, FoldResult} = lists:foldl( - fun(Key, {ok, FoldAcc}) -> - ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId}, - {Key, 
EndDocId}, GroupRowsFun, RespFun, FoldAcc) - end, {ok, FoldAccInit}, Keys), - finish_list(Req, QueryServer, CurrentEtag, FoldResult, StartListRespFun, null) - end). - -finish_list(Req, QueryServer, Etag, FoldResult, StartFun, TotalRows) -> - FoldResult2 = case FoldResult of - {Limit, SkipCount, Response, RowAcc} -> - {Limit, SkipCount, Response, RowAcc, nil}; - Else -> - Else - end, - case FoldResult2 of - {_, _, undefined, _, _} -> - {ok, Resp, BeginBody} = - render_head_for_empty_list(StartFun, Req, Etag, TotalRows), - [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer), - Chunk = BeginBody ++ ?b2l(?l2b(Chunks)), - send_non_empty_chunk(Resp, Chunk); - {_, _, Resp, stop, _} -> - ok; - {_, _, Resp, _, _} -> - [<<"end">>, Chunks] = couch_query_servers:render_list_tail(QueryServer), - send_non_empty_chunk(Resp, ?b2l(?l2b(Chunks))) - end, - couch_query_servers:stop_doc_map(QueryServer), - send_chunk(Resp, []). - - -render_head_for_empty_list(StartListRespFun, Req, Etag, null) -> - StartListRespFun(Req, Etag, []); % for reduce -render_head_for_empty_list(StartListRespFun, Req, Etag, TotalRows) -> - StartListRespFun(Req, Etag, TotalRows, null, []). + send_chunk(Resp, Chunk). % Maybe this is in the proplists API % todo move to couch_util -- cgit v1.2.3 From f1e8fb2466468f6a3d8508de536fbb5a2760b411 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 25 Jun 2010 20:50:37 -0400 Subject: make _ensure_full_commit a no-op in cluster interface --- src/chttpd_db.erl | 25 ++----------------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 0317aed4..211eed2d 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -211,31 +211,10 @@ db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> db_req(#httpd{path_parts=[_DbName]}=Req, _Db) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,POST"); -db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) -> - UpdateSeq = ?COUCH:get_update_seq(Db), - CommittedSeq = ?COUCH:get_committed_update_seq(Db), - {ok, StartTime} = - case chttpd:qs_value(Req, "seq") of - undefined -> - committed = couch_batch_save:commit_now(Db#db.name, Db#db.user_ctx), - ?COUCH:ensure_full_commit(Db); - RequiredStr -> - RequiredSeq = list_to_integer(RequiredStr), - if RequiredSeq > UpdateSeq -> - throw({bad_request, - "can't do a full commit ahead of current update_seq"}); - RequiredSeq > CommittedSeq -> - % user asked for an explicit sequence, don't commit any batches - ?COUCH:ensure_full_commit(Db); - true -> - %% hack to make sure we always get cluster max time - APK - ?COUCH:ensure_full_commit(Db) - % {ok, Db#db.instance_start_time} - end - end, +db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) -> send_json(Req, 201, {[ {ok, true}, - {instance_start_time, StartTime} + {instance_start_time, <<"0">>} ]}); db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) -> -- cgit v1.2.3 From de56d07198d5b52c461a0eef4c91c9a02d433310 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 25 Jun 2010 21:24:24 -0400 Subject: basic support for filtered _changes --- src/chttpd_db.erl | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 211eed2d..8a152372 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -47,7 +47,8 @@ handle_request(#httpd{path_parts=[DbName|RestParts],method=Method, end. 
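The next hunk wires a filter=designname/filtername query parameter into the changes feed. Purely for illustration (names invented), the kind of design document such a parameter points at, written in the EJSON form used throughout this code:

example_filter_ddoc() ->
    % A request like GET /db/_changes?filter=app/important would select the
    % "important" function under "filters" in _design/app.
    {[
        {<<"_id">>, <<"_design/app">>},
        {<<"language">>, <<"javascript">>},
        {<<"filters">>, {[
            {<<"important">>,
             <<"function(doc, req) { return doc.priority == 'high'; }">>}
        ]}}
    ]}.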
handle_changes_req(#httpd{method='GET'}=Req, Db) -> - ChangesArgs = parse_changes_query(Req), + #changes_args{filter=Filter} = Args0 = parse_changes_query(Req), + ChangesArgs = Args0#changes_args{filter=make_filter_fun(Filter, Req, Db)}, case ChangesArgs#changes_args.feed of "normal" -> T0 = now(), @@ -799,6 +800,34 @@ parse_changes_query(Req) -> end end, #changes_args{}, chttpd:qs(Req)). +make_filter_fun(Filter, _, _) when is_function(Filter, 1) -> + Filter; +make_filter_fun(FilterName, Req, Db) -> + case [?l2b(chttpd:unquote(X)) || X <- string:tokens(FilterName, "/")] of + [DName, FName] -> + case fabric:open_doc(Db, <<"_design/", DName/binary>>, []) of + {ok, #doc{body={Props}} = DDoc} -> + couch_util:get_nested_json_value({Props}, [<<"filters">>, FName]), + JsonReq = chttpd_external:json_req_obj(Req, Db), + fun(DocInfos) -> + Docs = [Doc || {ok, Doc} <- [ + {ok, _Doc} = fabric:open_doc(Db, Id, [deleted, conflicts]) + || #doc_info{id=Id} <- DocInfos]], + {ok, Passes} = couch_query_servers:filter_docs( + {json_req,JsonReq}, Db, DDoc, FName, Docs + ), + [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]} + || #doc_info{revs=[#rev_info{rev=Rev}|_]} <- DocInfos, + Pass <- Passes, Pass == true] + end; + Error -> + throw(Error) + end; + _Else -> + throw({bad_request, + "filter parameter must be of the form `designname/filtername`"}) + end. + extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)-> extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev)); extract_header_rev(Req, ExplicitRev) -> -- cgit v1.2.3 From 46a2ebd50abc40a8a81a4ae6b4c48f0c678daadf Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 21 Jun 2010 13:15:59 -0400 Subject: support for cluster -> cluster replication --- src/chttpd_misc.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 6a3261c6..535932ae 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -105,7 +105,7 @@ handle_task_status_req(Req) -> handle_replicate_req(#httpd{method='POST'}=Req) -> PostBody = get(post_body), - try ?COUCH:replicate_db(PostBody, Req#httpd.user_ctx) of + try showroom_api:replicate_db(PostBody, Req#httpd.user_ctx) of {ok, {continuous, RepId}} -> send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]}); {ok, {JsonResults}} -> -- cgit v1.2.3 From 604569c71035ac3b3f65a3fbf660cc555fc12730 Mon Sep 17 00:00:00 2001 From: Jason David Davies Date: Tue, 12 Jan 2010 19:29:23 +0000 Subject: Add utility for verifying hashes. 
git-svn-id: https://svn.apache.org/repos/asf/couchdb/trunk@898477 13f79535-47bb-0310-9956-ffa450edef68 --- src/chttpd_auth.erl | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index b13e12d1..416195d3 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -56,8 +56,9 @@ default_authentication_handler(Req) -> Props -> ExpectedHash = couch_util:get_value(<<"password_sha">>, Props), Salt = couch_util:get_value(<<"salt">>, Props), - case hash_password(?l2b(Password), Salt) of - ExpectedHash -> + PasswordHash = hash_password(?l2b(Password), Salt), + case couch_util:verify(ExpectedHash, PasswordHash) of + true -> Ctx = #user_ctx{ name = couch_util:get_value(<<"username">>, Props), roles = couch_util:get_value(<<"roles">>, Props) @@ -132,8 +133,9 @@ handle_session_req(#httpd{method='POST', mochi_req=MochiReq, user_ctx=Ctx}=Req) false -> Password = extract_password(Form), ExpectedHash = couch_util:get_value(<<"password_sha">>, User), - case hash_password(Password, UserSalt) of - ExpectedHash -> + PasswordHash = hash_password(Password, UserSalt), + case couch_util:verify(ExpectedHash, PasswordHash) of + true -> ok; _Else -> throw({forbidden, <<"Name or password is incorrect.">>}) @@ -270,8 +272,9 @@ cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> UserSalt = couch_util:get_value(<<"salt">>, Result), FullSecret = <>, ExpectedHash = crypto:sha_mac(FullSecret, [User, ":", TimeStr]), - case ?l2b(string:join(HashParts, ":")) of - ExpectedHash -> + PasswordHash = ?l2b(string:join(HashParts, ":")), + case couch_util:verify(ExpectedHash, PasswordHash) of + true -> TimeStamp = erlang:list_to_integer(TimeStr, 16), Timeout = erlang:list_to_integer(couch_config:get( "chttpd_auth", "timeout", "600")), -- cgit v1.2.3 From 6a850802179686bdf1ddddbf59697d1222820d72 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 28 Jun 2010 15:42:16 -0400 Subject: die, users replication, die --- src/chttpd_auth.erl | 54 ++++++++++++++++++++++++----------------------------- 1 file changed, 24 insertions(+), 30 deletions(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 416195d3..4add8ebd 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -163,26 +163,20 @@ handle_session_req(Req) -> send_method_not_allowed(Req, "GET,HEAD,POST,DELETE"). 
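The "Add utility for verifying hashes" patch above swaps direct pattern matching on password and cookie hashes for couch_util:verify/2, so the comparison no longer short-circuits on the first differing byte. A hedged sketch of what such a constant-time check typically looks like (the real couch_util implementation may differ):

verify_sketch(X, Y) when is_binary(X), is_binary(Y) ->
    verify_sketch(binary_to_list(X), binary_to_list(Y));
verify_sketch(X, Y) when is_list(X), is_list(Y) ->
    case length(X) == length(Y) of
        true  -> verify_sketch(X, Y, 0);
        false -> false
    end;
verify_sketch(_, _) ->
    false.

verify_sketch([X | RestX], [Y | RestY], Acc) ->
    % Accumulate differences with bxor/bor so every byte is inspected
    % regardless of where the first mismatch occurs.
    verify_sketch(RestX, RestY, (X bxor Y) bor Acc);
verify_sketch([], [], Acc) ->
    Acc == 0.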
handle_user_req(#httpd{method='POST'}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db"), - {ok, Db} = ensure_users_db_exists(?l2b(DbName)), - Result = create_user(Req, Db), - couch_db:close(Db), - Result; + DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + ensure_users_db_exists(DbName), + create_user(Req, DbName); handle_user_req(#httpd{method=Method, path_parts=[_]}=_Req) when Method == 'PUT' orelse Method == 'DELETE' -> throw({bad_request, <<"Username is missing">>}); handle_user_req(#httpd{method='PUT', path_parts=[_, UserName]}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db"), - {ok, Db} = ensure_users_db_exists(?l2b(DbName)), - Result = update_user(Req, Db, UserName), - couch_db:close(Db), - Result; + DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + ensure_users_db_exists(DbName), + update_user(Req, DbName, UserName); handle_user_req(#httpd{method='DELETE', path_parts=[_, UserName]}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db"), - {ok, Db} = ensure_users_db_exists(?l2b(DbName)), - Result = delete_user(Req, Db, UserName), - couch_db:close(Db), - Result; + DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + ensure_users_db_exists(DbName), + delete_user(Req, DbName, UserName); handle_user_req(Req) -> send_method_not_allowed(Req, "DELETE,POST,PUT"). @@ -210,9 +204,8 @@ get_user(UserName) -> end. load_user_from_db(UserName) -> - DbName = couch_config:get("chttpd_auth", "authentication_db"), - {ok, Db} = ensure_users_db_exists(?l2b(DbName)), - UserProps = case couch_db:open_doc(Db, UserName, []) of + DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + try fabric:open_doc(DbName, UserName, []) of {ok, Doc} -> ?LOG_INFO("cache miss on username ~s", [UserName]), {Props} = couch_doc:to_json_obj(Doc, []), @@ -220,17 +213,18 @@ load_user_from_db(UserName) -> _Else -> ?LOG_INFO("no record of user ~s", [UserName]), nil - end, - couch_db:close(Db), - UserProps. + catch error:database_does_not_exist -> + nil + end. ensure_users_db_exists(DbName) -> - Options = [{user_ctx, #user_ctx{roles=[<<"_admin">>]}}], - case couch_db:open(DbName, Options) of - {ok, Db} -> - {ok, Db}; - {error, _} -> - couch_db:create(DbName, Options) + try fabric:get_doc_count(DbName) of + {ok, N} when is_integer(N) -> + ok; + {error, _} -> + fabric:create_db(DbName, []) + catch error:database_does_not_exist -> + fabric:create_db(DbName, []) end. % internal functions @@ -323,7 +317,7 @@ create_user(#httpd{method='POST', mochi_req=MochiReq}=Req, Db) -> {<<"username">>, UserName} ]} }, - {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + {ok, _Rev} = fabric:update_doc(Db, UserDoc, []), ?LOG_DEBUG("User ~s (~s) with password, ~s created.", [UserName, UserName, Password]), send_response(Req); @@ -351,7 +345,7 @@ delete_user(#httpd{user_ctx=UserCtx}=Req, Db, UserName) -> revs = {Pos, [Rev]}, deleted = true }, - {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + {ok, _Rev} = fabric:update_doc(Db, UserDoc, []), send_response(Req) end. @@ -473,7 +467,7 @@ update_user(#httpd{mochi_req=MochiReq, user_ctx=UserCtx}=Req, Db, UserName) -> {<<"username">>, UserName} ]} }, - {ok, _Rev} = couch_db:update_doc(Db, UserDoc, []), + {ok, _Rev} = fabric:update_doc(Db, UserDoc, []), ?LOG_DEBUG("User ~s updated.", [UserName]), send_response(Req) end. 
-- cgit v1.2.3 From e6d7899a6ebf42cd7d65e6ab6da0b6fdbf49647f Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 28 Jun 2010 16:14:11 -0400 Subject: oops, we already have a #db{} here --- src/chttpd_view.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index c87d5a73..7308f4bd 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -26,7 +26,7 @@ send_chunked_error/2]). design_doc_view(Req, Db, DDoc, ViewName, Keys) -> - Group = couch_view_group:design_doc_to_view_group(#db{name=Db}, DDoc), + Group = couch_view_group:design_doc_to_view_group(Db, DDoc), IsReduce = get_reduce_type(Req), ViewType = extract_view_type(ViewName, Group#group.views, IsReduce), QueryArgs = parse_view_params(Req, Keys, ViewType), -- cgit v1.2.3 From 237b0b07ee225dfee684971a6cb4d1cf691d97b8 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 2 Jul 2010 10:57:45 -0400 Subject: supply proper default values to fabric on db create --- src/chttpd_db.erl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 8a152372..bffc29bf 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -151,8 +151,8 @@ handle_design_info_req(Req, _Db, _DDoc) -> send_method_not_allowed(Req, "GET"). create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> - N = chttpd:qs_value(Req, "n"), - Q = chttpd:qs_value(Req, "q"), + N = chttpd:qs_value(Req, "n", couch_config:get("cluster", "n", "3")), + Q = chttpd:qs_value(Req, "q", couch_config:get("cluster", "q", "8")), case fabric:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of ok -> DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), -- cgit v1.2.3 From 04974d382c5b530bacfead74de18770f346352ab Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 2 Jul 2010 10:58:55 -0400 Subject: support for URL _rewrites --- ebin/chttpd.app | 1 + src/chttpd.erl | 3 +- src/chttpd_rewrite.erl | 418 +++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 421 insertions(+), 1 deletion(-) create mode 100644 src/chttpd_rewrite.erl diff --git a/ebin/chttpd.app b/ebin/chttpd.app index c33b3112..c85e58e8 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -9,6 +9,7 @@ chttpd_external, chttpd_misc, chttpd_oauth, + chttpd_rewrite, chttpd_show, chttpd_stats, chttpd_sup, diff --git a/src/chttpd.erl b/src/chttpd.erl index a4bb1293..93047b80 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -212,7 +212,8 @@ design_url_handlers() -> {<<"_show">>, fun chttpd_show:handle_doc_show_req/3}, {<<"_list">>, fun chttpd_show:handle_view_list_req/3}, {<<"_update">>, fun chttpd_show:handle_doc_update_req/3}, - {<<"_info">>, fun chttpd_db:handle_design_info_req/3} + {<<"_info">>, fun chttpd_db:handle_design_info_req/3}, + {<<"_rewrite">>, fun chttpd_rewrite:handle_rewrite_req/3} ]. % Utilities diff --git a/src/chttpd_rewrite.erl b/src/chttpd_rewrite.erl new file mode 100644 index 00000000..4aeaf31c --- /dev/null +++ b/src/chttpd_rewrite.erl @@ -0,0 +1,418 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the
+% License for the specific language governing permissions and limitations under
+% the License.
+%
+% bind_path is based on bind method from Webmachine
+
+
+%% @doc Module for URL rewriting by pattern matching.
+
+-module(chttpd_rewrite).
+-export([handle_rewrite_req/3]).
+-include("chttpd.hrl").
+
+-define(SEPARATOR, $\/).
+-define(MATCH_ALL, '*').
+
+
+%% doc The http rewrite handler. All rewriting is done from
+%% /dbname/_design/ddocname/_rewrite by default.
+%%
+%% Each rule should be in the rewrites member of the design doc.
+%% Example of a complete rule:
+%%
+%%  {
+%%      ....
+%%      "rewrites": [
+%%      {
+%%          "from": "",
+%%          "to": "index.html",
+%%          "method": "GET",
+%%          "query": {}
+%%      }
+%%      ]
+%%  }
+%%
+%%  from: the path rule used to bind the current URI to the rule. It
+%%  uses pattern matching for that.
+%%
+%%  to: rule used to rewrite the URL. It can contain variables depending on
+%%  binding variables discovered during pattern matching and query args (URL
+%%  args and the query member).
+%%
+%%  method: the request method to bind the rule to; "*" by default.
+%%  query: query args you want to define; they can contain dynamic variables
+%%  bound via the bindings.
+%%
+%%
+%% to and from are paths with patterns. A pattern can be a string starting with
+%% ":" or "*", e.g.:
+%% /somepath/:var/*
+%%
+%% Such a path is converted to an Erlang list by splitting on "/". Each var is
+%% converted to an atom and "*" is converted to the '*' atom. The pattern
+%% matching is done by splitting the request URL on "/" into a list of tokens.
+%% A string pattern will match an equal token. The star atom ('*' in single
+%% quotes) will match any number of tokens, but may only be present as the last
+%% pathterm in a pathspec. If all tokens are matched and all pathterms are
+%% used, then the pathspec matches. It works like Webmachine. Each identified
+%% token is reused in the to rule and in the query.
+%%
+%% The pattern matching is done by first matching the request method to a rule
+%% (all methods match by default, since method is "*" unless specified), then
+%% trying to match the path to one rule. If no rule matches, a 404 error is
+%% returned.
+%%
+%% Once a rule is found the request URL is rewritten using the "to" and
+%% "query" members. The identified tokens are matched to the rule and replace
+%% the vars; if '*' is found in the rule it will contain the remaining part of
+%% the path, if any.
+%% +%% Examples: +%% +%% Dispatch rule URL TO Tokens +%% +%% {"from": "/a/b", /a/b?k=v /some/b?k=v var =:= b +%% "to": "/some/"} k = v +%% +%% {"from": "/a/b", /a/b /some/b?var=b var =:= b +%% "to": "/some/:var"} +%% +%% {"from": "/a", /a /some +%% "to": "/some/*"} +%% +%% {"from": "/a/*", /a/b/c /some/b/c +%% "to": "/some/*"} +%% +%% {"from": "/a", /a /some +%% "to": "/some/*"} +%% +%% {"from": "/a/:foo/*", /a/b/c /some/b/c?foo=b foo =:= b +%% "to": "/some/:foo/*"} +%% +%% {"from": "/a/:foo", /a/b /some/?k=b&foo=b foo =:= b +%% "to": "/some", +%% "query": { +%% "k": ":foo" +%% }} +%% +%% {"from": "/a", /a?foo=b /some/b foo =:= b +%% "to": "/some/:foo", +%% }} + + + +handle_rewrite_req(#httpd{ + path_parts=[DbName, <<"_design">>, DesignName, _Rewrite|PathParts], + method=Method, + mochi_req=MochiReq}=Req, _Db, DDoc) -> + + % we are in a design handler + DesignId = <<"_design/", DesignName/binary>>, + Prefix = <<"/", DbName/binary, "/", DesignId/binary>>, + QueryList = couch_httpd:qs(Req), + QueryList1 = [{to_atom(K), V} || {K, V} <- QueryList], + + #doc{body={Props}} = DDoc, + + % get rules from ddoc + case couch_util:get_value(<<"rewrites">>, Props) of + undefined -> + couch_httpd:send_error(Req, 404, <<"rewrite_error">>, + <<"Invalid path.">>); + Rules -> + % create dispatch list from rules + DispatchList = [make_rule(Rule) || {Rule} <- Rules], + + %% get raw path by matching url to a rule. + RawPath = case try_bind_path(DispatchList, Method, PathParts, + QueryList1) of + no_dispatch_path -> + throw(not_found); + {NewPathParts, Bindings} -> + Parts = [mochiweb_util:quote_plus(X) || X <- NewPathParts], + + % build new path, reencode query args, eventually convert + % them to json + Path = lists:append( + string:join(Parts, [?SEPARATOR]), + case Bindings of + [] -> []; + _ -> [$?, encode_query(Bindings)] + end), + + % if path is relative detect it and rewrite path + case mochiweb_util:safe_relative_path(Path) of + undefined -> + ?b2l(Prefix) ++ "/" ++ Path; + P1 -> + ?b2l(Prefix) ++ "/" ++ P1 + end + + end, + + % normalize final path (fix levels "." and "..") + RawPath1 = ?b2l(iolist_to_binary(normalize_path(RawPath))), + + ?LOG_DEBUG("rewrite to ~p ~n", [RawPath1]), + + % build a new mochiweb request + MochiReq1 = mochiweb_request:new(MochiReq:get(socket), + MochiReq:get(method), + RawPath1, + MochiReq:get(version), + MochiReq:get(headers)), + + % cleanup, It force mochiweb to reparse raw uri. + MochiReq1:cleanup(), + + chttpd:handle_request(MochiReq1) + end. + + + +%% @doc Try to find a rule matching current url. 
If none is found +%% 404 error not_found is raised +try_bind_path([], _Method, _PathParts, _QueryList) -> + no_dispatch_path; +try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) -> + [{PathParts1, Method1}, RedirectPath, QueryArgs] = Dispatch, + case bind_method(Method1, Method) of + true -> + case bind_path(PathParts1, PathParts, []) of + {ok, Remaining, Bindings} -> + Bindings1 = Bindings ++ QueryList, + + % we parse query args from the rule and fill + % it eventually with bindings vars + QueryArgs1 = make_query_list(QueryArgs, Bindings1, []), + + % remove params in QueryLists1 that are already in + % QueryArgs1 + Bindings2 = lists:foldl(fun({K, V}, Acc) -> + K1 = to_atom(K), + KV = case couch_util:get_value(K1, QueryArgs1) of + undefined -> [{K1, V}]; + _V1 -> [] + end, + Acc ++ KV + end, [], Bindings1), + + FinalBindings = Bindings2 ++ QueryArgs1, + NewPathParts = make_new_path(RedirectPath, FinalBindings, + Remaining, []), + {NewPathParts, FinalBindings}; + fail -> + try_bind_path(Rest, Method, PathParts, QueryList) + end; + false -> + try_bind_path(Rest, Method, PathParts, QueryList) + end. + +%% rewriting dynamically the quey list given as query member in +%% rewrites. Each value is replaced by one binding or an argument +%% passed in url. +make_query_list([], _Bindings, Acc) -> + Acc; +make_query_list([{Key, {Value}}|Rest], Bindings, Acc) -> + Value1 = to_json({Value}), + make_query_list(Rest, Bindings, [{to_atom(Key), Value1}|Acc]); +make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_binary(Value) -> + Value1 = replace_var(Key, Value, Bindings), + make_query_list(Rest, Bindings, [{to_atom(Key), Value1}|Acc]); +make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_list(Value) -> + Value1 = replace_var(Key, Value, Bindings), + make_query_list(Rest, Bindings, [{to_atom(Key), Value1}|Acc]); +make_query_list([{Key, Value}|Rest], Bindings, Acc) -> + make_query_list(Rest, Bindings, [{to_atom(Key), Value}|Acc]). + +replace_var(Key, Value, Bindings) -> + case Value of + <<":", Var/binary>> -> + get_var(Var, Bindings, Value); + _ when is_list(Value) -> + Value1 = lists:foldr(fun(V, Acc) -> + V1 = case V of + <<":", VName/binary>> -> + case get_var(VName, Bindings, V) of + V2 when is_list(V2) -> + iolist_to_binary(V2); + V2 -> V2 + end; + _ -> + + V + end, + [V1|Acc] + end, [], Value), + to_json(Value1); + _ when is_binary(Value) -> + Value; + _ -> + case Key of + <<"key">> -> to_json(Value); + <<"startkey">> -> to_json(Value); + <<"endkey">> -> to_json(Value); + _ -> + lists:flatten(?JSON_ENCODE(Value)) + end + end. + + +get_var(VarName, Props, Default) -> + VarName1 = list_to_atom(binary_to_list(VarName)), + couch_util:get_value(VarName1, Props, Default). + +%% doc: build new patch from bindings. bindings are query args +%% (+ dynamic query rewritten if needed) and bindings found in +%% bind_path step. +make_new_path([], _Bindings, _Remaining, Acc) -> + lists:reverse(Acc); +make_new_path([?MATCH_ALL], _Bindings, Remaining, Acc) -> + Acc1 = lists:reverse(Acc) ++ Remaining, + Acc1; +make_new_path([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) -> + Acc1 = lists:reverse(Acc) ++ Remaining, + Acc1; +make_new_path([P|Rest], Bindings, Remaining, Acc) when is_atom(P) -> + P2 = case couch_util:get_value(P, Bindings) of + undefined -> << "undefined">>; + P1 -> P1 + end, + make_new_path(Rest, Bindings, Remaining, [P2|Acc]); +make_new_path([P|Rest], Bindings, Remaining, Acc) -> + make_new_path(Rest, Bindings, Remaining, [P|Acc]). 
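To make the query rewriting above concrete, a small illustrative call (values invented): a rule carrying "query": {"k": ":foo"} arrives here as [{<<"k">>, <<":foo">>}], and with foo bound to <<"b">> by the path matching below, make_query_list/3 produces a k=b query argument.

query_rewrite_example() ->
    QueryArgs = [{<<"k">>, <<":foo">>}],   % the rule's "query" member
    Bindings  = [{foo, <<"b">>}],          % produced by bind_path/3 below
    [{k, <<"b">>}] = make_query_list(QueryArgs, Bindings, []),
    ok.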
+ + +%% @doc If method of the query fith the rule method. If the +%% method rule is '*', which is the default, all +%% request method will bind. It allows us to make rules +%% depending on HTTP method. +bind_method(?MATCH_ALL, _Method) -> + true; +bind_method(Method, Method) -> + true; +bind_method(_, _) -> + false. + + +%% @doc bind path. Using the rule from we try to bind variables given +%% to the current url by pattern matching +bind_path([], [], Bindings) -> + {ok, [], Bindings}; +bind_path([?MATCH_ALL], Rest, Bindings) when is_list(Rest) -> + {ok, Rest, Bindings}; +bind_path(_, [], _) -> + fail; +bind_path([Token|RestToken],[Match|RestMatch],Bindings) when is_atom(Token) -> + bind_path(RestToken, RestMatch, [{Token, Match}|Bindings]); +bind_path([Token|RestToken], [Token|RestMatch], Bindings) -> + bind_path(RestToken, RestMatch, Bindings); +bind_path(_, _, _) -> + fail. + + +%% normalize path. +normalize_path(Path) -> + "/" ++ string:join(normalize_path1(string:tokens(Path, + "/"), []), [?SEPARATOR]). + + +normalize_path1([], Acc) -> + lists:reverse(Acc); +normalize_path1([".."|Rest], Acc) -> + Acc1 = case Acc of + [] -> [".."|Acc]; + [T|_] when T =:= ".." -> [".."|Acc]; + [_|R] -> R + end, + normalize_path1(Rest, Acc1); +normalize_path1(["."|Rest], Acc) -> + normalize_path1(Rest, Acc); +normalize_path1([Path|Rest], Acc) -> + normalize_path1(Rest, [Path|Acc]). + + +%% @doc transform json rule in erlang for pattern matching +make_rule(Rule) -> + Method = case couch_util:get_value(<<"method">>, Rule) of + undefined -> '*'; + M -> list_to_atom(?b2l(M)) + end, + QueryArgs = case couch_util:get_value(<<"query">>, Rule) of + undefined -> []; + {Args} -> Args + end, + FromParts = case couch_util:get_value(<<"from">>, Rule) of + undefined -> ['*']; + From -> + parse_path(From) + end, + ToParts = case couch_util:get_value(<<"to">>, Rule) of + undefined -> + throw({error, invalid_rewrite_target}); + To -> + parse_path(To) + end, + [{FromParts, Method}, ToParts, QueryArgs]. + +parse_path(Path) -> + {ok, SlashRE} = re:compile(<<"\\/">>), + path_to_list(re:split(Path, SlashRE), [], 0). + +%% @doc convert a path rule (from or to) to an erlang list +%% * and path variable starting by ":" are converted +%% in erlang atom. +path_to_list([], Acc, _DotDotCount) -> + lists:reverse(Acc); +path_to_list([<<>>|R], Acc, DotDotCount) -> + path_to_list(R, Acc, DotDotCount); +path_to_list([<<"*">>|R], Acc, DotDotCount) -> + path_to_list(R, [?MATCH_ALL|Acc], DotDotCount); +path_to_list([<<"..">>|R], Acc, DotDotCount) when DotDotCount == 2 -> + case couch_config:get("httpd", "secure_rewrites", "true") of + "false" -> + path_to_list(R, [<<"..">>|Acc], DotDotCount+1); + _Else -> + ?LOG_INFO("insecure_rewrite_rule ~p blocked", [lists:reverse(Acc) ++ [<<"..">>] ++ R]), + throw({insecure_rewrite_rule, "too many ../.. segments"}) + end; +path_to_list([<<"..">>|R], Acc, DotDotCount) -> + path_to_list(R, [<<"..">>|Acc], DotDotCount+1); +path_to_list([P|R], Acc, DotDotCount) -> + P1 = case P of + <<":", Var/binary>> -> + list_to_atom(binary_to_list(Var)); + _ -> P + end, + path_to_list(R, [P1|Acc], DotDotCount). + +encode_query(Props) -> + Props1 = lists:foldl(fun ({K, V}, Acc) -> + V1 = case is_list(V) of + true -> V; + false when is_binary(V) -> + V; + false -> + mochiweb_util:quote_plus(V) + end, + [{K, V1} | Acc] + end, [], Props), + lists:flatten(mochiweb_util:urlencode(Props1)). + +to_atom(V) when is_atom(V) -> + V; +to_atom(V) when is_binary(V) -> + to_atom(?b2l(V)); +to_atom(V) -> + list_to_atom(V). 
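A worked example (values invented) tying bind_path/3 and make_new_path/4 together for the documented rule {"from": "/a/:foo/*", "to": "/some/:foo/*"} applied to a request for /a/b/c:

path_rewrite_example() ->
    From = [<<"a">>, foo, '*'],    % parse_path(<<"/a/:foo/*">>)
    To   = [<<"some">>, foo, '*'], % parse_path(<<"/some/:foo/*">>)
    {ok, Remaining, Bindings} = bind_path(From, [<<"a">>, <<"b">>, <<"c">>], []),
    % Remaining = [<<"c">>], Bindings = [{foo, <<"b">>}]
    [<<"some">>, <<"b">>, <<"c">>] = make_new_path(To, Bindings, Remaining, []),
    ok.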
+ +to_json(V) -> + iolist_to_binary(?JSON_ENCODE(V)). -- cgit v1.2.3 From 6e5d1c7dd44696617d8624bef9328092619f6780 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 2 Jul 2010 11:10:55 -0400 Subject: show distributed tasks in _active_tasks --- src/chttpd_misc.erl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 535932ae..0aa666f6 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -98,8 +98,12 @@ handle_all_dbs_req(Req) -> handle_task_status_req(#httpd{method='GET'}=Req) -> - % convert the list of prop lists to a list of json objects - send_json(Req, [{Props} || Props <- couch_task_status:all()]); + {Replies, _BadNodes} = gen_server:multi_call(couch_task_status, all), + Response = lists:flatmap(fun({Node, Tasks}) -> + [{[{node,Node} | Task]} || Task <- Tasks] + end, Replies), + % TODO filter by customer + send_json(Req, Response); handle_task_status_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). -- cgit v1.2.3 From 328a918ade7cf765ff8b1249241a2e80beeac927 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 2 Jul 2010 11:11:30 -0400 Subject: ugly dual-compatibility for ours/apache's _session formats --- src/chttpd_auth.erl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 4add8ebd..33b6d176 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -154,7 +154,8 @@ handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) -> {null, "true"} -> throw({unauthorized, <<"Please login.">>}); _False -> - send_json(Req, {[{ok,true}, {name,Name}, {roles,Roles}]}) + Props = [{name,Name}, {roles,Roles}], + send_json(Req, {[{ok,true}, {userCtx, {Props}} | Props]}) end; handle_session_req(#httpd{method='DELETE'}=Req) -> % logout -- cgit v1.2.3 From f62e1352026d0d94c4908cd989ebd1fe5b0bd30a Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Mon, 5 Jul 2010 13:22:11 -0400 Subject: add rebar, makefile, readme to chttpd --- Makefile | 10 ++++++++++ README.md | 21 +++++++++++++++++++++ rebar | Bin 0 -> 63470 bytes 3 files changed, 31 insertions(+) create mode 100644 Makefile create mode 100644 README.md create mode 100755 rebar diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..206156f9 --- /dev/null +++ b/Makefile @@ -0,0 +1,10 @@ +all: compile + +compile: + @./rebar compile + +clean: + @./rebar clean + +compilenative: + @./rebar compile erl_opts=native diff --git a/README.md b/README.md new file mode 100644 index 00000000..55d642e7 --- /dev/null +++ b/README.md @@ -0,0 +1,21 @@ +## chttpd + +chttpd is a cluster-aware http layer for CouchDB[1]. It is used in cloudant-core[2] as the http front-end. 
+ +### Getting Started +Dependencies: + Erlang R13B-03 (or higher) + +Build with rebar: + make + +### License +Apache 2.0[3] + +### Contact + http://cloudant.com + info@cloudant.com + +[1]: http://couchdb.apache.org +[2]: http://github.com/cloudant/cloudant-core +[3]: http://www.apache.org/licenses/LICENSE-2.0.html diff --git a/rebar b/rebar new file mode 100755 index 00000000..68e1ec23 Binary files /dev/null and b/rebar differ -- cgit v1.2.3 From 791281a70ba9966adf76a2f18e7bbba45038ed96 Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Mon, 5 Jul 2010 13:28:42 -0400 Subject: working on my markdown-fu --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 55d642e7..adbd927d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ ## chttpd -chttpd is a cluster-aware http layer for CouchDB[1]. It is used in cloudant-core[2] as the http front-end. +chttpd is a cluster-aware http layer for [CouchDB][1]. It is used in [cloudant-core][2] as the http front-end. ### Getting Started Dependencies: @@ -10,11 +10,11 @@ Build with rebar: make ### License -Apache 2.0[3] +[Apache 2.0][3] ### Contact - http://cloudant.com - info@cloudant.com + * [http://cloudant.com][http://cloudant.com] + * [info@cloudant.com][mailto:info@cloudant.com] [1]: http://couchdb.apache.org [2]: http://github.com/cloudant/cloudant-core -- cgit v1.2.3 From dbc11ba54fe6f24422698d308941fe7ca237f7cd Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Mon, 5 Jul 2010 13:31:03 -0400 Subject: good enough README for now --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index adbd927d..176f39c1 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ chttpd is a cluster-aware http layer for [CouchDB][1]. It is used in [cloudant- ### Getting Started Dependencies: - Erlang R13B-03 (or higher) + * Erlang R13B-03 (or higher) Build with rebar: make @@ -13,9 +13,11 @@ Build with rebar: [Apache 2.0][3] ### Contact - * [http://cloudant.com][http://cloudant.com] - * [info@cloudant.com][mailto:info@cloudant.com] + * [http://cloudant.com][4] + * [info@cloudant.com][5] [1]: http://couchdb.apache.org [2]: http://github.com/cloudant/cloudant-core [3]: http://www.apache.org/licenses/LICENSE-2.0.html +[4]: http://cloudant.com +[5]: mailto:info@cloudant.com -- cgit v1.2.3 From b0568afbb3b602095934e31bbeddea88bf126f42 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 5 Jul 2010 15:34:02 -0400 Subject: update include path for couch_db.hrl --- include/chttpd.hrl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/include/chttpd.hrl b/include/chttpd.hrl index 49c0f74a..54285bf2 100644 --- a/include/chttpd.hrl +++ b/include/chttpd.hrl @@ -1,7 +1,2 @@ --define(FABRIC, true). - --ifndef(COUCH). --include("../../couch/src/couch_db.hrl"). --endif. - +-include_lib("couch/include/couch_db.hrl"). -include_lib("eunit/include/eunit.hrl"). 
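One note on the include change above: -include_lib/1 treats the first path segment as an OTP application name and resolves it through the code path, so the header no longer depends on couch being checked out two directories up from this app. Illustrative layout, assuming couch sits in a lib directory on ERL_LIBS:

%% Resolves to e.g. deps/couch/include/couch_db.hrl (or wherever the couch
%% application lives on the code path), independent of chttpd's own location.
-include_lib("couch/include/couch_db.hrl").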
-- cgit v1.2.3 From 3cc7699cb3cae998ac95827e1239e76191f8c12f Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 5 Jul 2010 15:49:43 -0400 Subject: more cleanup of the includes --- include/chttpd.hrl | 2 -- src/chttpd.erl | 2 +- src/chttpd_app.erl | 2 -- src/chttpd_auth.erl | 2 +- src/chttpd_db.erl | 2 +- src/chttpd_external.erl | 2 +- src/chttpd_misc.erl | 2 +- src/chttpd_oauth.erl | 2 +- src/chttpd_rewrite.erl | 2 +- src/chttpd_show.erl | 2 +- src/chttpd_stats.erl | 4 ++-- src/chttpd_sup.erl | 2 -- src/chttpd_view.erl | 2 +- 13 files changed, 11 insertions(+), 17 deletions(-) delete mode 100644 include/chttpd.hrl diff --git a/include/chttpd.hrl b/include/chttpd.hrl deleted file mode 100644 index 54285bf2..00000000 --- a/include/chttpd.hrl +++ /dev/null @@ -1,2 +0,0 @@ --include_lib("couch/include/couch_db.hrl"). --include_lib("eunit/include/eunit.hrl"). diff --git a/src/chttpd.erl b/src/chttpd.erl index 93047b80..9851c97b 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -11,7 +11,7 @@ % the License. -module(chttpd). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -export([start_link/0, stop/0, handle_request/1, config_change/2, primary_header_value/2, header_value/2, header_value/3, qs_value/2, diff --git a/src/chttpd_app.erl b/src/chttpd_app.erl index 4b8356fb..e8d6f21f 100644 --- a/src/chttpd_app.erl +++ b/src/chttpd_app.erl @@ -2,8 +2,6 @@ -behaviour(application). -export([start/2, stop/1]). --include("chttpd.hrl"). - start(_Type, StartArgs) -> chttpd_sup:start_link(StartArgs). diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 33b6d176..b15bc933 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -11,7 +11,7 @@ % the License. -module(chttpd_auth). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -export([special_test_authentication_handler/1, null_authentication_handler/1, cookie_authentication_handler/1, default_authentication_handler/1, diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index bffc29bf..579ac623 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -11,7 +11,7 @@ % the License. -module(chttpd_db). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -export([handle_request/1, handle_compact_req/2, handle_design_req/2, db_req/2, couch_doc_open/4,handle_changes_req/2, diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index 53420211..0b6b775d 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -18,7 +18,7 @@ -import(chttpd,[send_error/4]). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). % handle_external_req/2 % for the old type of config usage: diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 0aa666f6..98e6fbf8 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -21,7 +21,7 @@ -export([increment_update_seq_req/2]). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -import(chttpd, [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, diff --git a/src/chttpd_oauth.erl b/src/chttpd_oauth.erl index 58fafbb8..f0bffb36 100644 --- a/src/chttpd_oauth.erl +++ b/src/chttpd_oauth.erl @@ -11,7 +11,7 @@ % the License. -module(chttpd_oauth). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -export([oauth_authentication_handler/1, handle_oauth_req/1, consumer_lookup/2]). diff --git a/src/chttpd_rewrite.erl b/src/chttpd_rewrite.erl index 4aeaf31c..fbf246ab 100644 --- a/src/chttpd_rewrite.erl +++ b/src/chttpd_rewrite.erl @@ -17,7 +17,7 @@ -module(chttpd_rewrite). 
-export([handle_rewrite_req/3]). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -define(SEPARATOR, $\/). -define(MATCH_ALL, '*'). diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index 37143386..9d67774a 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -14,7 +14,7 @@ -export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3]). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -import(chttpd, [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, diff --git a/src/chttpd_stats.erl b/src/chttpd_stats.erl index 1e30c466..1200713a 100644 --- a/src/chttpd_stats.erl +++ b/src/chttpd_stats.erl @@ -11,8 +11,8 @@ % the License. -module(chttpd_stats). --include("chttpd.hrl"). --include("../../couch/src/couch_stats.hrl"). +-include_lib("couch/include/couch_db.hrl"). +-include_lib("couch/include/couch_stats.hrl"). -export([handle_stats_req/1]). -import(chttpd, diff --git a/src/chttpd_sup.erl b/src/chttpd_sup.erl index c710ec37..4786350b 100644 --- a/src/chttpd_sup.erl +++ b/src/chttpd_sup.erl @@ -4,8 +4,6 @@ -export([start_link/1]). --include("chttpd.hrl"). - start_link(Args) -> supervisor:start_link({local,?MODULE}, ?MODULE, Args). diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 7308f4bd..f3376af7 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -11,7 +11,7 @@ % the License. -module(chttpd_view). --include("chttpd.hrl"). +-include_lib("couch/include/couch_db.hrl"). -export([handle_view_req/3,handle_temp_view_req/2]). -- cgit v1.2.3 From a0b4c5dea139fa5829c99de0529b2f7e14bf05a9 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 5 Jul 2010 17:00:55 -0400 Subject: move db_path logic to chttpd --- src/chttpd.erl | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 9851c97b..9df127dd 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -64,7 +64,8 @@ handle_request(MochiReq) -> % removed, but URL quoting left intact RawUri = MochiReq:get(raw_path), Customer = cloudant_util:customer_name(#httpd{mochi_req=MochiReq}), - Path = ?COUCH:db_path(RawUri, Customer), + {Path, _, _} = mochiweb_util:urlsplit_path(generate_customer_path(RawUri, + Customer)), {HandlerKey, _, _} = mochiweb_util:partition(Path, "/"), LogForClosedSocket = io_lib:format("mochiweb_recv_error for ~s - ~p ~s", [ @@ -154,6 +155,20 @@ handle_request(MochiReq) -> couch_stats_collector:increment({httpd, requests}), {ok, Resp}. +generate_customer_path("/", _Customer) -> + ""; +generate_customer_path("/favicon.ico", _Customer) -> + "favicon.ico"; +generate_customer_path([$/,$_|Rest], _Customer) -> + lists:flatten([$_|Rest]); +generate_customer_path([$/|RawPath], Customer) -> + case Customer of + "" -> + RawPath; + Else -> + lists:flatten([Else, "%2F", RawPath]) + end. 
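For reference, the behaviour of generate_customer_path/2 above on a few representative inputs (the customer name "acme" is invented), written as pattern-match assertions:

generate_customer_path_examples() ->
    ""              = generate_customer_path("/", "acme"),
    "favicon.ico"   = generate_customer_path("/favicon.ico", "acme"),
    % global handlers such as /_all_dbs keep their underscore prefix
    "_all_dbs"      = generate_customer_path("/_all_dbs", "acme"),
    % with no customer the leading slash is simply dropped
    "dbname"        = generate_customer_path("/dbname", ""),
    % otherwise the customer name is prepended with an encoded slash
    "acme%2Fdbname" = generate_customer_path("/dbname", "acme"),
    ok.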
+ % Try authentication handlers in order until one returns a result authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthFuns) -> Req; -- cgit v1.2.3 From 02767bad6e66b88ebf3e74fbd70ca49a4fbfae5a Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 6 Jul 2010 11:40:38 -0400 Subject: rearrange fabric arguments to match couch_btree --- src/chttpd_db.erl | 8 ++++---- src/chttpd_show.erl | 2 +- src/chttpd_view.erl | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 579ac623..39f1cfc7 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -58,13 +58,13 @@ handle_changes_req(#httpd{method='GET'}=Req, Db) -> couch_stats_collector:record({couchdb, dbinfo}, DeltaT), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), - fabric:changes(Db, ChangesArgs, fun changes_callback/2, - {"normal", Resp}) + fabric:changes(Db, fun changes_callback/2, {"normal", Resp}, + ChangesArgs) end); Feed -> % "longpoll" or "continuous" {ok, Resp} = chttpd:start_json_response(Req, 200), - fabric:changes(Db, ChangesArgs, fun changes_callback/2, {Feed, Resp}) + fabric:changes(Db, fun changes_callback/2, {Feed, Resp}, ChangesArgs) end; handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) -> send_method_not_allowed(Req, "GET,HEAD"). @@ -387,7 +387,7 @@ all_docs_view(Req, Db, Keys) -> QueryArgs = chttpd_view:parse_view_params(Req, Keys, map), chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), - fabric:all_docs(Db, QueryArgs, fun all_docs_callback/2, {nil, Resp}) + fabric:all_docs(Db, fun all_docs_callback/2, {nil, Resp}, QueryArgs) end). all_docs_callback({total_and_offset, Total, Offset}, {_, Resp}) -> diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl index 9d67774a..fba7f040 100644 --- a/src/chttpd_show.erl +++ b/src/chttpd_show.erl @@ -199,7 +199,7 @@ handle_view_list(Req, Db, DDoc, LName, {ViewDesignName, ViewName}, Keys) -> db = Db, etag = Etag }, - fabric:query_view(Db, VDoc, ViewName, QueryArgs, CB, Acc0) + fabric:query_view(Db, VDoc, ViewName, CB, Acc0, QueryArgs) end) end). diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index f3376af7..6d29101a 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -37,7 +37,7 @@ design_doc_view(Req, Db, DDoc, ViewName, Keys) -> chttpd:etag_respond(Req, Etag, fun() -> {ok, Resp} = chttpd:start_json_response(Req, 200, [{"Etag",Etag}]), CB = fun view_callback/2, - fabric:query_view(Db, DDoc, ViewName, QueryArgs, CB, {nil, Resp}) + fabric:query_view(Db, DDoc, ViewName, CB, {nil, Resp}, QueryArgs) end). view_callback({total_and_offset, Total, Offset}, {nil, Resp}) -> -- cgit v1.2.3 From 1a2e1790388e21174ac431a223147eb2ea4949be Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 7 Jul 2010 10:28:31 -0400 Subject: remove unused view code --- src/chttpd_view.erl | 399 +--------------------------------------------------- 1 file changed, 5 insertions(+), 394 deletions(-) diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 6d29101a..42972870 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -13,12 +13,9 @@ -module(chttpd_view). -include_lib("couch/include/couch_db.hrl"). --export([handle_view_req/3,handle_temp_view_req/2]). - --export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]). --export([make_view_fold_fun/6, finish_view_fold/3, view_row_obj/3]). --export([view_group_etag/2, view_group_etag/3, make_reduce_fold_funs/5]). 
--export([design_doc_view/5, parse_bool_param/1, extract_view_type/3]). +-export([handle_view_req/3, handle_temp_view_req/2, get_reduce_type/1, + parse_view_params/3, view_group_etag/2, view_group_etag/3, + parse_bool_param/1, extract_view_type/3]). -import(chttpd, [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,send_chunk/2, @@ -91,127 +88,14 @@ handle_view_req(#httpd{method='POST', handle_view_req(Req, _Db, _DDoc) -> send_method_not_allowed(Req, "GET,POST,HEAD"). -handle_temp_view_req(#httpd{method='POST'}=Req, Db) -> - throw(not_implemented), - couch_stats_collector:increment({httpd, temporary_view_reads}), - {Props} = chttpd:json_body_obj(Req), - Language = couch_util:get_value(<<"language">>, Props, <<"javascript">>), - {DesignOptions} = couch_util:get_value(<<"options">>, Props, {[]}), - MapSrc = couch_util:get_value(<<"map">>, Props), - Keys = couch_util:get_value(<<"keys">>, Props, nil), - case couch_util:get_value(<<"reduce">>, Props, null) of - null -> - QueryArgs = parse_view_params(Req, Keys, map), - {ok, View, Group} = ?COUCH:get_temp_map_view(Db, Language, - DesignOptions, MapSrc), - output_map_view(Req, View, Group, Db, QueryArgs, Keys); - RedSrc -> - QueryArgs = parse_view_params(Req, Keys, reduce), - {ok, View, Group} = ?COUCH:get_temp_reduce_view(Db, Language, - DesignOptions, MapSrc, RedSrc), - output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) - end; - handle_temp_view_req(Req, _Db) -> - send_method_not_allowed(Req, "POST"). - -output_map_view(Req, View, Group, Db, QueryArgs, nil) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_key = StartKey, - start_docid = StartDocId - } = QueryArgs, - CurrentEtag = view_group_etag(Group, Db), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - {ok, RowCount} = ?COUCH:get_row_count(View), - Start = {StartKey, StartDocId}, - FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, #view_fold_helper_funs{reduce_count=fun ?COUCH:reduce_to_count/1}), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - FoldResult = ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAccInit), - finish_view_fold(Req, RowCount, FoldResult) - end); - -output_map_view(Req, View, Group, Db, QueryArgs, Keys) -> - #view_query_args{ - limit = Limit, - direction = Dir, - skip = SkipCount, - start_docid = StartDocId - } = QueryArgs, - CurrentEtag = view_group_etag(Group, Db, Keys), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - {ok, RowCount} = ?COUCH:get_row_count(View), - FoldAccInit = {Limit, SkipCount, undefined, [], nil}, - FoldResult = lists:foldl( - fun(Key, {ok, FoldAcc}) -> - Start = {Key, StartDocId}, - FoldlFun = make_view_fold_fun(Req, - QueryArgs#view_query_args{ - start_key = Key, - end_key = Key - }, CurrentEtag, Db, RowCount, - #view_fold_helper_funs{ - reduce_count = fun ?COUCH:reduce_to_count/1 - }), - ?COUCH:view_fold(View, Start, Dir, FoldlFun, FoldAcc) - end, {ok, FoldAccInit}, Keys), - finish_view_fold(Req, RowCount, FoldResult) - end). 
- -output_reduce_view(Req, Db, View, Group, QueryArgs, nil) -> - #view_query_args{ - start_key = StartKey, - end_key = EndKey, - limit = Limit, - skip = Skip, - direction = Dir, - start_docid = StartDocId, - end_docid = EndDocId, - group_level = GroupLevel - } = QueryArgs, - CurrentEtag = view_group_etag(Group, Db), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}), - FoldAccInit = {Limit, Skip, undefined, []}, - {ok, {_, _, Resp, _}} = ?COUCH:view_fold_reduce(View, Dir, {StartKey, StartDocId}, - {EndKey, EndDocId}, GroupRowsFun, RespFun, FoldAccInit), - finish_reduce_fold(Req, Resp) - end); - -output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) -> - #view_query_args{ - limit = Limit, - skip = Skip, - direction = Dir, - start_docid = StartDocId, - end_docid = EndDocId, - group_level = GroupLevel - } = QueryArgs, - CurrentEtag = view_group_etag(Group, Db), - chttpd:etag_respond(Req, CurrentEtag, fun() -> - {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}), - {Resp, _RedAcc3} = lists:foldl( - fun(Key, {Resp, RedAcc}) -> - % run the reduce once for each key in keys, with limit etc reapplied for each key - FoldAccInit = {Limit, Skip, Resp, RedAcc}, - {_, {_, _, Resp2, RedAcc2}} = ?COUCH:view_fold_reduce(View, Dir, {Key, StartDocId}, - {Key, EndDocId}, GroupRowsFun, RespFun, FoldAccInit), - % Switch to comma - {Resp2, RedAcc2} - end, - {undefined, []}, Keys), % Start with no comma - finish_reduce_fold(Req, Resp) - end). + Msg = <<"Temporary views are not supported by Cloudant">>, + chttpd:send_error(Req, 403, Msg). reverse_key_default(?MIN_STR) -> ?MAX_STR; reverse_key_default(?MAX_STR) -> ?MIN_STR; reverse_key_default(Key) -> Key. -get_stale_type(Req) -> - list_to_atom(chttpd:qs_value(Req, "stale", "nil")). - get_reduce_type(Req) -> list_to_atom(chttpd:qs_value(Req, "reduce", "true")). @@ -385,214 +269,6 @@ validate_view_query(sorted, _Value, Args) -> validate_view_query(extra, _Value, Args) -> Args. -make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) -> - #view_query_args{ - end_key = EndKey, - end_docid = EndDocId, - inclusive_end = InclusiveEnd, - direction = Dir - } = QueryArgs, - - #view_fold_helper_funs{ - passed_end = PassedEndFun, - start_response = StartRespFun, - send_row = SendRowFun, - reduce_count = ReduceCountFun - } = apply_default_helper_funs(HelperFuns, - {Dir, EndKey, EndDocId, InclusiveEnd}), - - #view_query_args{ - include_docs = IncludeDocs - } = QueryArgs, - - fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc, - OffsetAcc}) -> - PassedEnd = PassedEndFun(Key, DocId), - case {PassedEnd, AccLimit, AccSkip, Resp} of - {true, _, _, _} -> - % The stop key has been passed, stop looping. - % We may need offset so calcluate it here. - % Checking Resp is an optimization that tells - % us its already been calculated (and sent). 
- NewOffset = case Resp of - undefined -> ReduceCountFun(OffsetReds); - _ -> nil - end, - {stop, {AccLimit, AccSkip, Resp, RowFunAcc, NewOffset}}; - {_, 0, _, _} -> - % we've done "limit" rows, stop foldling - {stop, {0, 0, Resp, RowFunAcc, OffsetAcc}}; - {_, _, AccSkip, _} when AccSkip > 0 -> - % just keep skipping - {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc, OffsetAcc}}; - {_, _, _, undefined} -> - % rendering the first row, first we start the response - Offset = ReduceCountFun(OffsetReds), - {ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag, - TotalViewCount, Offset, RowFunAcc), - {Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value}, - IncludeDocs, RowFunAcc0), - {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2, Offset}}; - {_, AccLimit, _, Resp} when (AccLimit > 0) -> - % rendering all other rows - {Go, RowFunAcc2} = SendRowFun(Resp, Db, {{Key, DocId}, Value}, - IncludeDocs, RowFunAcc), - {Go, {AccLimit - 1, 0, Resp, RowFunAcc2, OffsetAcc}} - end - end. - -make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) -> - #reduce_fold_helper_funs{ - start_response = StartRespFun, - send_row = SendRowFun - } = apply_default_helper_funs(HelperFuns), - - GroupRowsFun = - fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 -> - true; - ({Key1,_}, {Key2,_}) - when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) -> - lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel); - ({Key1,_}, {Key2,_}) -> - Key1 == Key2 - end, - - RespFun = fun - (_Key, _Red, {AccLimit, AccSkip, Resp, RowAcc}) when AccSkip > 0 -> - % keep skipping - {ok, {AccLimit, AccSkip - 1, Resp, RowAcc}}; - (_Key, _Red, {0, _AccSkip, Resp, RowAcc}) -> - % we've exhausted limit rows, stop - {stop, {0, _AccSkip, Resp, RowAcc}}; - - (_Key, Red, {AccLimit, 0, undefined, RowAcc0}) when GroupLevel == 0 -> - % we haven't started responding yet and group=false - {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0), - {Go, RowAcc2} = SendRowFun(Resp2, {null, Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp2, RowAcc2}}; - (_Key, Red, {AccLimit, 0, Resp, RowAcc}) when GroupLevel == 0 -> - % group=false but we've already started the response - {Go, RowAcc2} = SendRowFun(Resp, {null, Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp, RowAcc2}}; - - (Key, Red, {AccLimit, 0, undefined, RowAcc0}) - when is_integer(GroupLevel), is_list(Key) -> - % group_level and we haven't responded yet - {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0), - {Go, RowAcc2} = SendRowFun(Resp2, {lists:sublist(Key, GroupLevel), Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp2, RowAcc2}}; - (Key, Red, {AccLimit, 0, Resp, RowAcc}) - when is_integer(GroupLevel), is_list(Key) -> - % group_level and we've already started the response - {Go, RowAcc2} = SendRowFun(Resp, {lists:sublist(Key, GroupLevel), Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp, RowAcc2}}; - - (Key, Red, {AccLimit, 0, undefined, RowAcc0}) -> - % group=true and we haven't responded yet - {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0), - {Go, RowAcc2} = SendRowFun(Resp2, {Key, Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp2, RowAcc2}}; - (Key, Red, {AccLimit, 0, Resp, RowAcc}) -> - % group=true and we've already started the response - {Go, RowAcc2} = SendRowFun(Resp, {Key, Red}, RowAcc), - {Go, {AccLimit - 1, 0, Resp, RowAcc2}} - end, - {ok, GroupRowsFun, RespFun}. 
- -apply_default_helper_funs(#view_fold_helper_funs{ - passed_end = PassedEnd, - start_response = StartResp, - send_row = SendRow -}=Helpers, {Dir, EndKey, EndDocId, InclusiveEnd}) -> - PassedEnd2 = case PassedEnd of - undefined -> make_passed_end_fun(Dir, EndKey, EndDocId, InclusiveEnd); - _ -> PassedEnd - end, - - StartResp2 = case StartResp of - undefined -> fun json_view_start_resp/5; - _ -> StartResp - end, - - SendRow2 = case SendRow of - undefined -> fun send_json_view_row/5; - _ -> SendRow - end, - - Helpers#view_fold_helper_funs{ - passed_end = PassedEnd2, - start_response = StartResp2, - send_row = SendRow2 - }. - -apply_default_helper_funs(#reduce_fold_helper_funs{ - start_response = StartResp, - send_row = SendRow -}=Helpers) -> - StartResp2 = case StartResp of - undefined -> fun json_reduce_start_resp/3; - _ -> StartResp - end, - - SendRow2 = case SendRow of - undefined -> fun send_json_reduce_row/3; - _ -> SendRow - end, - - Helpers#reduce_fold_helper_funs{ - start_response = StartResp2, - send_row = SendRow2 - }. - -make_passed_end_fun(fwd, EndKey, EndDocId, InclusiveEnd) -> - case InclusiveEnd of - true -> - fun(ViewKey, ViewId) -> - couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) - end; - false -> - fun - (ViewKey, _ViewId) when ViewKey == EndKey -> - true; - (ViewKey, ViewId) -> - couch_view:less_json([EndKey, EndDocId], [ViewKey, ViewId]) - end - end; - -make_passed_end_fun(rev, EndKey, EndDocId, InclusiveEnd) -> - case InclusiveEnd of - true -> - fun(ViewKey, ViewId) -> - couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) - end; - false-> - fun - (ViewKey, _ViewId) when ViewKey == EndKey -> - true; - (ViewKey, ViewId) -> - couch_view:less_json([ViewKey, ViewId], [EndKey, EndDocId]) - end - end. - -json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc) -> - {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]), - BeginBody = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n", - [TotalViewCount, Offset]), - {ok, Resp, BeginBody}. - -send_json_view_row(Resp, Db, {{Key, DocId}, Value}, IncludeDocs, RowFront) -> - JsonObj = view_row_obj(Db, {{Key, DocId}, Value}, IncludeDocs), - send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonObj)), - {ok, ",\r\n"}. - -json_reduce_start_resp(Req, Etag, _Acc0) -> - {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]), - {ok, Resp, "{\"rows\":[\r\n"}. - -send_json_reduce_row(Resp, {Key, Value}, RowFront) -> - send_chunk(Resp, RowFront ++ ?JSON_ENCODE({[{key, Key}, {value, Value}]})), - {ok, ",\r\n"}. - view_group_etag(Group, Db) -> view_group_etag(Group, Db, nil). @@ -604,71 +280,6 @@ view_group_etag(#group{sig=Sig,current_seq=CurrentSeq}, _Db, Extra) -> % track of the last Db seq that caused an index change. chttpd:make_etag({Sig, CurrentSeq, Extra}). -% the view row has an error -view_row_obj(_Db, {{Key, error}, Value}, _IncludeDocs) -> - {[{key, Key}, {error, Value}]}; -% include docs in the view output -view_row_obj(Db, {{Key, DocId}, {Props}}, true) -> - Rev = case couch_util:get_value(<<"_rev">>, Props) of - undefined -> - nil; - Rev0 -> - couch_doc:parse_rev(Rev0) - end, - view_row_with_doc(Db, {{Key, DocId}, {Props}}, Rev); -view_row_obj(Db, {{Key, DocId}, Value}, true) -> - view_row_with_doc(Db, {{Key, DocId}, Value}, nil); -% the normal case for rendering a view row -view_row_obj(_Db, {{Key, DocId}, Value}, _IncludeDocs) -> - {[{id, DocId}, {key, Key}, {value, Value}]}. 
- -view_row_with_doc(Db, {{Key, DocId}, Value}, Rev) -> - ?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]), - case (catch chttpd_db:couch_doc_open(Db, DocId, Rev, [])) of - {{not_found, missing}, _RevId} -> - {[{id, DocId}, {key, Key}, {value, Value}, {error, missing}]}; - {not_found, missing} -> - {[{id, DocId}, {key, Key}, {value, Value}, {error, missing}]}; - {not_found, deleted} -> - {[{id, DocId}, {key, Key}, {value, Value}]}; - Doc -> - JsonDoc = couch_doc:to_json_obj(Doc, []), - {[{id, DocId}, {key, Key}, {value, Value}, {doc, JsonDoc}]} - end. - -finish_view_fold(Req, TotalRows, FoldResult) -> - case FoldResult of - {ok, {_, _, undefined, _, Offset}} -> - % nothing found in the view, nothing has been returned - % send empty view - NewOffset = case Offset of - nil -> TotalRows; - _ -> Offset - end, - send_json(Req, 200, {[ - {total_rows, TotalRows}, - {offset, NewOffset}, - {rows, []} - ]}); - {ok, {_, _, Resp, _, _}} -> - % end the view - send_chunk(Resp, "\r\n]}"), - end_json_response(Resp); - Error -> - throw(Error) - end. - -finish_reduce_fold(Req, Resp) -> - case Resp of - undefined -> - send_json(Req, 200, {[ - {rows, []} - ]}); - Resp -> - send_chunk(Resp, "\r\n]}"), - end_json_response(Resp) - end. - parse_bool_param("true") -> true; parse_bool_param("false") -> false; parse_bool_param(Val) -> -- cgit v1.2.3 From 84d7e93630c49d6258e87b6d113d7ebbd489b822 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 8 Jul 2010 12:45:25 -0400 Subject: update attachment handling for new format, untested --- src/chttpd_db.erl | 108 ++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 93 insertions(+), 15 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 39f1cfc7..aa7ba1c7 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -634,18 +634,62 @@ db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) -> case [A || A <- Atts, A#att.name == FileName] of [] -> throw({not_found, "Document is missing attachment"}); - [#att{type=Type, len=Len}=Att] -> + [#att{type=Type, encoding=Enc, disk_len=DiskLen, att_len=AttLen}=Att] -> Etag = chttpd:doc_etag(Doc), - chttpd:etag_respond(Req, Etag, fun() -> - {ok, Resp} = start_response_length(Req, 200, [ - {"ETag", Etag}, - {"Cache-Control", "must-revalidate"}, - {"Content-Type", binary_to_list(Type)} - ], integer_to_list(Len)), - couch_doc:att_foldl(Att, fun(BinSegment, _) -> - send(Resp, BinSegment) - end, {ok, Resp}) - end) + ReqAcceptsAttEnc = lists:member( + atom_to_list(Enc), + chttpd:accepted_encodings(Req) + ), + Headers = [ + {"ETag", Etag}, + {"Cache-Control", "must-revalidate"}, + {"Content-Type", binary_to_list(Type)} + ] ++ case ReqAcceptsAttEnc of + true -> + [{"Content-Encoding", atom_to_list(Enc)}]; + _ -> + [] + end, + Len = case {Enc, ReqAcceptsAttEnc} of + {identity, _} -> + % stored and served in identity form + DiskLen; + {_, false} when DiskLen =/= AttLen -> + % Stored encoded, but client doesn't accept the encoding we used, + % so we need to decode on the fly. DiskLen is the identity length + % of the attachment. + DiskLen; + {_, true} -> + % Stored and served encoded. AttLen is the encoded length. + AttLen; + _ -> + % We received an encoded attachment and stored it as such, so we + % don't know the identity length. The client doesn't accept the + % encoding, and since we cannot serve a correct Content-Length + % header we'll fall back to a chunked response. 
+ undefined + end, + AttFun = case ReqAcceptsAttEnc of + false -> + fun couch_doc:att_foldl_decode/3; + true -> + fun couch_doc:att_foldl/3 + end, + couch_httpd:etag_respond( + Req, + Etag, + fun() -> + case Len of + undefined -> + {ok, Resp} = start_chunked_response(Req, 200, Headers), + AttFun(Att, fun(Seg, _) -> send_chunk(Resp, Seg) end, ok), + couch_httpd:last_chunk(Resp); + _ -> + {ok, Resp} = start_response_length(Req, 200, Headers, Len), + AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, ok) + end + end + ) end; @@ -670,14 +714,28 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa CType -> list_to_binary(CType) end, - data = ?COUCH:att_receiver(Req, chttpd:body_length(Req)), - len = case chttpd:header_value(Req,"Content-Length") of + data = fabric:att_receiver(Req, chttpd:body_length(Req)), + att_len = case chttpd:header_value(Req,"Content-Length") of undefined -> undefined; Length -> list_to_integer(Length) - end - }] + end, + md5 = get_md5_header(Req), + encoding = case string:to_lower(string:strip( + chttpd:header_value(Req,"Content-Encoding","identity") + )) of + "identity" -> + identity; + "gzip" -> + gzip; + _ -> + throw({ + bad_ctype, + "Only gzip and identity content-encodings are supported" + }) + end + }] end, Doc = case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of @@ -728,6 +786,26 @@ parse_doc_format(FormatStr) when is_list(FormatStr) -> parse_doc_format(_BadFormatStr) -> throw({bad_request, <<"Invalid doc format">>}). +get_md5_header(Req) -> + ContentMD5 = couch_httpd:header_value(Req, "Content-MD5"), + Length = couch_httpd:body_length(Req), + Trailer = couch_httpd:header_value(Req, "Trailer"), + case {ContentMD5, Length, Trailer} of + _ when is_list(ContentMD5) orelse is_binary(ContentMD5) -> + base64:decode(ContentMD5); + {_, chunked, undefined} -> + <<>>; + {_, chunked, _} -> + case re:run(Trailer, "\\bContent-MD5\\b", [caseless]) of + {match, _} -> + md5_in_footer; + _ -> + <<>> + end; + _ -> + <<>> + end. 
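get_md5_header/1 above accepts the digest either as a regular Content-MD5 request header or, for chunked uploads, as a trailer announced via the Trailer header, in which case it returns the md5_in_footer marker. A rough sketch of what a client is expected to compute (the function name is illustrative, not part of the patch):

    %% Build the base64 Content-MD5 value for an attachment body; the
    %% server's base64:decode/1 recovers the raw 16-byte digest from it.
    content_md5_header(Body) when is_binary(Body) ->
        {"Content-MD5", base64:encode_to_string(erlang:md5(Body))}.

    %% For a chunked upload the client instead announces the trailer up
    %% front ("Trailer: Content-MD5") and appends the digest after the
    %% final chunk, which is the md5_in_footer case above.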
+ parse_doc_query(Req) -> lists:foldl(fun({Key,Value}, Args) -> case {Key, Value} of -- cgit v1.2.3 From f8f690c293a52b961e232a36819777a7105abb92 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 8 Jul 2010 16:06:09 -0400 Subject: CouchDB is 1.0.0 --- src/chttpd_misc.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 98e6fbf8..0482947b 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -36,7 +36,7 @@ handle_welcome_req(Req) -> handle_welcome_req(#httpd{method='GET'}=Req, WelcomeMessage) -> send_json(Req, {[ {couchdb, WelcomeMessage}, - {version, <<"0.10.1+">>}, + {version, list_to_binary(couch:version())}, {cloudant_build, get_version()} ]}); handle_welcome_req(Req, _) -> -- cgit v1.2.3 From ff2c290a40754cb9aa87dbba0bdbb633e1fc4295 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 9 Jul 2010 21:32:40 -0400 Subject: use base64 library instead of old couch_util impl --- src/chttpd_auth.erl | 2 +- src/chttpd_external.erl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index b15bc933..d1e9a7a5 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -233,7 +233,7 @@ ensure_users_db_exists(DbName) -> basic_username_pw(Req) -> case chttpd:header_value(Req, "Authorization") of "Basic " ++ Base64Value -> - case string:tokens(?b2l(couch_util:decodeBase64(Base64Value)),":") of + case string:tokens(?b2l(base64:decode(Base64Value)),":") of [User, Pass] -> {User, Pass}; [User] -> diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index 0b6b775d..3aff3291 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -146,7 +146,7 @@ parse_external_response({Response}) -> Args#extern_resp_args{data=Value, ctype="text/html; charset=utf-8"}; {<<"base64">>, Value} -> Args#extern_resp_args{ - data=couch_util:decodeBase64(Value), + data=base64:decode(Value), ctype="application/binary" }; {<<"headers">>, {Headers}} -> -- cgit v1.2.3 From 75c76c2ac385b1b88d1d1202e50e6cab929e0f89 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Sat, 10 Jul 2010 15:45:46 -0400 Subject: remove unnecessary chttpd_stats module --- ebin/chttpd.app | 1 - src/chttpd.erl | 1 - src/chttpd_stats.erl | 60 ---------------------------------------------------- 3 files changed, 62 deletions(-) delete mode 100644 src/chttpd_stats.erl diff --git a/ebin/chttpd.app b/ebin/chttpd.app index c85e58e8..93364fc4 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -11,7 +11,6 @@ chttpd_oauth, chttpd_rewrite, chttpd_show, - chttpd_stats, chttpd_sup, chttpd_view, cloudant_auth diff --git a/src/chttpd.erl b/src/chttpd.erl index 9df127dd..ac5ee38c 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -206,7 +206,6 @@ url_handler("_sleep") -> fun chttpd_misc:handle_sleep_req/1; url_handler("_session") -> fun chttpd_auth:handle_session_req/1; url_handler("_user") -> fun chttpd_auth:handle_user_req/1; url_handler("_oauth") -> fun chttpd_oauth:handle_oauth_req/1; -url_handler("_stats") -> fun chttpd_stats:handle_stats_req/1; url_handler("_metrics") -> fun chttpd_misc:handle_metrics_req/1; url_handler("_restart") -> fun showroom_http:handle_restart_req/1; url_handler("_cloudant") -> fun showroom_httpd_admin:handle_cloudant_req/1; diff --git a/src/chttpd_stats.erl b/src/chttpd_stats.erl deleted file mode 100644 index 1200713a..00000000 --- a/src/chttpd_stats.erl +++ /dev/null @@ -1,60 +0,0 @@ -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this 
file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - --module(chttpd_stats). --include_lib("couch/include/couch_db.hrl"). --include_lib("couch/include/couch_stats.hrl"). - --export([handle_stats_req/1]). --import(chttpd, - [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2, - start_json_response/2,send_chunk/2,end_json_response/1, - start_chunked_response/3, send_error/4]). - --record(stats_query_args, { - range='0', - flush=false -}). - -handle_stats_req(#httpd{method='GET', path_parts=[_]}=Req) -> - send_json(Req, couch_stats_aggregator:all()); - -handle_stats_req(#httpd{method='GET', path_parts=[_Stats, Module, Key]}=Req) -> - #stats_query_args{ - range=Range, - flush=Flush - } = parse_stats_query(Req), - - case Flush of - true -> - couch_stats_aggregator:time_passed(); - _ -> ok - end, - - Stats = couch_stats_aggregator:get_json({?b2a(Module), ?b2a(Key)}, Range), - Response = {[{Module, {[{Key, Stats}]}}]}, - send_json(Req, Response); - -handle_stats_req(Req) -> - send_method_not_allowed(Req, "GET"). - -parse_stats_query(Req) -> - lists:foldl(fun({Key,Value}, Args) -> - case {Key, Value} of - {"range", Range} -> - Args#stats_query_args{range=list_to_atom(Range)}; - {"flush", "true"} -> - Args#stats_query_args{flush=true}; - _Else -> % unknown key value pair, ignore. - Args - end - end, #stats_query_args{}, chttpd:qs(Req)). -- cgit v1.2.3 From 25b9b2d936c07162b904b923e3308b56207888d7 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Sat, 10 Jul 2010 15:50:17 -0400 Subject: thank you dialyzer --- src/chttpd_db.erl | 21 ++++++++++----------- src/chttpd_view.erl | 2 +- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index aa7ba1c7..dfdba6a0 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -111,17 +111,13 @@ is_old_couch(Resp) -> string:str(UserAgent, "CouchDB/0") > 0 end. -handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) -> - ok = ?COUCH:compact_view_group(DbName, Id), - send_json(Req, 202, {[{ok, true}]}); - handle_compact_req(Req, _) -> Msg = <<"Compaction is handled automatically by Cloudant">>, - chttpd:send_error(Req, 403, Msg). + couch_httpd:send_error(Req, 403, forbidden, Msg). handle_view_cleanup_req(Req, _) -> Msg = <<"Old view indices are purged automatically by Cloudant">>, - chttpd:send_error(Req, 403, Msg). + couch_httpd:send_error(Req, 403, forbidden, Msg). 
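With this change both _compact and _view_cleanup answer with a 403 and a forbidden error term. Assuming couch_httpd:send_error/4 renders the error and reason pair as JSON in the usual way, the body a client sees should resemble the output of this shell sketch (mochijson2 ships with the mochiweb dependency; the exact formatting is up to couch_httpd):

    1> iolist_to_binary(mochijson2:encode({struct,
           [{error, forbidden},
            {reason, <<"Compaction is handled automatically by Cloudant">>}]})).
    <<"{\"error\":\"forbidden\",\"reason\":\"Compaction is handled automatically by Cloudant\"}">>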
handle_design_req(#httpd{ path_parts=[_DbName, _Design, Name, <<"_",_/binary>> = Action | _Rest], @@ -282,11 +278,14 @@ db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) -> db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) -> {IdsRevs} = chttpd:json_body_obj(Req), IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs], - - case ?COUCH:purge_docs(Db, IdsRevs2) of + case fabric:purge_docs(Db, IdsRevs2) of {ok, PurgeSeq, PurgedIdsRevs} -> - PurgedIdsRevs2 = [{Id, couch_doc:rev_to_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs], - send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]}); + PurgedIdsRevs2 = [{Id, couch_doc:revs_to_strs(Revs)} || {Id, Revs} + <- PurgedIdsRevs], + send_json(Req, 200, {[ + {<<"purge_seq">>, PurgeSeq}, + {<<"purged">>, {PurgedIdsRevs2}} + ]}); Error -> throw(Error) end; @@ -638,7 +637,7 @@ db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) -> Etag = chttpd:doc_etag(Doc), ReqAcceptsAttEnc = lists:member( atom_to_list(Enc), - chttpd:accepted_encodings(Req) + couch_httpd:accepted_encodings(Req) ), Headers = [ {"ETag", Etag}, diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index 42972870..e09487c8 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -90,7 +90,7 @@ handle_view_req(Req, _Db, _DDoc) -> handle_temp_view_req(Req, _Db) -> Msg = <<"Temporary views are not supported by Cloudant">>, - chttpd:send_error(Req, 403, Msg). + chttpd:send_error(Req, 403, forbidden, Msg). reverse_key_default(?MIN_STR) -> ?MAX_STR; reverse_key_default(?MAX_STR) -> ?MIN_STR; -- cgit v1.2.3 From c459a41601da688ef54664d645c9280ca5aa7a69 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Sat, 10 Jul 2010 22:26:42 -0400 Subject: showroom_api disappeared, use showroom_rep for now --- src/chttpd_misc.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 0482947b..3fd5e919 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -109,7 +109,7 @@ handle_task_status_req(Req) -> handle_replicate_req(#httpd{method='POST'}=Req) -> PostBody = get(post_body), - try showroom_api:replicate_db(PostBody, Req#httpd.user_ctx) of + try showroom_rep:replicate(PostBody, Req#httpd.user_ctx) of {ok, {continuous, RepId}} -> send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]}); {ok, {JsonResults}} -> -- cgit v1.2.3 From 288da4c5ad0f2d4c6d83240923a69810f462c0b5 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 12 Jul 2010 17:05:19 -0400 Subject: update PUT /db/docid, but multipart is probably still broken --- src/chttpd.erl | 2 +- src/chttpd_db.erl | 180 +++++++++++++++++++++++++++++------------------------- 2 files changed, 97 insertions(+), 85 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index ac5ee38c..42b4ab41 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -289,7 +289,7 @@ absolute_uri(#httpd{mochi_req=MochiReq} = Req, Path) -> end end, CustomerRegex = ["^/", cloudant_util:customer_name(Req), "[/%2F]+"], - NewPath = re:replace(Path, CustomerRegex, "/"), + NewPath = re:replace(Path, CustomerRegex, "/", [{return,list}]), Scheme ++ "://" ++ Host ++ NewPath. unquote(UrlEncodedString) -> diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index dfdba6a0..0719623f 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -28,7 +28,8 @@ options = [], rev = nil, open_revs = [], - show = nil + update_type = interactive_edit, + atts_since = nil }). 
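The #doc_query_args record gains an update_type field (defaulting to interactive_edit) and an atts_since field in place of the old show field. A standalone sketch of how the later parse_doc_query/1 changes are expected to populate the new fields from the query string; the module name is hypothetical and couch_doc:parse_revs/1 is left out, so atts_since stays a raw JSON string here:

    -module(doc_query_args_sketch).
    -export([from_qs/1]).

    -record(doc_query_args, {options = [], rev = nil, open_revs = [],
                             update_type = interactive_edit, atts_since = nil}).

    from_qs(Qs) ->
        lists:foldl(fun
            ({"new_edits", "false"}, Args) ->
                %% replication-style write: the supplied rev tree is kept
                Args#doc_query_args{update_type = replicated_changes};
            ({"new_edits", "true"}, Args) ->
                Args#doc_query_args{update_type = interactive_edit};
            ({"atts_since", RevsJson}, Args) ->
                %% the real handler JSON-decodes and parses these revisions
                Args#doc_query_args{atts_since = RevsJson};
            (_Other, Args) ->
                Args
        end, #doc_query_args{}, Qs).

For example, from_qs([{"new_edits","false"}]) yields update_type = replicated_changes, which the PUT handler passes through to fabric:update_doc/3.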
% Database request handlers @@ -414,52 +415,45 @@ db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) -> db_doc_req(#httpd{method='GET'}=Req, Db, DocId) -> #doc_query_args{ - show = Format, rev = Rev, open_revs = Revs, options = Options } = parse_doc_query(Req), - case Format of - nil -> - case Revs of + case Revs of + [] -> + Doc = couch_doc_open(Db, DocId, Rev, Options), + DiskEtag = chttpd:doc_etag(Doc), + case Doc#doc.meta of [] -> - Doc = couch_doc_open(Db, DocId, Rev, Options), - DiskEtag = chttpd:doc_etag(Doc), - case Doc#doc.meta of - [] -> - % output etag only when we have no meta - chttpd:etag_respond(Req, DiskEtag, fun() -> - send_json(Req, 200, [{"Etag", DiskEtag}], couch_doc:to_json_obj(Doc, Options)) - end); - _ -> - send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options)) - end; + % output etag only when we have no meta + chttpd:etag_respond(Req, DiskEtag, fun() -> + send_json(Req, 200, [{"Etag", DiskEtag}], couch_doc:to_json_obj(Doc, Options)) + end); _ -> - {ok, Results} = fabric:open_revs(Db, DocId, Revs, Options), - {ok, Resp} = start_json_response(Req, 200), - send_chunk(Resp, "["), - % We loop through the docs. The first time through the separator - % is whitespace, then a comma on subsequent iterations. - lists:foldl( - fun(Result, AccSeparator) -> - case Result of - {ok, Doc} -> - JsonDoc = couch_doc:to_json_obj(Doc, Options), - Json = ?JSON_ENCODE({[{ok, JsonDoc}]}), - send_chunk(Resp, AccSeparator ++ Json); - {{not_found, missing}, RevId} -> - Json = ?JSON_ENCODE({[{"missing", RevId}]}), - send_chunk(Resp, AccSeparator ++ Json) - end, - "," % AccSeparator now has a comma - end, - "", Results), - send_chunk(Resp, "]"), - end_json_response(Resp) + send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options)) end; _ -> - {DesignName, ShowName} = Format, - chttpd_show:handle_doc_show(Req, DesignName, ShowName, DocId, Db) + {ok, Results} = fabric:open_revs(Db, DocId, Revs, Options), + {ok, Resp} = start_json_response(Req, 200), + send_chunk(Resp, "["), + % We loop through the docs. The first time through the separator + % is whitespace, then a comma on subsequent iterations. 
+ lists:foldl( + fun(Result, AccSeparator) -> + case Result of + {ok, Doc} -> + JsonDoc = couch_doc:to_json_obj(Doc, Options), + Json = ?JSON_ENCODE({[{ok, JsonDoc}]}), + send_chunk(Resp, AccSeparator ++ Json); + {{not_found, missing}, RevId} -> + Json = ?JSON_ENCODE({[{"missing", RevId}]}), + send_chunk(Resp, AccSeparator ++ Json) + end, + "," % AccSeparator now has a comma + end, + "", Results), + send_chunk(Resp, "]"), + end_json_response(Resp) end; db_doc_req(#httpd{method='POST', user_ctx=Ctx}=Req, Db, DocId) -> @@ -500,23 +494,43 @@ db_doc_req(#httpd{method='POST', user_ctx=Ctx}=Req, Db, DocId) -> {rev, couch_doc:rev_to_str(NewRev)} ]}); -db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) -> +db_doc_req(#httpd{method='PUT', user_ctx=Ctx}=Req, Db, DocId) -> + #doc_query_args{ + update_type = UpdateType + } = parse_doc_query(Req), couch_doc:validate_docid(DocId), - Json = chttpd:json_body(Req), - case chttpd:qs_value(Req, "batch") of - "ok" -> - % batch - Doc = couch_doc_from_req(Req, DocId, Json), - ok = couch_batch_save:eventually_save_doc(Db#db.name, Doc, Db#db.user_ctx), - send_json(Req, 202, [], {[ - {ok, true}, - {id, DocId} - ]}); - _Normal -> - % normal - DbName = couch_db:name(Db), - Location = absolute_uri(Req, <<"/", DbName/binary, "/", DocId/binary>>), - update_doc(Req, Db, DocId, Json, [{"Location", Location}]) + + Loc = absolute_uri(Req, [$/, Db#db.name, $/, DocId]), + RespHeaders = [{"Location", Loc}], + case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of + ("multipart/related;" ++ _) = ContentType -> + {ok, Doc0} = couch_doc:doc_from_multi_part_stream(ContentType, + fun() -> receive_request_data(Req) end), + Doc = couch_doc_from_req(Req, DocId, Doc0), + update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType); + _Else -> + case couch_httpd:qs_value(Req, "batch") of + "ok" -> + % batch + Doc = couch_doc_from_req(Req, DocId, couch_httpd:json_body(Req)), + + spawn(fun() -> + case catch(fabric:update_doc(Db, Doc, [{user_ctx,Ctx}])) of + {ok, _} -> ok; + Error -> + ?LOG_INFO("Batch doc error (~s): ~p",[DocId, Error]) + end + end), + send_json(Req, 202, [], {[ + {ok, true}, + {id, DocId} + ]}); + _Normal -> + % normal + Body = couch_httpd:json_body(Req), + Doc = couch_doc_from_req(Req, DocId, Body), + update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType) + end end; db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> @@ -539,6 +553,8 @@ db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> db_doc_req(Req, _Db, _DocId) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY"). +receive_request_data(Req) -> + {couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}. update_doc_result_to_json({{Id, Rev}, Error}) -> {_Code, Err, Msg} = chttpd:error_info(Error), @@ -557,28 +573,27 @@ update_doc_result_to_json(DocId, Error) -> update_doc(Req, Db, DocId, Json) -> update_doc(Req, Db, DocId, Json, []). -update_doc(#httpd{user_ctx=Ctx} = Req, Db, DocId, Json, Headers) -> - #doc{deleted=Deleted} = Doc = couch_doc_from_req(Req, DocId, Json), +update_doc(Req, Db, DocId, Doc, Headers) -> + update_doc(Req, Db, DocId, Doc, Headers, interactive_edit). 
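The PUT handler above now treats ?batch=ok as a fire-and-forget write: the update is pushed to a spawned process and the client is answered immediately with 202 Accepted and no revision. A minimal standalone sketch of that pattern, where UpdateFun stands in for the fabric:update_doc/3 call and io:format for the ?LOG_INFO macro:

    -module(batch_update_sketch).
    -export([async_batch_update/2]).

    async_batch_update(UpdateFun, DocId) ->
        spawn(fun() ->
            case catch UpdateFun() of
                {ok, _NewRev} ->
                    ok;
                Error ->
                    io:format("Batch doc error (~s): ~p~n", [DocId, Error])
            end
        end),
        %% reply before the write is durably applied: 202, id only, no rev
        {202, [{ok, true}, {id, DocId}]}.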
- case chttpd:header_value(Req, "X-Couch-Full-Commit") of +update_doc(#httpd{user_ctx=Ctx} = Req, Db, DocId, #doc{deleted=Deleted}=Doc, + Headers, UpdateType) -> + case couch_httpd:header_value(Req, "X-Couch-Full-Commit") of "true" -> - Options = [full_commit, {user_ctx,Ctx}]; + Options = [full_commit, UpdateType, {user_ctx,Ctx}]; "false" -> - Options = [delay_commit, {user_ctx,Ctx}]; + Options = [delay_commit, UpdateType, {user_ctx,Ctx}]; _ -> - Options = [{user_ctx,Ctx}] - end, - {Status, NewRev} = case fabric:update_doc(Db, Doc, Options) of - {ok, NewRev1} -> {201, NewRev1}; - {accepted, NewRev1} -> {202, NewRev1} + Options = [UpdateType, {user_ctx,Ctx}] end, + {ok, NewRev} = fabric:update_doc(Db, Doc, Options), NewRevStr = couch_doc:rev_to_str(NewRev), - ResponseHeaders = [{"Etag", <<"\"", NewRevStr/binary, "\"">>}] ++ Headers, - send_json(Req, if Deleted -> 200; true -> Status end, - ResponseHeaders, {[ - {ok, true}, - {id, DocId}, - {rev, NewRevStr}]}). + ResponseHeaders = [{"Etag", <<"\"", NewRevStr/binary, "\"">>} | Headers], + send_json(Req, if Deleted -> 200; true -> 201 end, ResponseHeaders, {[ + {ok, true}, + {id, DocId}, + {rev, NewRevStr} + ]}). couch_doc_from_req(Req, DocId, Json) -> Doc = couch_doc:from_json_obj(Json), @@ -774,17 +789,6 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa db_attachment_req(Req, _Db, _DocId, _FileNameParts) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,PUT"). -parse_doc_format(FormatStr) when is_binary(FormatStr) -> - parse_doc_format(?b2l(FormatStr)); -parse_doc_format(FormatStr) when is_list(FormatStr) -> - SplitFormat = lists:splitwith(fun($/) -> false; (_) -> true end, FormatStr), - case SplitFormat of - {DesignName, [$/ | ShowName]} -> {?l2b(DesignName), ?l2b(ShowName)}; - _Else -> throw({bad_request, <<"Invalid doc format">>}) - end; -parse_doc_format(_BadFormatStr) -> - throw({bad_request, <<"Invalid doc format">>}). - get_md5_header(Req) -> ContentMD5 = couch_httpd:header_value(Req, "Content-MD5"), Length = couch_httpd:body_length(Req), @@ -836,8 +840,16 @@ parse_doc_query(Req) -> {"open_revs", RevsJsonStr} -> JsonArray = ?JSON_DECODE(RevsJsonStr), Args#doc_query_args{open_revs=[couch_doc:parse_rev(Rev) || Rev <- JsonArray]}; - {"show", FormatStr} -> - Args#doc_query_args{show=parse_doc_format(FormatStr)}; + {"atts_since", RevsJsonStr} -> + JsonArray = ?JSON_DECODE(RevsJsonStr), + Args#doc_query_args{atts_since = couch_doc:parse_revs(JsonArray)}; + {"new_edits", "false"} -> + Args#doc_query_args{update_type=replicated_changes}; + {"new_edits", "true"} -> + Args#doc_query_args{update_type=interactive_edit}; + {"att_encoding_info", "true"} -> + Options = [att_encoding_info | Args#doc_query_args.options], + Args#doc_query_args{options=Options}; {"r", R} -> Options = [{r,R} | Args#doc_query_args.options], Args#doc_query_args{options=Options}; -- cgit v1.2.3 From 828219b891a9642b7140ca092fcf913075f7a1ce Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Tue, 13 Jul 2010 12:23:35 -0400 Subject: whitespace --- src/chttpd.erl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 42b4ab41..3ba7d265 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -14,15 +14,15 @@ -include_lib("couch/include/couch_db.hrl"). 
-export([start_link/0, stop/0, handle_request/1, config_change/2, - primary_header_value/2, header_value/2, header_value/3, qs_value/2, + primary_header_value/2, header_value/2, header_value/3, qs_value/2, qs_value/3, qs/1, path/1, absolute_uri/2, body_length/1, verify_is_server_admin/1, unquote/1, quote/1, recv/2,recv_chunked/4, error_info/1, parse_form/1, json_body/1, json_body_obj/1, body/1, doc_etag/1, make_etag/1, etag_respond/3, partition/1, serve_file/3, - server_header/0, start_chunked_response/3,send_chunk/2, + server_header/0, start_chunked_response/3,send_chunk/2, start_response_length/4, send/2, start_json_response/2, start_json_response/3, end_json_response/1, send_response/4, - send_method_not_allowed/2, send_error/2, send_error/4, send_redirect/2, + send_method_not_allowed/2, send_error/2, send_error/4, send_redirect/2, send_chunked_error/2, send_json/2,send_json/3,send_json/4]). start_link() -> @@ -268,7 +268,7 @@ absolute_uri(#httpd{mochi_req=MochiReq} = Req, Path) -> Host = case MochiReq:get_header_value(XHost) of undefined -> case MochiReq:get_header_value("Host") of - undefined -> + undefined -> {ok, {Address, Port}} = inet:sockname(MochiReq:get(socket)), inet_parse:ntoa(Address) ++ ":" ++ integer_to_list(Port); Value1 -> @@ -309,7 +309,7 @@ recv_chunked(#httpd{mochi_req=MochiReq}, MaxChunkSize, ChunkFun, InitState) -> % Fun({Length, Binary}, State) % called with Length == 0 on the last time. MochiReq:stream_body(MaxChunkSize, ChunkFun, InitState). - + body_length(Req) -> case header_value(Req, "Transfer-Encoding") of undefined -> -- cgit v1.2.3 From c2942b1ae7bf7d7d7a67ae550dd9322e8ec15689 Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Tue, 13 Jul 2010 12:25:19 -0400 Subject: add _membership handler to cluster http layer, and get good node list --- src/chttpd.erl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/chttpd.erl b/src/chttpd.erl index 3ba7d265..47d4c6a7 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -208,6 +208,7 @@ url_handler("_user") -> fun chttpd_auth:handle_user_req/1; url_handler("_oauth") -> fun chttpd_oauth:handle_oauth_req/1; url_handler("_metrics") -> fun chttpd_misc:handle_metrics_req/1; url_handler("_restart") -> fun showroom_http:handle_restart_req/1; +url_handler("_membership") -> fun mem3_httpd:handle_membership_req/1; url_handler("_cloudant") -> fun showroom_httpd_admin:handle_cloudant_req/1; url_handler(_) -> fun chttpd_db:handle_request/1. -- cgit v1.2.3 From 7a40a6f9bb6110f83d1a229636cca1322fdfb7e8 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 13 Jul 2010 18:35:48 -0400 Subject: many updates to doc handling, batch=ok works now --- src/chttpd_db.erl | 338 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 211 insertions(+), 127 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 0719623f..c185d1e8 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -48,8 +48,10 @@ handle_request(#httpd{path_parts=[DbName|RestParts],method=Method, end. handle_changes_req(#httpd{method='GET'}=Req, Db) -> - #changes_args{filter=Filter} = Args0 = parse_changes_query(Req), - ChangesArgs = Args0#changes_args{filter=make_filter_fun(Filter, Req, Db)}, + #changes_args{filter=Raw, style=Style} = Args0 = parse_changes_query(Req), + ChangesArgs = Args0#changes_args{ + filter = couch_changes:make_filter_fun(Raw, Style, Req, Db) + }, case ChangesArgs#changes_args.feed of "normal" -> T0 = now(), @@ -148,9 +150,9 @@ handle_design_info_req(Req, _Db, _DDoc) -> send_method_not_allowed(Req, "GET"). 
create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) -> - N = chttpd:qs_value(Req, "n", couch_config:get("cluster", "n", "3")), - Q = chttpd:qs_value(Req, "q", couch_config:get("cluster", "q", "8")), - case fabric:create_db(DbName, [{user_ctx, UserCtx},{n,N},{q,Q}]) of + N = couch_httpd:qs_value(Req, "n", couch_config:get("cluster", "n", "3")), + Q = couch_httpd:qs_value(Req, "q", couch_config:get("cluster", "q", "8")), + case fabric:create_db(DbName, [{user_ctx,UserCtx}, {n,N}, {q,Q}]) of ok -> DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), send_json(Req, 201, [{"Location", DocUrl}], {[{ok, true}]}); @@ -175,8 +177,9 @@ db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> {ok, DbInfo} = fabric:get_db_info(DbName), send_json(Req, {cloudant_util:customer_db_info(Req, DbInfo)}); -db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> - Doc = couch_doc:from_json_obj(chttpd:json_body(Req)), +db_req(#httpd{method='POST', path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> + couch_httpd:validate_ctype(Req, "application/json"), + Doc = couch_doc:from_json_obj(couch_httpd:json_body(Req)), Doc2 = case Doc#doc.id of <<"">> -> Doc#doc{id=couch_uuids:new(), revs={0, []}}; @@ -184,20 +187,25 @@ db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> Doc end, DocId = Doc2#doc.id, - case chttpd:qs_value(Req, "batch") of + case couch_httpd:qs_value(Req, "batch") of "ok" -> - % batch - ok = couch_batch_save:eventually_save_doc( - Db#db.name, Doc2, Db#db.user_ctx), + % async_batching + spawn(fun() -> + case catch(fabric:update_doc(Db, Doc2, [{user_ctx, Ctx}])) of + {ok, _} -> ok; + Error -> + ?LOG_INFO("Batch doc error (~s): ~p",[DocId, Error]) + end + end), + send_json(Req, 202, [], {[ {ok, true}, {id, DocId} ]}); _Normal -> % normal - {ok, NewRev} = fabric:update_doc(Db, Doc2, [{user_ctx,Ctx}]), - DocUrl = absolute_uri( - Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)), + {ok, NewRev} = fabric:update_doc(Db, Doc2, [{user_ctx, Ctx}]), + DocUrl = absolute_uri(Req, [$/, DbName, $/, DocId]), send_json(Req, 201, [{"Location", DocUrl}], {[ {ok, true}, {id, DocId}, @@ -205,7 +213,6 @@ db_req(#httpd{method='POST',path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> ]}) end; - db_req(#httpd{path_parts=[_DbName]}=Req, _Db) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,POST"); @@ -220,6 +227,7 @@ db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) -> db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, Db) -> couch_stats_collector:increment({httpd, bulk_requests}), + couch_httpd:validate_ctype(Req, "application/json"), {JsonProps} = chttpd:json_body_obj(Req), DocsArray = couch_util:get_value(<<"docs">>, JsonProps), case chttpd:header_value(Req, "X-Couch-Full-Commit") of @@ -268,15 +276,17 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req, end; false -> Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray], + [validate_attachment_names(D) || D <- Docs], {ok, Errors} = fabric:update_docs(Db, Docs, [replicated_changes|Options]), - ErrorsJson = - lists:map(fun update_doc_result_to_json/1, Errors), + ErrorsJson = lists:map(fun update_doc_result_to_json/1, Errors), send_json(Req, 201, ErrorsJson) end; + db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) -> send_method_not_allowed(Req, "POST"); db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) -> + couch_httpd:validate_ctype(Req, "application/json"), {IdsRevs} = chttpd:json_body_obj(Req), IdsRevs2 
= [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs], case fabric:purge_docs(Db, IdsRevs2) of @@ -299,9 +309,11 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) -> {Fields} = chttpd:json_body_obj(Req), - case couch_util:get_value(<<"keys">>, Fields) of + case couch_util:get_value(<<"keys">>, Fields, nil) of Keys when is_list(Keys) -> all_docs_view(Req, Db, Keys); + nil -> + all_docs_view(Req, Db, nil); _ -> throw({bad_request, "`keys` body member must be an array."}) end; @@ -310,9 +322,9 @@ db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) -> send_method_not_allowed(Req, "GET,HEAD,POST"); db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) -> - {JsonDocIdRevs} = chttpd:json_body_obj(Req), + {JsonDocIdRevs} = couch_httpd:json_body_obj(Req), {ok, Results} = fabric:get_missing_revs(Db, JsonDocIdRevs), - Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results], + Results2 = [{Id, couch_doc:revs_to_strs(Revs)} || {Id, Revs} <- Results], send_json(Req, {[ {missing_revs, {Results2}} ]}); @@ -320,30 +332,52 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) -> db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) -> send_method_not_allowed(Req, "POST"); -db_req(#httpd{method='PUT',path_parts=[_,<<"_admins">>]}=Req, - Db) -> - Admins = chttpd:json_body(Req), - ok = ?COUCH:set_admins(Db, Admins), +db_req(#httpd{method='POST',path_parts=[_,<<"_revs_diff">>]}=Req, Db) -> + {JsonDocIdRevs} = couch_httpd:json_body_obj(Req), + {ok, Results} = fabric:get_missing_revs(Db, JsonDocIdRevs), + Results2 = + lists:map(fun({Id, MissingRevs, PossibleAncestors}) -> + {Id, + {[{missing, couch_doc:revs_to_strs(MissingRevs)}] ++ + if PossibleAncestors == [] -> + []; + true -> + [{possible_ancestors, + couch_doc:revs_to_strs(PossibleAncestors)}] + end}} + end, Results), + send_json(Req, {Results2}); + +db_req(#httpd{path_parts=[_,<<"_revs_diff">>]}=Req, _Db) -> + send_method_not_allowed(Req, "POST"); + +db_req(#httpd{method='PUT',path_parts=[_,<<"_security">>]}=Req, Db) -> + SecObj = couch_httpd:json_body(Req), + ok = fabric:set_security(Db, SecObj), send_json(Req, {[{<<"ok">>, true}]}); -db_req(#httpd{method='GET',path_parts=[_,<<"_admins">>]}=Req, Db) -> - send_json(Req, ?COUCH:get_admins(Db)); +db_req(#httpd{method='GET',path_parts=[_,<<"_security">>]}=Req, Db) -> + send_json(Req, fabric:get_security(Db)); -db_req(#httpd{path_parts=[_,<<"_admins">>]}=Req, _Db) -> +db_req(#httpd{path_parts=[_,<<"_security">>]}=Req, _Db) -> send_method_not_allowed(Req, "PUT,GET"); db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req, Db) -> Limit = chttpd:json_body(Req), - ok = ?COUCH:set_revs_limit(Db, Limit), + ok = fabric:set_revs_limit(Db, Limit), send_json(Req, {[{<<"ok">>, true}]}); db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) -> - send_json(Req, ?COUCH:get_revs_limit(Db)); + send_json(Req, fabric:get_revs_limit(Db)); db_req(#httpd{path_parts=[_,<<"_revs_limit">>]}=Req, _Db) -> send_method_not_allowed(Req, "PUT,GET"); +% vanilla CouchDB sends a 301 here, but we just handle the request +db_req(#httpd{path_parts=[DbName,<<"_design/",Name/binary>>|Rest]}=Req, Db) -> + db_req(Req#httpd{path_parts=[DbName, <<"_design">>, Name | Rest]}, Db); + % Special case to enable using an unencoded slash in the URL of design docs, % as slashes in document IDs must otherwise be URL encoded. 
db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name]}=Req, Db) -> @@ -356,19 +390,16 @@ db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name|FileNameParts]}=Req, Db) -> % Special case to allow for accessing local documents without %2F % encoding the docid. Throws out requests that don't have the second % path part or that specify an attachment name. -db_req(#httpd{path_parts=[_DbName, <<"_local">>, Name]}=Req, Db) -> - db_doc_req(Req, Db, <<"_local/", Name/binary>>); - db_req(#httpd{path_parts=[_DbName, <<"_local">>]}, _Db) -> - throw({bad_request, <<"Missing _local document id.">>}); + throw({bad_request, <<"Invalid _local document id.">>}); db_req(#httpd{path_parts=[_DbName, <<"_local/">>]}, _Db) -> - throw({bad_request, <<"Missing _local document id.">>}); + throw({bad_request, <<"Invalid _local document id.">>}); -db_req(#httpd{path_parts=[_DbName, <<"_local">> | _Rest]}, _Db) -> - throw({bad_request, <<"_local documents do not accept attachments.">>}); +db_req(#httpd{path_parts=[_DbName, <<"_local">>, Name]}=Req, Db) -> + db_doc_req(Req, Db, <<"_local/", Name/binary>>); -db_req(#httpd{path_parts=[_DbName, <<"_local/", _/binary>>, _ | _]}, _Db) -> +db_req(#httpd{path_parts=[_DbName, <<"_local">> | _Rest]}, _Db) -> throw({bad_request, <<"_local documents do not accept attachments.">>}); db_req(#httpd{path_parts=[_, DocId]}=Req, Db) -> @@ -408,66 +439,75 @@ db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) -> couch_doc_open(Db, DocId, nil, []), case chttpd:qs_value(Req, "rev") of undefined -> - update_doc(Req, Db, DocId, {[{<<"_deleted">>,true}]}); + Body = {[{<<"_deleted">>,true}]}; Rev -> - update_doc(Req, Db, DocId, {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]}) - end; + Body = {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]} + end, + update_doc(Req, Db, DocId, couch_doc_from_req(Req, DocId, Body)); db_doc_req(#httpd{method='GET'}=Req, Db, DocId) -> #doc_query_args{ rev = Rev, open_revs = Revs, - options = Options + options = Options, + atts_since = AttsSince } = parse_doc_query(Req), case Revs of [] -> - Doc = couch_doc_open(Db, DocId, Rev, Options), - DiskEtag = chttpd:doc_etag(Doc), - case Doc#doc.meta of - [] -> - % output etag only when we have no meta - chttpd:etag_respond(Req, DiskEtag, fun() -> - send_json(Req, 200, [{"Etag", DiskEtag}], couch_doc:to_json_obj(Doc, Options)) - end); - _ -> - send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options)) - end; + Options2 = + if AttsSince /= nil -> + [{atts_since, AttsSince}, attachments | Options]; + true -> Options + end, + Doc = couch_doc_open(Db, DocId, Rev, Options2), + send_doc(Req, Doc, Options2); _ -> {ok, Results} = fabric:open_revs(Db, DocId, Revs, Options), - {ok, Resp} = start_json_response(Req, 200), - send_chunk(Resp, "["), - % We loop through the docs. The first time through the separator - % is whitespace, then a comma on subsequent iterations. - lists:foldl( - fun(Result, AccSeparator) -> - case Result of - {ok, Doc} -> - JsonDoc = couch_doc:to_json_obj(Doc, Options), - Json = ?JSON_ENCODE({[{ok, JsonDoc}]}), - send_chunk(Resp, AccSeparator ++ Json); - {{not_found, missing}, RevId} -> - Json = ?JSON_ENCODE({[{"missing", RevId}]}), - send_chunk(Resp, AccSeparator ++ Json) + AcceptedTypes = case couch_httpd:header_value(Req, "Accept") of + undefined -> []; + AcceptHeader -> string:tokens(AcceptHeader, "; ") + end, + case lists:member("multipart/mixed", AcceptedTypes) of + false -> + {ok, Resp} = start_json_response(Req, 200), + send_chunk(Resp, "["), + % We loop through the docs. 
The first time through the separator + % is whitespace, then a comma on subsequent iterations. + lists:foldl( + fun(Result, AccSeparator) -> + case Result of + {ok, Doc} -> + JsonDoc = couch_doc:to_json_obj(Doc, Options), + Json = ?JSON_ENCODE({[{ok, JsonDoc}]}), + send_chunk(Resp, AccSeparator ++ Json); + {{not_found, missing}, RevId} -> + RevStr = couch_doc:rev_to_str(RevId), + Json = ?JSON_ENCODE({[{"missing", RevStr}]}), + send_chunk(Resp, AccSeparator ++ Json) + end, + "," % AccSeparator now has a comma end, - "," % AccSeparator now has a comma - end, - "", Results), - send_chunk(Resp, "]"), - end_json_response(Resp) + "", Results), + send_chunk(Resp, "]"), + end_json_response(Resp); + true -> + send_docs_multipart(Req, Results, Options) + end end; db_doc_req(#httpd{method='POST', user_ctx=Ctx}=Req, Db, DocId) -> + couch_httpd:validate_referer(Req), couch_doc:validate_docid(DocId), - case chttpd:header_value(Req, "content-type") of - "multipart/form-data" ++ _Rest -> - ok; - _Else -> - throw({bad_ctype, <<"Invalid Content-Type header for form upload">>}) + couch_httpd:validate_ctype(Req, "multipart/form-data"), + Form = couch_httpd:parse_form(Req), + case proplists:is_defined("_doc", Form) of + true -> + Json = ?JSON_DECODE(couch_util:get_value("_doc", Form)), + Doc = couch_doc_from_req(Req, DocId, Json); + false -> + Rev = couch_doc:parse_rev(list_to_binary(couch_util:get_value("_rev", Form))), + {ok, [{ok, Doc}]} = fabric:open_revs(Db, DocId, [Rev], []) end, - Form = chttpd:parse_form(Req), - Rev = couch_doc:parse_rev(list_to_binary(couch_util:get_value("_rev", Form))), - {ok, [{ok, Doc}]} = fabric:open_revs(Db, DocId, [Rev], []), - UpdatedAtts = [ #att{name=validate_attachment_name(Name), type=list_to_binary(ContentType), @@ -535,7 +575,7 @@ db_doc_req(#httpd{method='PUT', user_ctx=Ctx}=Req, Db, DocId) -> db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> SourceRev = - case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of + case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of missing_rev -> nil; Rev -> Rev end, @@ -553,6 +593,75 @@ db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> db_doc_req(Req, _Db, _DocId) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY"). +send_doc(Req, Doc, Options) -> + case Doc#doc.meta of + [] -> + DiskEtag = couch_httpd:doc_etag(Doc), + % output etag only when we have no meta + couch_httpd:etag_respond(Req, DiskEtag, fun() -> + send_doc_efficiently(Req, Doc, [{"Etag", DiskEtag}], Options) + end); + _ -> + send_doc_efficiently(Req, Doc, [], Options) + end. 
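send_doc/3 above only wraps the response in couch_httpd:etag_respond/3 when the document carries no meta entries, so revision ETags stay trustworthy. Roughly, the conditional-GET behaviour it relies on looks like this sketch (names are illustrative; the real implementation reads the If-None-Match header from the mochiweb request):

    %% If the client already holds the current ETag, skip generating the
    %% body and answer 304 Not Modified; otherwise run the body fun.
    etag_respond_sketch(IfNoneMatch, CurrentEtag, GenerateBodyFun) ->
        case IfNoneMatch =:= CurrentEtag of
            true ->
                {304, [{"Etag", CurrentEtag}], <<>>};
            false ->
                {200, [{"Etag", CurrentEtag}], GenerateBodyFun()}
        end.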
+ +send_doc_efficiently(Req, #doc{atts=[]}=Doc, Headers, Options) -> + send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options)); +send_doc_efficiently(Req, #doc{atts=Atts}=Doc, Headers, Options) -> + case lists:member(attachments, Options) of + true -> + AcceptedTypes = case couch_httpd:header_value(Req, "Accept") of + undefined -> []; + AcceptHeader -> string:tokens(AcceptHeader, ", ") + end, + case lists:member("multipart/related", AcceptedTypes) of + false -> + send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options)); + true -> + Boundary = couch_uuids:random(), + JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc, + [attachments, follows|Options])), + {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream( + Boundary,JsonBytes, Atts,false), + CType = {<<"Content-Type">>, ContentType}, + {ok, Resp} = start_response_length(Req, 200, [CType|Headers], Len), + couch_doc:doc_to_multi_part_stream(Boundary,JsonBytes,Atts, + fun(Data) -> couch_httpd:send(Resp, Data) end, false) + end; + false -> + send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options)) + end. + +send_docs_multipart(Req, Results, Options) -> + OuterBoundary = couch_uuids:random(), + InnerBoundary = couch_uuids:random(), + CType = {"Content-Type", + "multipart/mixed; boundary=\"" ++ ?b2l(OuterBoundary) ++ "\""}, + {ok, Resp} = start_chunked_response(Req, 200, [CType]), + couch_httpd:send_chunk(Resp, <<"--", OuterBoundary/binary>>), + lists:foreach( + fun({ok, #doc{atts=Atts}=Doc}) -> + JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc, + [attachments,follows|Options])), + {ContentType, _Len} = couch_doc:len_doc_to_multi_part_stream( + InnerBoundary, JsonBytes, Atts, false), + couch_httpd:send_chunk(Resp, <<"\r\nContent-Type: ", + ContentType/binary, "\r\n\r\n">>), + couch_doc:doc_to_multi_part_stream(InnerBoundary, JsonBytes, Atts, + fun(Data) -> couch_httpd:send_chunk(Resp, Data) + end, false), + couch_httpd:send_chunk(Resp, <<"\r\n--", OuterBoundary/binary>>); + ({{not_found, missing}, RevId}) -> + RevStr = couch_doc:rev_to_str(RevId), + Json = ?JSON_ENCODE({[{"missing", RevStr}]}), + couch_httpd:send_chunk(Resp, + [<<"\r\nContent-Type: application/json; error=\"true\"\r\n\r\n">>, + Json, + <<"\r\n--", OuterBoundary/binary>>]) + end, Results), + couch_httpd:send_chunk(Resp, <<"--">>), + couch_httpd:last_chunk(Resp). + receive_request_data(Req) -> {couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}. @@ -595,21 +704,24 @@ update_doc(#httpd{user_ctx=Ctx} = Req, Db, DocId, #doc{deleted=Deleted}=Doc, {rev, NewRevStr} ]}). -couch_doc_from_req(Req, DocId, Json) -> - Doc = couch_doc:from_json_obj(Json), +couch_doc_from_req(Req, DocId, #doc{revs=Revs} = Doc) -> validate_attachment_names(Doc), ExplicitDocRev = - case Doc#doc.revs of + case Revs of {Start,[RevId|_]} -> {Start, RevId}; _ -> undefined end, case extract_header_rev(Req, ExplicitDocRev) of missing_rev -> - Revs = {0, []}; + Revs2 = {0, []}; + ExplicitDocRev -> + Revs2 = Revs; {Pos, Rev} -> - Revs = {Pos, [Rev]} + Revs2 = {Pos, [Rev]} end, - Doc#doc{id=DocId, revs=Revs}. + Doc#doc{id=DocId, revs=Revs2}; +couch_doc_from_req(Req, DocId, Json) -> + couch_doc_from_req(Req, DocId, couch_doc:from_json_obj(Json)). 
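%% The couch_doc_from_req/3 clause above settles the revision in a fixed order:
%% with no rev anywhere the doc starts a new edit branch at {0, []}; when the
%% rev extracted from the request (rev query arg or If-Match header) simply
%% echoes the _rev already in the body, the body's full revision path is kept;
%% otherwise the request-supplied rev wins. A compressed sketch of just that
%% decision; the module and resolve_revs/2 are hypothetical, and RequestRev
%% stands for the result of extract_header_rev/2:

-module(chttpd_rev_sketch).
-export([resolve_revs/2]).

resolve_revs(_BodyRevs, missing_rev) ->
    {0, []};                                % no revision anywhere: new edit branch
resolve_revs({Start, [RevId | _]} = BodyRevs, {Start, RevId}) ->
    BodyRevs;                               % request agrees with the body: keep its path
resolve_revs(_BodyRevs, {Pos, Rev}) ->
    {Pos, [Rev]}.                           % otherwise trust the request-supplied rev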
% Useful for debugging @@ -629,6 +741,8 @@ couch_doc_open(Db, DocId, Rev, Options) -> case fabric:open_revs(Db, DocId, [Rev], Options) of {ok, [{ok, Doc}]} -> Doc; + {ok, [{{not_found, missing}, Rev}]} -> + throw(not_found); {ok, [Else]} -> throw(Else) end @@ -637,7 +751,8 @@ couch_doc_open(Db, DocId, Rev, Options) -> % Attachment request handlers db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) -> - FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")), + FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, + FileNameParts),"/")), #doc_query_args{ rev=Rev, options=Options @@ -720,7 +835,7 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa _ -> [#att{ name=FileName, - type = case chttpd:header_value(Req,"Content-Type") of + type = case couch_httpd:header_value(Req,"Content-Type") of undefined -> % We could throw an error here or guess by the FileName. % Currently, just giving it a default. @@ -729,7 +844,7 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa list_to_binary(CType) end, data = fabric:att_receiver(Req, chttpd:body_length(Req)), - att_len = case chttpd:header_value(Req,"Content-Length") of + att_len = case couch_httpd:header_value(Req,"Content-Length") of undefined -> undefined; Length -> @@ -737,7 +852,7 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa end, md5 = get_md5_header(Req), encoding = case string:to_lower(string:strip( - chttpd:header_value(Req,"Content-Encoding","identity") + couch_httpd:header_value(Req,"Content-Encoding","identity") )) of "identity" -> identity; @@ -752,7 +867,7 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa }] end, - Doc = case extract_header_rev(Req, chttpd:qs_value(Req, "rev")) of + Doc = case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of missing_rev -> % make the new doc couch_doc:validate_docid(DocId), #doc{id=DocId}; @@ -768,17 +883,14 @@ db_attachment_req(#httpd{method=Method, user_ctx=Ctx}=Req, Db, DocId, FileNamePa atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName] }, {ok, UpdatedRev} = fabric:update_doc(Db, DocEdited, [{user_ctx,Ctx}]), - DbName = couch_db:name(Db), + #db{name=DbName} = Db, {Status, Headers} = case Method of 'DELETE' -> {200, []}; _ -> - {201, [{"Location", absolute_uri(Req, "/" ++ - binary_to_list(DbName) ++ "/" ++ - binary_to_list(DocId) ++ "/" ++ - binary_to_list(FileName) - )}]} + {201, [{"Location", absolute_uri(Req, [$/, DbName, $/, DocId, $/, + FileName])}]} end, send_json(Req,Status, Headers, {[ {ok, true}, @@ -887,35 +999,7 @@ parse_changes_query(Req) -> _Else -> % unknown key value pair, ignore. Args end - end, #changes_args{}, chttpd:qs(Req)). 
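%% The attachment PUT branch above defaults a missing Content-Type, parses
%% Content-Length, and normalizes Content-Encoding by stripping whitespace and
%% lower-casing before matching. A standalone sketch of that normalization; the
%% module and parse_encoding/1 are hypothetical, and the real handler throws
%% for any encoding other than identity or gzip:

-module(chttpd_att_encoding_sketch).
-export([parse_encoding/1]).

parse_encoding(undefined) ->
    identity;                               % no header means identity encoding
parse_encoding(Header) when is_list(Header) ->
    case string:to_lower(string:strip(Header)) of
        "identity" -> identity;
        "gzip" -> gzip;
        Other -> {unsupported, Other}       % chttpd rejects anything else
    end.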
- -make_filter_fun(Filter, _, _) when is_function(Filter, 1) -> - Filter; -make_filter_fun(FilterName, Req, Db) -> - case [?l2b(chttpd:unquote(X)) || X <- string:tokens(FilterName, "/")] of - [DName, FName] -> - case fabric:open_doc(Db, <<"_design/", DName/binary>>, []) of - {ok, #doc{body={Props}} = DDoc} -> - couch_util:get_nested_json_value({Props}, [<<"filters">>, FName]), - JsonReq = chttpd_external:json_req_obj(Req, Db), - fun(DocInfos) -> - Docs = [Doc || {ok, Doc} <- [ - {ok, _Doc} = fabric:open_doc(Db, Id, [deleted, conflicts]) - || #doc_info{id=Id} <- DocInfos]], - {ok, Passes} = couch_query_servers:filter_docs( - {json_req,JsonReq}, Db, DDoc, FName, Docs - ), - [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]} - || #doc_info{revs=[#rev_info{rev=Rev}|_]} <- DocInfos, - Pass <- Passes, Pass == true] - end; - Error -> - throw(Error) - end; - _Else -> - throw({bad_request, - "filter parameter must be of the form `designname/filtername`"}) - end. + end, #changes_args{}, couch_httpd:qs(Req)). extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)-> extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev)); -- cgit v1.2.3 From 71e490b6c38c8f23c84dba735ad2df62ad29a648 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 13 Jul 2010 18:36:20 -0400 Subject: trivial bug caught by dialyzer --- src/chttpd_oauth.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd_oauth.erl b/src/chttpd_oauth.erl index f0bffb36..70995409 100644 --- a/src/chttpd_oauth.erl +++ b/src/chttpd_oauth.erl @@ -31,7 +31,7 @@ oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) -> % Look up the consumer key and get the roles to give the consumer set_user_ctx(Req, AccessToken) -> DbName = couch_config:get("chttpd_auth", "authentication_db"), - {ok, _Db} = chttpd_auth:ensure_users_db_exists(?l2b(DbName)), + ok = chttpd_auth:ensure_users_db_exists(?l2b(DbName)), Name = ?l2b(couch_config:get("oauth_token_users", AccessToken)), case chttpd_auth:get_user(Name) of nil -> Req; -- cgit v1.2.3 From fead243883a6c11e887ddbbe0d4b8cbe6002ce3f Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 13 Jul 2010 19:03:31 -0400 Subject: remove unused increment_update_seq handler --- src/chttpd_misc.erl | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 3fd5e919..a0560f89 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -18,8 +18,6 @@ handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, handle_utils_dir_req/1, handle_favicon_req/1, handle_metrics_req/1]). --export([increment_update_seq_req/2]). - -include_lib("couch/include/couch_db.hrl"). @@ -191,17 +189,6 @@ handle_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req) -> handle_config_req(Req) -> send_method_not_allowed(Req, "GET,PUT,DELETE"). - -% httpd db handlers - -increment_update_seq_req(#httpd{method='POST'}=Req, Db) -> - {ok, NewSeq} = ?COUCH:increment_update_seq(Db), - send_json(Req, {[{ok, true}, - {update_seq, NewSeq} - ]}); -increment_update_seq_req(Req, _Db) -> - send_method_not_allowed(Req, "POST"). 
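%% The dialyzer fix above (chttpd_oauth) is the classic never-matching pattern:
%% chttpd_auth:ensure_users_db_exists/1 returns the bare atom ok in this series,
%% so binding its result to {ok, _Db} can only fail at runtime, which dialyzer
%% flags. A tiny self-contained illustration of the same class of bug; module
%% and function names are invented for the example:

-module(dialyzer_sketch).
-export([demo/0]).

ensure() ->
    ok.                  % success typing is ok, never {ok, _}

demo() ->
    ok = ensure().       % fine; {ok, _Db} = ensure() could never match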
- % httpd log handlers handle_log_req(#httpd{method='GET'}=Req) -> -- cgit v1.2.3 From d9ebd8bf90c7e51530a268db98f8b73d7407da82 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 14 Jul 2010 10:53:22 -0400 Subject: add _system resource for munin plugins --- src/chttpd_misc.erl | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index a0560f89..afa097d4 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -16,7 +16,8 @@ handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1, handle_uuids_req/1,handle_config_req/1,handle_log_req/1, handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, - handle_utils_dir_req/1, handle_favicon_req/1, handle_metrics_req/1]). + handle_utils_dir_req/1, handle_favicon_req/1, handle_metrics_req/1, + handle_system_req/1]). -include_lib("couch/include/couch_db.hrl"). @@ -220,3 +221,16 @@ handle_metrics_req(#httpd{method='GET', path_parts=[_, Id]}=Req) -> handle_metrics_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). +% Note: this resource is exposed on the backdoor interface, but it's in chttpd +% because it's not couch trunk +handle_system_req(Req) -> + Other = erlang:memory(system) - lists:sum([X || {_,X} <- + erlang:memory([atom, code, binary, ets])]), + Memory = [{other, Other} | erlang:memory([atom, atom_used, processes, + processes_used, binary, code, ets])], + send_json(Req, {[ + {memory, {Memory}}, + {run_queue, statistics(run_queue)}, + {process_count, erlang:system_info(process_count)}, + {process_limit, erlang:system_info(process_limit)} + ]}). -- cgit v1.2.3 From 06c0046f8225df82e38120c7bd487479ac3d726a Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 14 Jul 2010 14:49:22 -0400 Subject: support for revs_limit and security metadata --- src/chttpd_db.erl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index c185d1e8..22ca7fe6 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -351,9 +351,10 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_revs_diff">>]}=Req, Db) -> db_req(#httpd{path_parts=[_,<<"_revs_diff">>]}=Req, _Db) -> send_method_not_allowed(Req, "POST"); -db_req(#httpd{method='PUT',path_parts=[_,<<"_security">>]}=Req, Db) -> +db_req(#httpd{method='PUT',path_parts=[_,<<"_security">>],user_ctx=Ctx}=Req, + Db) -> SecObj = couch_httpd:json_body(Req), - ok = fabric:set_security(Db, SecObj), + ok = fabric:set_security(Db, SecObj, [{user_ctx,Ctx}]), send_json(Req, {[{<<"ok">>, true}]}); db_req(#httpd{method='GET',path_parts=[_,<<"_security">>]}=Req, Db) -> @@ -362,10 +363,10 @@ db_req(#httpd{method='GET',path_parts=[_,<<"_security">>]}=Req, Db) -> db_req(#httpd{path_parts=[_,<<"_security">>]}=Req, _Db) -> send_method_not_allowed(Req, "PUT,GET"); -db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req, +db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>],user_ctx=Ctx}=Req, Db) -> Limit = chttpd:json_body(Req), - ok = fabric:set_revs_limit(Db, Limit), + ok = fabric:set_revs_limit(Db, Limit, [{user_ctx,Ctx}]), send_json(Req, {[{<<"ok">>, true}]}); db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) -> -- cgit v1.2.3 From 9f4b3e86b174749408811416544b464cb78d6755 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 14 Jul 2010 15:33:16 -0400 Subject: oops, referenced the wrong config setting for auth db --- src/chttpd_auth.erl | 8 ++++---- src/chttpd_oauth.erl | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/src/chttpd_auth.erl b/src/chttpd_auth.erl index d1e9a7a5..26f05e1a 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -164,18 +164,18 @@ handle_session_req(Req) -> send_method_not_allowed(Req, "GET,HEAD,POST,DELETE"). handle_user_req(#httpd{method='POST'}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + DbName = couch_config:get("couch_httpd_auth", "authentication_db", "users"), ensure_users_db_exists(DbName), create_user(Req, DbName); handle_user_req(#httpd{method=Method, path_parts=[_]}=_Req) when Method == 'PUT' orelse Method == 'DELETE' -> throw({bad_request, <<"Username is missing">>}); handle_user_req(#httpd{method='PUT', path_parts=[_, UserName]}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + DbName = couch_config:get("couch_httpd_auth", "authentication_db", "users"), ensure_users_db_exists(DbName), update_user(Req, DbName, UserName); handle_user_req(#httpd{method='DELETE', path_parts=[_, UserName]}=Req) -> - DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + DbName = couch_config:get("couch_httpd_auth", "authentication_db", "users"), ensure_users_db_exists(DbName), delete_user(Req, DbName, UserName); handle_user_req(Req) -> @@ -205,7 +205,7 @@ get_user(UserName) -> end. load_user_from_db(UserName) -> - DbName = couch_config:get("chttpd_auth", "authentication_db", "users"), + DbName = couch_config:get("couch_httpd_auth", "authentication_db", "users"), try fabric:open_doc(DbName, UserName, []) of {ok, Doc} -> ?LOG_INFO("cache miss on username ~s", [UserName]), diff --git a/src/chttpd_oauth.erl b/src/chttpd_oauth.erl index 70995409..84506efe 100644 --- a/src/chttpd_oauth.erl +++ b/src/chttpd_oauth.erl @@ -30,7 +30,7 @@ oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) -> % Look up the consumer key and get the roles to give the consumer set_user_ctx(Req, AccessToken) -> - DbName = couch_config:get("chttpd_auth", "authentication_db"), + DbName = couch_config:get("couch_httpd_auth", "authentication_db", "users"), ok = chttpd_auth:ensure_users_db_exists(?l2b(DbName)), Name = ?l2b(couch_config:get("oauth_token_users", AccessToken)), case chttpd_auth:get_user(Name) of -- cgit v1.2.3 From 299d9d924c9aa40d5f327b0fc2848eccae20d8a4 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 15 Jul 2010 14:01:54 -0400 Subject: use couch_httpd_auth config block everywhere --- src/chttpd_auth.erl | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/chttpd_auth.erl b/src/chttpd_auth.erl index 26f05e1a..ac4679c5 100644 --- a/src/chttpd_auth.erl +++ b/src/chttpd_auth.erl @@ -141,7 +141,7 @@ handle_session_req(#httpd{method='POST', mochi_req=MochiReq, user_ctx=Ctx}=Req) throw({forbidden, <<"Name or password is incorrect.">>}) end end, - Secret = ?l2b(couch_config:get("chttpd_auth", "secret")), + Secret = ?l2b(couch_config:get("couch_httpd_auth", "secret")), SecretAndSalt = <<Secret/binary, Salt/binary>>, Cookie = generate_cookie(UserName, SecretAndSalt, timestamp()), send_response(Req, [Cookie]) @@ -253,7 +253,7 @@ cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> AuthSession = couch_util:decodeBase64Url(Cookie), [User, TimeStr | HashParts] = string:tokens(?b2l(AuthSession), ":"), % Verify expiry and hash - case couch_config:get("chttpd_auth", "secret") of + case couch_config:get("couch_httpd_auth", "secret") of undefined -> ?LOG_DEBUG("AuthSession cookie, but no secret in config!", []), {cookie_auth_failed, {internal_server_error, null}}; @@ -272,7 +272,7 @@
cookie_auth_user(#httpd{mochi_req=MochiReq}=Req) -> true -> TimeStamp = erlang:list_to_integer(TimeStr, 16), Timeout = erlang:list_to_integer(couch_config:get( - "chttpd_auth", "timeout", "600")), + "couch_httpd_auth", "timeout", "600")), CurrentTime = timestamp(), if CurrentTime < TimeStamp + Timeout -> TimeLeft = TimeStamp + Timeout - CurrentTime, @@ -380,11 +380,8 @@ generate_cookie(User, Secret, TimeStamp) -> SessionData = ?b2l(User) ++ ":" ++ erlang:integer_to_list(TimeStamp, 16), Hash = crypto:sha_mac(Secret, SessionData), Cookie = couch_util:encodeBase64Url(SessionData ++ ":" ++ ?b2l(Hash)), - % MaxAge = erlang:list_to_integer(couch_config:get("chttpd_auth", - % "timeout", "600")), % TODO add {secure, true} to options when SSL is detected mochiweb_cookies:cookie("AuthSession", Cookie, [{path, "/"}]). - % {max_age, MaxAge}]). hash_password(Password, Salt) -> ?l2b(couch_util:to_hex(crypto:sha(<<Password/binary, Salt/binary>>))). -- cgit v1.2.3 From 134dc2c24f08ef7487fcdf5c5d751603a5ed14a6 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 16 Jul 2010 11:07:22 -0400 Subject: measure all get_db_info calls --- src/chttpd_db.erl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index 22ca7fe6..bd11683b 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -174,7 +174,11 @@ do_db_req(#httpd{path_parts=[DbName|_]}=Req, Fun) -> Fun(Req, #db{name=DbName}). db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> + % measure the time required to generate the etag, see if it's worth it + T0 = now(), {ok, DbInfo} = fabric:get_db_info(DbName), + DeltaT = timer:now_diff(now(), T0) / 1000, + couch_stats_collector:record({couchdb, dbinfo}, DeltaT), send_json(Req, {cloudant_util:customer_db_info(Req, DbInfo)}); db_req(#httpd{method='POST', path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> -- cgit v1.2.3 From cad50b218bde37cd1f0f1cfb767334df57c325f5 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 16 Jul 2010 13:47:20 -0400 Subject: Human-readable 404 for missing DB, BugzID 10523 --- src/chttpd.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 47d4c6a7..3e46b263 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -510,7 +510,7 @@ error_info({bad_request, Reason}) -> error_info({query_parse_error, Reason}) -> {400, <<"query_parse_error">>, Reason}; error_info(database_does_not_exist) -> - {404, <<"not_found">>, <<"database_does_not_exist">>}; + {404, <<"not_found">>, <<"Database does not exist.">>}; error_info(not_found) -> {404, <<"not_found">>, <<"missing">>}; error_info({not_found, Reason}) -> -- cgit v1.2.3 From f7109f32f7c2f97eaa19ce334f6bf7ea8caa995e Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 16 Jul 2010 18:32:32 -0400 Subject: nicer error messages for missing attachment stubs --- src/chttpd.erl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/chttpd.erl b/src/chttpd.erl index 3e46b263..3ef33b40 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -540,6 +540,8 @@ error_info({error, illegal_database_name}) -> {400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), " "digits (0-9), and any of the characters _, $, (, ), +, -, and / " "are allowed">>}; +error_info({missing_stub, Reason}) -> + {412, <<"missing_stub">>, Reason}; error_info(not_implemented) -> {501, <<"not_implemented">>, <<"this feature is not yet implemented">>}; error_info({Error, Reason}) -> -- cgit v1.2.3 From 61450d9d1e01a8e8506f62343f6546903924c6a6 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 20
Jul 2010 13:19:41 -0400 Subject: sort the response to /_active_tasks --- src/chttpd_misc.erl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index afa097d4..a6cbd520 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -101,8 +101,7 @@ handle_task_status_req(#httpd{method='GET'}=Req) -> Response = lists:flatmap(fun({Node, Tasks}) -> [{[{node,Node} | Task]} || Task <- Tasks] end, Replies), - % TODO filter by customer - send_json(Req, Response); + send_json(Req, lists:sort(Response)); handle_task_status_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). -- cgit v1.2.3 From 1c5b4f76251999455be14c82c930f323915752bb Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 26 Jul 2010 20:52:49 -0400 Subject: use correct server header --- src/chttpd.erl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index 3ef33b40..e6e9866b 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -611,5 +611,4 @@ negotiate_content_type(#httpd{mochi_req=MochiReq}) -> end. server_header() -> - OTPVersion = erlang:system_info(otp_release), - [{"Server", "CouchDB/0.11.0 (Erlang OTP/" ++ OTPVersion ++ ")"}]. + couch_httpd:server_header(). -- cgit v1.2.3 From 875f8552e49eb2c4d281533035acd548c7ebba92 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Mon, 26 Jul 2010 20:54:26 -0400 Subject: appups for 1.2.1 --- ebin/chttpd.app | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ebin/chttpd.app b/ebin/chttpd.app index 93364fc4..7b072d4a 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -1,6 +1,6 @@ {application, chttpd, [ {description, "HTTP interface for CouchDB cluster"}, - {vsn, "1.0"}, + {vsn, "1.0.1"}, {modules, [ chttpd, chttpd_app, -- cgit v1.2.3 From e353d40c50b2355129f283bff2165da0b3a05d1c Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Tue, 27 Jul 2010 11:52:55 -0400 Subject: add the appups for real --- ebin/chttpd.appup | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 ebin/chttpd.appup diff --git a/ebin/chttpd.appup b/ebin/chttpd.appup new file mode 100644 index 00000000..f0c687f7 --- /dev/null +++ b/ebin/chttpd.appup @@ -0,0 +1,5 @@ +{"1.0.1",[{"1.0",[ + {load_module, chttpd_misc}, + {load_module, chttpd}, + {load_module, cloudant_auth} +]}],[{"1.0",[]}]}. -- cgit v1.2.3 From 3e3d587680b86b79115ebef386296309e562ba1d Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 28 Jul 2010 17:08:15 -0400 Subject: convert all replication requests into remote/remote --- src/chttpd_misc.erl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index a6cbd520..77d40b16 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -105,15 +105,19 @@ handle_task_status_req(#httpd{method='GET'}=Req) -> handle_task_status_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). 
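%% The chttpd.appup added above follows the standard OTP appup layout,
%% {Vsn, [{UpFromVsn, Instructions}], [{DownToVsn, Instructions}]}: the version
%% being installed, then per-version upgrade instructions, then per-version
%% downgrade instructions. An annotated copy of the 1.0.1 appup from this
%% series (the 1.0 downgrade clause declares no instructions of its own):

{"1.0.1",                                   %% version this appup installs
 [{"1.0", [{load_module, chttpd_misc},      %% modules to hot-load when
           {load_module, chttpd},           %% upgrading from 1.0
           {load_module, cloudant_auth}]}],
 [{"1.0", []}]}.                            %% downgrade path back to 1.0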
-handle_replicate_req(#httpd{method='POST'}=Req) -> +handle_replicate_req(#httpd{method='POST', user_ctx=Ctx} = Req) -> PostBody = get(post_body), - try showroom_rep:replicate(PostBody, Req#httpd.user_ctx) of + try couch_rep:replicate(PostBody, Ctx) of {ok, {continuous, RepId}} -> send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]}); + {ok, {cancelled, RepId}} -> + send_json(Req, 200, {[{ok, true}, {<<"_local_id">>, RepId}]}); {ok, {JsonResults}} -> send_json(Req, {[{ok, true} | JsonResults]}); {error, {Type, Details}} -> send_json(Req, 500, {[{error, Type}, {reason, Details}]}); + {error, not_found} -> + send_json(Req, 404, {[{error, not_found}]}); {error, Reason} -> send_json(Req, 500, {[{error, Reason}]}) catch -- cgit v1.2.3 From b4ba7a67ff78eb84c85c6f8e67fa6022745b3e54 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Wed, 28 Jul 2010 17:35:57 -0400 Subject: appups for 1.2.2 --- ebin/chttpd.app | 2 +- ebin/chttpd.appup | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/ebin/chttpd.app b/ebin/chttpd.app index 7b072d4a..5b1b6cfb 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -1,6 +1,6 @@ {application, chttpd, [ {description, "HTTP interface for CouchDB cluster"}, - {vsn, "1.0.1"}, + {vsn, "1.0.2"}, {modules, [ chttpd, chttpd_app, diff --git a/ebin/chttpd.appup b/ebin/chttpd.appup index f0c687f7..9ba401ec 100644 --- a/ebin/chttpd.appup +++ b/ebin/chttpd.appup @@ -1,5 +1,4 @@ -{"1.0.1",[{"1.0",[ +{"1.0.2",[{"1.0.1",[ {load_module, chttpd_misc}, - {load_module, chttpd}, {load_module, cloudant_auth} -]}],[{"1.0",[]}]}. +]}],[{"1.0.1",[]}]}. -- cgit v1.2.3 From c232a16f5938fbd0ff1f37d9933d3f6eb7a98066 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 29 Jul 2010 20:33:26 -0400 Subject: enable _search handler. Didn't add [external] block to config --- src/chttpd.erl | 3 ++- src/chttpd_external.erl | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/chttpd.erl b/src/chttpd.erl index e6e9866b..0c4d2d70 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -218,7 +218,8 @@ db_url_handlers() -> {<<"_compact">>, fun chttpd_db:handle_compact_req/2}, {<<"_design">>, fun chttpd_db:handle_design_req/2}, {<<"_temp_view">>, fun chttpd_db:handle_temp_view_req/2}, - {<<"_changes">>, fun chttpd_db:handle_changes_req/2} + {<<"_changes">>, fun chttpd_db:handle_changes_req/2}, + {<<"_search">>, fun chttpd_external:handle_search_req/2} ]. design_url_handlers() -> diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index 3aff3291..ffde0ee7 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -15,11 +15,15 @@ -export([handle_external_req/2, handle_external_req/3]). -export([send_external_response/2, json_req_obj/2, json_req_obj/3]). -export([default_or_content_type/2, parse_external_response/1]). +-export([handle_search_req/2]). -import(chttpd,[send_error/4]). -include_lib("couch/include/couch_db.hrl"). +handle_search_req(Req, Db) -> + process_external_req(Req, Db, <<"search">>). 
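%% handle_search_req/2 above reuses the generic external-process machinery:
%% process_external_req/3 hands the JSON request object (json_req_obj/2, shown
%% later in this series) to the OS process registered under the matching key of
%% the [external] config section, assuming the stock couch external manager
%% behavior. As the commit message notes, that config entry was not added here;
%% registering one at runtime would look roughly like the call below (the
%% command path is purely illustrative):

couch_config:set("external", "search", "/usr/local/bin/search-handler", false).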
+ % handle_external_req/2 % for the old type of config usage: % _external = {chttpd_external, handle_external_req} -- cgit v1.2.3 From 974f97224523bdc09bafa3f1ce18c31b7dc78986 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Fri, 30 Jul 2010 15:16:56 -0400 Subject: appups for 1.2.3 --- ebin/chttpd.app | 2 +- ebin/chttpd.appup | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ebin/chttpd.app b/ebin/chttpd.app index 5b1b6cfb..a6a5f2b5 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -1,6 +1,6 @@ {application, chttpd, [ {description, "HTTP interface for CouchDB cluster"}, - {vsn, "1.0.2"}, + {vsn, "1.0.3"}, {modules, [ chttpd, chttpd_app, diff --git a/ebin/chttpd.appup b/ebin/chttpd.appup index 9ba401ec..e0b5cdfe 100644 --- a/ebin/chttpd.appup +++ b/ebin/chttpd.appup @@ -1,4 +1,4 @@ -{"1.0.2",[{"1.0.1",[ - {load_module, chttpd_misc}, - {load_module, cloudant_auth} -]}],[{"1.0.1",[]}]}. +{"1.0.3",[{"1.0.2",[ + {load_module, chttpd_external}, + {load_module, chttpd} +]}],[{"1.0.2",[]}]}. -- cgit v1.2.3 From e1dda6bc6219d7ac33ea75d56b9c02f5063b4787 Mon Sep 17 00:00:00 2001 From: Adam Kocoloski Date: Thu, 12 Aug 2010 11:31:13 -0400 Subject: remove some cloudant stuff --- ebin/chttpd.app | 3 +-- src/chttpd.erl | 39 +++++---------------------------------- src/chttpd_db.erl | 2 +- src/chttpd_external.erl | 17 ++++------------- src/chttpd_misc.erl | 23 +++-------------------- src/chttpd_view.erl | 4 ---- 6 files changed, 14 insertions(+), 74 deletions(-) diff --git a/ebin/chttpd.app b/ebin/chttpd.app index a6a5f2b5..a32f4786 100644 --- a/ebin/chttpd.app +++ b/ebin/chttpd.app @@ -12,8 +12,7 @@ chttpd_rewrite, chttpd_show, chttpd_sup, - chttpd_view, - cloudant_auth + chttpd_view ]}, {registered, [chttpd_sup, chttpd]}, {applications, [kernel, stdlib, couch, fabric]}, diff --git a/src/chttpd.erl b/src/chttpd.erl index 0c4d2d70..7bad9be2 100644 --- a/src/chttpd.erl +++ b/src/chttpd.erl @@ -63,9 +63,7 @@ handle_request(MochiReq) -> % for the path, use the raw path with the query string and fragment % removed, but URL quoting left intact RawUri = MochiReq:get(raw_path), - Customer = cloudant_util:customer_name(#httpd{mochi_req=MochiReq}), - {Path, _, _} = mochiweb_util:urlsplit_path(generate_customer_path(RawUri, - Customer)), + {"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri), {HandlerKey, _, _} = mochiweb_util:partition(Path, "/"), LogForClosedSocket = io_lib:format("mochiweb_recv_error for ~s - ~p ~s", [ @@ -104,7 +102,7 @@ handle_request(MochiReq) -> case authenticate_request(HttpReq, AuthenticationFuns) of #httpd{} = Req -> HandlerFun = url_handler(HandlerKey), - HandlerFun(cloudant_auth:authorize_request(Req)); + HandlerFun(Req); Response -> Response end @@ -140,35 +138,12 @@ handle_request(MochiReq) -> Peer = MochiReq:get(peer), Code = Resp:get(code), Host = MochiReq:get_header_value("Host"), - couch_metrics_event:notify(#response{ - peer = Peer, - host = Host, - customer = Customer, - code = Code, - time = RequestTime, - method = Method1, - uri = RawUri - }), - showroom_log:message(notice, "~s ~s ~s ~s ~B ~B", [Peer, Host, + ?LOG_INFO("~s ~s ~s ~s ~B ~B", [Peer, Host, atom_to_list(Method1), RawUri, Code, round(RequestTime)]), couch_stats_collector:record({couchdb, request_time}, RequestTime), couch_stats_collector:increment({httpd, requests}), {ok, Resp}. 
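%% The logging block above reports RequestTime in milliseconds. The pattern
%% used throughout this series (including the dbinfo timing patch earlier) is
%% based on now/0: timer:now_diff/2 returns microseconds, so dividing by 1000
%% yields ms. A sketch with illustrative variable names:

Begin = now(),
%% ... dispatch the request ...
RequestTime = timer:now_diff(now(), Begin) / 1000,   % microseconds -> milliseconds
couch_stats_collector:record({couchdb, request_time}, RequestTime).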
-generate_customer_path("/", _Customer) -> - ""; -generate_customer_path("/favicon.ico", _Customer) -> - "favicon.ico"; -generate_customer_path([$/,$_|Rest], _Customer) -> - lists:flatten([$_|Rest]); -generate_customer_path([$/|RawPath], Customer) -> - case Customer of - "" -> - RawPath; - Else -> - lists:flatten([Else, "%2F", RawPath]) - end. - % Try authentication handlers in order until one returns a result authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthFuns) -> Req; @@ -206,10 +181,8 @@ url_handler("_sleep") -> fun chttpd_misc:handle_sleep_req/1; url_handler("_session") -> fun chttpd_auth:handle_session_req/1; url_handler("_user") -> fun chttpd_auth:handle_user_req/1; url_handler("_oauth") -> fun chttpd_oauth:handle_oauth_req/1; -url_handler("_metrics") -> fun chttpd_misc:handle_metrics_req/1; url_handler("_restart") -> fun showroom_http:handle_restart_req/1; url_handler("_membership") -> fun mem3_httpd:handle_membership_req/1; -url_handler("_cloudant") -> fun showroom_httpd_admin:handle_cloudant_req/1; url_handler(_) -> fun chttpd_db:handle_request/1. db_url_handlers() -> @@ -265,7 +238,7 @@ qs(#httpd{mochi_req=MochiReq}) -> path(#httpd{mochi_req=MochiReq}) -> MochiReq:get(path). -absolute_uri(#httpd{mochi_req=MochiReq} = Req, Path) -> +absolute_uri(#httpd{mochi_req=MochiReq}, Path) -> XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"), Host = case MochiReq:get_header_value(XHost) of undefined -> @@ -290,9 +263,7 @@ absolute_uri(#httpd{mochi_req=MochiReq} = Req, Path) -> _ -> "http" end end, - CustomerRegex = ["^/", cloudant_util:customer_name(Req), "[/%2F]+"], - NewPath = re:replace(Path, CustomerRegex, "/", [{return,list}]), - Scheme ++ "://" ++ Host ++ NewPath. + Scheme ++ "://" ++ Host ++ Path. unquote(UrlEncodedString) -> mochiweb_util:unquote(UrlEncodedString). diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl index bd11683b..e01dffeb 100644 --- a/src/chttpd_db.erl +++ b/src/chttpd_db.erl @@ -179,7 +179,7 @@ db_req(#httpd{method='GET',path_parts=[DbName]}=Req, _Db) -> {ok, DbInfo} = fabric:get_db_info(DbName), DeltaT = timer:now_diff(now(), T0) / 1000, couch_stats_collector:record({couchdb, dbinfo}, DeltaT), - send_json(Req, {cloudant_util:customer_db_info(Req, DbInfo)}); + send_json(Req, {DbInfo}); db_req(#httpd{method='POST', path_parts=[DbName], user_ctx=Ctx}=Req, Db) -> couch_httpd:validate_ctype(Req, "application/json"), diff --git a/src/chttpd_external.erl b/src/chttpd_external.erl index ffde0ee7..51f32e10 100644 --- a/src/chttpd_external.erl +++ b/src/chttpd_external.erl @@ -63,7 +63,7 @@ json_req_obj(#httpd{mochi_req=Req, method=Method, path_parts=Path, req_body=ReqBody - } = HttpReq, Db, DocId) -> + }, Db, DocId) -> Body = case ReqBody of undefined -> Req:recv_body(); Else -> Else @@ -78,27 +78,18 @@ json_req_obj(#httpd{mochi_req=Req, Hlist = mochiweb_headers:to_list(Headers), {ok, Info} = fabric:get_db_info(Db), - % send correct path to customer - BugzID 6849 - CustomerBin = list_to_binary(cloudant_util:customer_name(HttpReq)), - Len = byte_size(CustomerBin), - FixedPath = case Path of - [<> | Rest] -> - [DbName | Rest]; - NoCustomer -> - NoCustomer - end, % add headers... 
- {[{<<"info">>, {cloudant_util:customer_db_info(HttpReq, Info)}}, + {[{<<"info">>, {Info}}, {<<"id">>, DocId}, {<<"method">>, Method}, - {<<"path">>, FixedPath}, + {<<"path">>, Path}, {<<"query">>, json_query_keys(to_json_terms(Req:parse_qs()))}, {<<"headers">>, to_json_terms(Hlist)}, {<<"body">>, Body}, {<<"peer">>, ?l2b(Req:get(peer))}, {<<"form">>, to_json_terms(ParsedForm)}, {<<"cookie">>, to_json_terms(Req:parse_cookie())}, - {<<"userCtx">>, couch_util:json_user_ctx(Db#db{name=hd(FixedPath)})}]}. + {<<"userCtx">>, couch_util:json_user_ctx(Db)}]}. to_json_terms(Data) -> to_json_terms(Data, []). diff --git a/src/chttpd_misc.erl b/src/chttpd_misc.erl index 77d40b16..9a05f69e 100644 --- a/src/chttpd_misc.erl +++ b/src/chttpd_misc.erl @@ -16,8 +16,7 @@ handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1, handle_uuids_req/1,handle_config_req/1,handle_log_req/1, handle_task_status_req/1,handle_sleep_req/1,handle_welcome_req/1, - handle_utils_dir_req/1, handle_favicon_req/1, handle_metrics_req/1, - handle_system_req/1]). + handle_utils_dir_req/1, handle_favicon_req/1, handle_system_req/1]). -include_lib("couch/include/couch_db.hrl"). @@ -36,7 +35,7 @@ handle_welcome_req(#httpd{method='GET'}=Req, WelcomeMessage) -> send_json(Req, {[ {couchdb, WelcomeMessage}, {version, list_to_binary(couch:version())}, - {cloudant_build, get_version()} + {dbcore, get_version()} ]}); handle_welcome_req(Req, _) -> send_method_not_allowed(Req, "GET,HEAD"). @@ -89,8 +88,7 @@ handle_sleep_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). handle_all_dbs_req(#httpd{method='GET'}=Req) -> - Customer = cloudant_util:customer_name(Req), - {ok, DbNames} = fabric:all_dbs(Customer), + {ok, DbNames} = fabric:all_dbs(), send_json(Req, DbNames); handle_all_dbs_req(Req) -> send_method_not_allowed(Req, "GET,HEAD"). @@ -209,21 +207,6 @@ handle_log_req(#httpd{method='GET'}=Req) -> handle_log_req(Req) -> send_method_not_allowed(Req, "GET"). -handle_metrics_req(#httpd{method='GET', path_parts=[_]}=Req) -> - {_, L} = lists:unzip(gen_event:which_handlers(couch_metrics_event_manager)), - send_json(Req, L); -handle_metrics_req(#httpd{method='GET', path_parts=[_, Id]}=Req) -> - case chttpd:qs_value(Req, "slice") of - undefined -> - Data = couch_metrics_event:get_global_metrics(?b2a(Id)); - SliceStr -> - Slice = list_to_integer(SliceStr), - Data = couch_metrics_event:get_global_metrics(?b2a(Id), Slice) - end, - send_json(Req, Data); -handle_metrics_req(Req) -> - send_method_not_allowed(Req, "GET,HEAD"). - % Note: this resource is exposed on the backdoor interface, but it's in chttpd % because it's not couch trunk handle_system_req(Req) -> diff --git a/src/chttpd_view.erl b/src/chttpd_view.erl index e09487c8..87a03307 100644 --- a/src/chttpd_view.erl +++ b/src/chttpd_view.erl @@ -258,10 +258,6 @@ validate_view_query(include_docs, true, Args) -> end; validate_view_query(include_docs, _Value, Args) -> Args; -validate_view_query(show_total_rows, false, Args) -> - Args#view_query_args{show_total_rows=false}; -validate_view_query(show_total_rows, _Value, Args) -> - Args; validate_view_query(sorted, false, Args) -> Args#view_query_args{sorted=false}; validate_view_query(sorted, _Value, Args) -> -- cgit v1.2.3