summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/couchdb/couch_db.erl11
-rw-r--r--src/couchdb/couch_doc.erl37
-rw-r--r--src/couchdb/couch_httpd.erl39
-rw-r--r--src/couchdb/couch_httpd_db.erl40
-rw-r--r--src/couchdb/couch_httpd_rewrite.erl192
-rw-r--r--src/couchdb/couch_js_functions.hrl36
-rw-r--r--src/couchdb/couch_key_tree.erl49
-rw-r--r--src/couchdb/couch_rep.erl52
-rw-r--r--src/couchdb/couch_rep_writer.erl4
-rw-r--r--src/couchdb/couch_replication_manager.erl6
-rw-r--r--src/erlang-oauth/oauth_uri.erl66
11 files changed, 340 insertions, 192 deletions
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl
index 1e7addaf..85f7e291 100644
--- a/src/couchdb/couch_db.erl
+++ b/src/couchdb/couch_db.erl
@@ -733,6 +733,8 @@ update_docs(Db, Docs, Options, interactive_edit) ->
% for the doc.
make_first_doc_on_disk(_Db, _Id, _Pos, []) ->
nil;
+make_first_doc_on_disk(Db, Id, Pos, [{_Rev, #doc{}} | RestPath]) ->
+ make_first_doc_on_disk(Db, Id, Pos-1, RestPath);
make_first_doc_on_disk(Db, Id, Pos, [{_Rev, ?REV_MISSING}|RestPath]) ->
make_first_doc_on_disk(Db, Id, Pos - 1, RestPath);
make_first_doc_on_disk(Db, Id, Pos, [{_Rev, {IsDel, Sp, _Seq}} |_]=DocPath) ->
@@ -810,7 +812,7 @@ doc_flush_atts(Doc, Fd) ->
Doc#doc{atts=[flush_att(Fd, Att) || Att <- Doc#doc.atts]}.
check_md5(_NewSig, <<>>) -> ok;
-check_md5(Sig1, Sig2) when Sig1 == Sig2 -> ok;
+check_md5(Sig, Sig) -> ok;
check_md5(_, _) -> throw(md5_mismatch).
flush_att(Fd, #att{data={Fd0, _}}=Att) when Fd0 == Fd ->
@@ -921,10 +923,15 @@ with_stream(Fd, #att{md5=InMd5,type=Type,encoding=Enc}=Att, Fun) ->
write_streamed_attachment(_Stream, _F, 0) ->
ok;
write_streamed_attachment(Stream, F, LenLeft) when LenLeft > 0 ->
- Bin = F(),
+ Bin = read_next_chunk(F, LenLeft),
ok = couch_stream:write(Stream, Bin),
write_streamed_attachment(Stream, F, LenLeft - size(Bin)).
+read_next_chunk(F, _) when is_function(F, 0) ->
+ F();
+read_next_chunk(F, LenLeft) when is_function(F, 1) ->
+ F(lists:min([LenLeft, 16#2000])).
+
enum_docs_since_reduce_to_count(Reds) ->
couch_btree:final_reduce(
fun couch_db_updater:btree_by_seq_reduce/2, Reds).
diff --git a/src/couchdb/couch_doc.erl b/src/couchdb/couch_doc.erl
index e3d66145..5cd6ac80 100644
--- a/src/couchdb/couch_doc.erl
+++ b/src/couchdb/couch_doc.erl
@@ -87,8 +87,14 @@ to_json_attachments(Atts, OutputData, DataToFollow, ShowEncInfo) ->
fun(#att{disk_len=DiskLen, att_len=AttLen, encoding=Enc}=Att) ->
{Att#att.name, {[
{<<"content_type">>, Att#att.type},
- {<<"revpos">>, Att#att.revpos}
- ] ++
+ {<<"revpos">>, Att#att.revpos}] ++
+ case Att#att.md5 of
+ <<>> ->
+ [];
+ Md5 ->
+ EncodedMd5 = base64:encode(Md5),
+ [{<<"digest">>, <<"md5-",EncodedMd5/binary>>}]
+ end ++
if not OutputData orelse Att#att.data == stub ->
[{<<"length">>, DiskLen}, {<<"stub">>, true}];
true ->
@@ -199,6 +205,12 @@ transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc) ->
transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
Atts = lists:map(fun({Name, {BinProps}}) ->
+ Md5 = case couch_util:get_value(<<"digest">>, BinProps) of
+ <<"md5-",EncodedMd5/binary>> ->
+ base64:decode(EncodedMd5);
+ _ ->
+ <<>>
+ end,
case couch_util:get_value(<<"stub">>, BinProps) of
true ->
Type = couch_util:get_value(<<"content_type">>, BinProps),
@@ -206,7 +218,7 @@ transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
DiskLen = couch_util:get_value(<<"length">>, BinProps),
{Enc, EncLen} = att_encoding_info(BinProps),
#att{name=Name, data=stub, type=Type, att_len=EncLen,
- disk_len=DiskLen, encoding=Enc, revpos=RevPos};
+ disk_len=DiskLen, encoding=Enc, revpos=RevPos, md5=Md5};
_ ->
Type = couch_util:get_value(<<"content_type">>, BinProps,
?DEFAULT_ATTACHMENT_CONTENT_TYPE),
@@ -216,7 +228,7 @@ transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
DiskLen = couch_util:get_value(<<"length">>, BinProps),
{Enc, EncLen} = att_encoding_info(BinProps),
#att{name=Name, data=follows, type=Type, encoding=Enc,
- att_len=EncLen, disk_len=DiskLen, revpos=RevPos};
+ att_len=EncLen, disk_len=DiskLen, revpos=RevPos, md5=Md5};
_ ->
Value = couch_util:get_value(<<"data">>, BinProps),
Bin = base64:decode(Value),
@@ -461,16 +473,17 @@ atts_to_mp([Att | RestAtts], Boundary, WriteFun,
doc_from_multi_part_stream(ContentType, DataFun) ->
- Self = self(),
+ Parent = self(),
Parser = spawn_link(fun() ->
- couch_httpd:parse_multipart_request(ContentType, DataFun,
- fun(Next)-> mp_parse_doc(Next, []) end),
- unlink(Self)
+ {<<"--">>, _, _} = couch_httpd:parse_multipart_request(
+ ContentType, DataFun,
+ fun(Next) -> mp_parse_doc(Next, []) end),
+ unlink(Parent),
+ Parent ! {self(), finished}
end),
Parser ! {get_doc_bytes, self()},
receive
{doc_bytes, DocBytes} ->
- erlang:put(mochiweb_request_recv, true),
Doc = from_json_obj(?JSON_DECODE(DocBytes)),
% go through the attachments looking for 'follows' in the data,
% replace with function that reads the data from MIME stream.
@@ -484,7 +497,11 @@ doc_from_multi_part_stream(ContentType, DataFun) ->
(A) ->
A
end, Doc#doc.atts),
- {ok, Doc#doc{atts=Atts2}}
+ WaitFun = fun() ->
+ receive {Parser, finished} -> ok end,
+ erlang:put(mochiweb_request_recv, true)
+ end,
+ {ok, Doc#doc{atts=Atts2}, WaitFun}
end.
mp_parse_doc({headers, H}, []) ->
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index 73d214e8..7c7781f6 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -764,24 +764,29 @@ error_headers(#httpd{mochi_req=MochiReq}=Req, Code, ErrorStr, ReasonStr) ->
% send the browser popup header no matter what if we are require_valid_user
{Code, [{"WWW-Authenticate", "Basic realm=\"server\""}]};
_False ->
- case MochiReq:accepts_content_type("text/html") of
- false ->
- {Code, []};
+ case MochiReq:accepts_content_type("application/json") of
true ->
- % Redirect to the path the user requested, not
- % the one that is used internally.
- UrlReturnRaw = case MochiReq:get_header_value("x-couchdb-vhost-path") of
- undefined ->
- MochiReq:get(path);
- VHostPath ->
- VHostPath
- end,
- RedirectLocation = lists:flatten([
- AuthRedirect,
- "?return=", couch_util:url_encode(UrlReturnRaw),
- "&reason=", couch_util:url_encode(ReasonStr)
- ]),
- {302, [{"Location", absolute_uri(Req, RedirectLocation)}]}
+ {Code, []};
+ false ->
+ case MochiReq:accepts_content_type("text/html") of
+ true ->
+ % Redirect to the path the user requested, not
+ % the one that is used internally.
+ UrlReturnRaw = case MochiReq:get_header_value("x-couchdb-vhost-path") of
+ undefined ->
+ MochiReq:get(path);
+ VHostPath ->
+ VHostPath
+ end,
+ RedirectLocation = lists:flatten([
+ AuthRedirect,
+ "?return=", couch_util:url_encode(UrlReturnRaw),
+ "&reason=", couch_util:url_encode(ReasonStr)
+ ]),
+ {302, [{"Location", absolute_uri(Req, RedirectLocation)}]};
+ false ->
+ {Code, []}
+ end
end
end
end;
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index 0dbebb6e..e3638b25 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -114,11 +114,16 @@ handle_changes_req1(Req, Db) ->
FeedChangesFun(MakeCallback(Resp))
end
end,
- couch_stats_collector:track_process_count(
+ couch_stats_collector:increment(
{httpd, clients_requesting_changes}
),
- WrapperFun(ChangesFun).
-
+ try
+ WrapperFun(ChangesFun)
+ after
+ couch_stats_collector:decrement(
+ {httpd, clients_requesting_changes}
+ )
+ end.
handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, Db) ->
ok = couch_db:check_is_admin(Db),
@@ -687,10 +692,12 @@ db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
RespHeaders = [{"Location", Loc}],
case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of
("multipart/related;" ++ _) = ContentType ->
- {ok, Doc0} = couch_doc:doc_from_multi_part_stream(ContentType,
- fun() -> receive_request_data(Req) end),
+ {ok, Doc0, WaitFun} = couch_doc:doc_from_multi_part_stream(
+ ContentType, fun() -> receive_request_data(Req) end),
Doc = couch_doc_from_req(Req, DocId, Doc0),
- update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType);
+ Result = update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType),
+ WaitFun(),
+ Result;
_Else ->
case couch_httpd:qs_value(Req, "batch") of
"ok" ->
@@ -762,7 +769,7 @@ send_doc_efficiently(#httpd{mochi_req = MochiReq} = Req,
true ->
Boundary = couch_uuids:random(),
JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc,
- [attachments, follows|Options])),
+ [attachments, follows, att_encoding_info | Options])),
{ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
Boundary,JsonBytes, Atts, true),
CType = {<<"Content-Type">>, ContentType},
@@ -825,7 +832,14 @@ send_ranges_multipart(Req, ContentType, Len, Att, Ranges) ->
{ok, Resp}.
receive_request_data(Req) ->
- {couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}.
+ receive_request_data(Req, couch_httpd:body_length(Req)).
+
+receive_request_data(Req, LenLeft) when LenLeft > 0 ->
+ Len = erlang:min(4096, LenLeft),
+ Data = couch_httpd:recv(Req, Len),
+ {Data, fun() -> receive_request_data(Req, LenLeft - iolist_size(Data)) end};
+receive_request_data(_Req, _) ->
+ throw(<<"expected more data">>).
make_content_range(From, To, Len) ->
?l2b(io_lib:format("bytes ~B-~B/~B", [From, To, Len])).
@@ -995,7 +1009,13 @@ db_attachment_req(#httpd{method='GET',mochi_req=MochiReq}=Req, Db, DocId, FileNa
{identity, Ranges} when is_list(Ranges) ->
send_ranges_multipart(Req, Type, Len, Att, Ranges);
_ ->
- {ok, Resp} = start_response_length(Req, 200, Headers, Len),
+ Headers1 = Headers ++
+ if Enc =:= identity orelse ReqAcceptsAttEnc =:= true ->
+ [{"Content-MD5", base64:encode(Att#att.md5)}];
+ true ->
+ []
+ end,
+ {ok, Resp} = start_response_length(Req, 200, Headers1, Len),
AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp})
end
end
@@ -1052,7 +1072,7 @@ db_attachment_req(#httpd{method=Method,mochi_req=MochiReq}=Req, Db, DocId, FileN
end,
- fun() -> couch_httpd:recv(Req, 0) end
+ fun(Size) -> couch_httpd:recv(Req, Size) end
end,
att_len = case couch_httpd:header_value(Req,"Content-Length") of
undefined ->
diff --git a/src/couchdb/couch_httpd_rewrite.erl b/src/couchdb/couch_httpd_rewrite.erl
index a8297ae1..e24cb5db 100644
--- a/src/couchdb/couch_httpd_rewrite.erl
+++ b/src/couchdb/couch_httpd_rewrite.erl
@@ -117,8 +117,7 @@ handle_rewrite_req(#httpd{
% we are in a design handler
DesignId = <<"_design/", DesignName/binary>>,
Prefix = <<"/", DbName/binary, "/", DesignId/binary>>,
- QueryList = couch_httpd:qs(Req),
- QueryList1 = [{to_binding(K), V} || {K, V} <- QueryList],
+ QueryList = lists:map(fun decode_query_value/1, couch_httpd:qs(Req)),
#doc{body={Props}} = DDoc,
@@ -130,10 +129,11 @@ handle_rewrite_req(#httpd{
Rules ->
% create dispatch list from rules
DispatchList = [make_rule(Rule) || {Rule} <- Rules],
+ Method1 = couch_util:to_binary(Method),
%% get raw path by matching url to a rule.
- RawPath = case try_bind_path(DispatchList, couch_util:to_binary(Method), PathParts,
- QueryList1) of
+ RawPath = case try_bind_path(DispatchList, Method1,
+ PathParts, QueryList) of
no_dispatch_path ->
throw(not_found);
{NewPathParts, Bindings} ->
@@ -141,12 +141,13 @@ handle_rewrite_req(#httpd{
% build new path, reencode query args, eventually convert
% them to json
- Path = lists:append(
- string:join(Parts, [?SEPARATOR]),
- case Bindings of
- [] -> [];
- _ -> [$?, encode_query(Bindings)]
- end),
+ Bindings1 = maybe_encode_bindings(Bindings),
+ Path = binary_to_list(
+ iolist_to_binary([
+ string:join(Parts, [?SEPARATOR]),
+ [["?", mochiweb_util:urlencode(Bindings1)]
+ || Bindings1 =/= [] ]
+ ])),
% if path is relative detect it and rewrite path
case mochiweb_util:safe_relative_path(Path) of
@@ -193,7 +194,7 @@ quote_plus(X) ->
try_bind_path([], _Method, _PathParts, _QueryList) ->
no_dispatch_path;
try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) ->
- [{PathParts1, Method1}, RedirectPath, QueryArgs] = Dispatch,
+ [{PathParts1, Method1}, RedirectPath, QueryArgs, Formats] = Dispatch,
case bind_method(Method1, Method) of
true ->
case bind_path(PathParts1, PathParts, []) of
@@ -201,7 +202,8 @@ try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) ->
Bindings1 = Bindings ++ QueryList,
% we parse query args from the rule and fill
% it eventually with bindings vars
- QueryArgs1 = make_query_list(QueryArgs, Bindings1, []),
+ QueryArgs1 = make_query_list(QueryArgs, Bindings1,
+ Formats, []),
% remove params in QueryLists1 that are already in
% QueryArgs1
Bindings2 = lists:foldl(fun({K, V}, Acc) ->
@@ -227,61 +229,79 @@ try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) ->
%% rewriting dynamically the query list given as query member in
%% rewrites. Each value is replaced by one binding or an argument
%% passed in url.
-make_query_list([], _Bindings, Acc) ->
+make_query_list([], _Bindings, _Formats, Acc) ->
Acc;
-make_query_list([{Key, {Value}}|Rest], Bindings, Acc) ->
- Value1 = to_json({Value}),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_binary(Value) ->
- Value1 = replace_var(Key, Value, Bindings),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_list(Value) ->
- Value1 = replace_var(Key, Value, Bindings),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) ->
- make_query_list(Rest, Bindings, [{to_binding(Key), Value}|Acc]).
-
-replace_var(Key, Value, Bindings) ->
- case Value of
- <<":", Var/binary>> ->
- get_var(Var, Bindings, Value);
- <<"*">> ->
- get_var(Value, Bindings, Value);
- _ when is_list(Value) ->
- Value1 = lists:foldr(fun(V, Acc) ->
- V1 = case V of
- <<":", VName/binary>> ->
- case get_var(VName, Bindings, V) of
- V2 when is_list(V2) ->
- iolist_to_binary(V2);
- V2 -> V2
- end;
- <<"*">> ->
- get_var(V, Bindings, V);
- _ ->
- V
- end,
- [V1|Acc]
- end, [], Value),
- to_json(Value1);
- _ when is_binary(Value) ->
- Value;
- _ ->
- case Key of
- <<"key">> -> to_json(Value);
- <<"startkey">> -> to_json(Value);
- <<"start_key">> -> to_json(Value);
- <<"endkey">> -> to_json(Value);
- <<"end_key">> -> to_json(Value);
- _ ->
- lists:flatten(?JSON_ENCODE(Value))
- end
+make_query_list([{Key, {Value}}|Rest], Bindings, Formats, Acc) ->
+ Value1 = {Value},
+ make_query_list(Rest, Bindings, Formats, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Formats, Acc) when is_binary(Value) ->
+ Value1 = replace_var(Value, Bindings, Formats),
+ make_query_list(Rest, Bindings, Formats, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Formats, Acc) when is_list(Value) ->
+ Value1 = replace_var(Value, Bindings, Formats),
+ make_query_list(Rest, Bindings, Formats, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Formats, Acc) ->
+ make_query_list(Rest, Bindings, Formats, [{to_binding(Key), Value}|Acc]).
+
+replace_var(<<"*">>=Value, Bindings, Formats) ->
+ get_var(Value, Bindings, Value, Formats);
+replace_var(<<":", Var/binary>> = Value, Bindings, Formats) ->
+ get_var(Var, Bindings, Value, Formats);
+replace_var(Value, _Bindings, _Formats) when is_binary(Value) ->
+ Value;
+replace_var(Value, Bindings, Formats) when is_list(Value) ->
+ lists:reverse(lists:foldl(fun
+ (<<":", Var/binary>>=Value1, Acc) ->
+ [get_var(Var, Bindings, Value1, Formats)|Acc];
+ (Value1, Acc) ->
+ [Value1|Acc]
+ end, [], Value));
+replace_var(Value, _Bindings, _Formats) ->
+ Value.
+
+maybe_json(Key, Value) ->
+ case lists:member(Key, [<<"key">>, <<"startkey">>, <<"start_key">>,
+ <<"endkey">>, <<"end_key">>, <<"keys">>]) of
+ true ->
+ ?JSON_ENCODE(Value);
+ false ->
+ Value
end.
-
-get_var(VarName, Props, Default) ->
+get_var(VarName, Props, Default, Formats) ->
VarName1 = to_binding(VarName),
- couch_util:get_value(VarName1, Props, Default).
+ Val = couch_util:get_value(VarName1, Props, Default),
+ maybe_format(VarName, Val, Formats).
+
+maybe_format(VarName, Value, Formats) ->
+ case couch_util:get_value(VarName, Formats) of
+ undefined ->
+ Value;
+ Format ->
+ format(Format, Value)
+ end.
+
+format(<<"int">>, Value) when is_integer(Value) ->
+ Value;
+format(<<"int">>, Value) when is_binary(Value) ->
+ format(<<"int">>, ?b2l(Value));
+format(<<"int">>, Value) when is_list(Value) ->
+ case (catch list_to_integer(Value)) of
+ IntVal when is_integer(IntVal) ->
+ IntVal;
+ _ ->
+ Value
+ end;
+format(<<"bool">>, Value) when is_binary(Value) ->
+ format(<<"bool">>, ?b2l(Value));
+format(<<"bool">>, Value) when is_list(Value) ->
+ case string:to_lower(Value) of
+ "true" -> true;
+ "false" -> false;
+ _ -> Value
+ end;
+format(_Format, Value) ->
+ Value.
%% doc: build new patch from bindings. bindings are query args
%% (+ dynamic query rewritten if needed) and bindings found in
@@ -297,7 +317,8 @@ make_new_path([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) ->
make_new_path([{bind, P}|Rest], Bindings, Remaining, Acc) ->
P2 = case couch_util:get_value({bind, P}, Bindings) of
undefined -> << "undefined">>;
- P1 -> P1
+ P1 ->
+ iolist_to_binary(P1)
end,
make_new_path(Rest, Bindings, Remaining, [P2|Acc]);
make_new_path([P|Rest], Bindings, Remaining, Acc) ->
@@ -374,7 +395,11 @@ make_rule(Rule) ->
To ->
parse_path(To)
end,
- [{FromParts, Method}, ToParts, QueryArgs].
+ Formats = case couch_util:get_value(<<"formats">>, Rule) of
+ undefined -> [];
+ {Fmts} -> Fmts
+ end,
+ [{FromParts, Method}, ToParts, QueryArgs, Formats].
parse_path(Path) ->
{ok, SlashRE} = re:compile(<<"\\/">>),
@@ -407,21 +432,25 @@ path_to_list([P|R], Acc, DotDotCount) ->
end,
path_to_list(R, [P1|Acc], DotDotCount).
-encode_query(Props) ->
- Props1 = lists:foldl(fun ({{bind, K}, V}, Acc) ->
- case K of
- <<"*">> -> Acc;
- _ ->
- V1 = case is_list(V) orelse is_binary(V) of
- true -> V;
- false ->
- % probably it's a number
- quote_plus(V)
- end,
- [{K, V1} | Acc]
- end
- end, [], Props),
- lists:flatten(mochiweb_util:urlencode(Props1)).
+maybe_encode_bindings([]) ->
+ [];
+maybe_encode_bindings(Props) ->
+ lists:foldl(fun
+ ({{bind, <<"*">>}, _V}, Acc) ->
+ Acc;
+ ({{bind, K}, V}, Acc) ->
+ V1 = iolist_to_binary(maybe_json(K, V)),
+ [{K, V1}|Acc]
+ end, [], Props).
+
+decode_query_value({K,V}) ->
+ case lists:member(K, ["key", "startkey", "start_key",
+ "endkey", "end_key", "keys"]) of
+ true ->
+ {to_binding(K), ?JSON_DECODE(V)};
+ false ->
+ {to_binding(K), ?l2b(V)}
+ end.
to_binding({bind, V}) ->
{bind, V};
@@ -429,6 +458,3 @@ to_binding(V) when is_list(V) ->
to_binding(?l2b(V));
to_binding(V) ->
{bind, V}.
-
-to_json(V) ->
- iolist_to_binary(?JSON_ENCODE(V)).
diff --git a/src/couchdb/couch_js_functions.hrl b/src/couchdb/couch_js_functions.hrl
index 0cc49d62..d07eead5 100644
--- a/src/couchdb/couch_js_functions.hrl
+++ b/src/couchdb/couch_js_functions.hrl
@@ -140,8 +140,10 @@
var isReplicator = (userCtx.roles.indexOf('_replicator') >= 0);
var isAdmin = (userCtx.roles.indexOf('_admin') >= 0);
- if (oldDoc && !newDoc._deleted && !isReplicator) {
- reportError('Only the replicator can edit replication documents.');
+ if (oldDoc && !newDoc._deleted && !isReplicator &&
+ (oldDoc._replication_state === 'triggered')) {
+ reportError('Only the replicator can edit replication documents ' +
+ 'that are in the triggered state.');
}
if (!newDoc._deleted) {
@@ -180,12 +182,6 @@
}
if (newDoc.user_ctx) {
- if (!isAdmin) {
- reportError('Delegated replications (use of the ' +
- '`user_ctx\\' property) can only be triggered by ' +
- 'administrators.');
- }
-
var user_ctx = newDoc.user_ctx;
if ((typeof user_ctx !== 'object') || (user_ctx === null)) {
@@ -202,24 +198,40 @@
'non-empty string or null.');
}
+ if (!isAdmin && (user_ctx.name !== userCtx.name)) {
+ reportError('The given `user_ctx.name\\' is not valid');
+ }
+
if (user_ctx.roles && !isArray(user_ctx.roles)) {
reportError('The `user_ctx.roles\\' property must be ' +
'an array of strings.');
}
- if (user_ctx.roles) {
+ if (!isAdmin && user_ctx.roles) {
for (var i = 0; i < user_ctx.roles.length; i++) {
var role = user_ctx.roles[i];
if (typeof role !== 'string' || role.length === 0) {
reportError('Roles must be non-empty strings.');
}
- if (role[0] === '_') {
- reportError('System roles (starting with an ' +
- 'underscore) are not allowed.');
+ if (userCtx.roles.indexOf(role) === -1) {
+ reportError('Invalid role (`' + role +
+ '\\') in the `user_ctx\\'');
}
}
}
+ } else {
+ if (!isAdmin) {
+ reportError('The `user_ctx\\' property is missing (it is ' +
+ 'optional for admins only).');
+ }
+ }
+ } else {
+ if (!isAdmin) {
+ if (!oldDoc.user_ctx || (oldDoc.user_ctx.name !== userCtx.name)) {
+ reportError('Replication documents can only be deleted by ' +
+ 'admins or by the users who created them.');
+ }
}
}
}
diff --git a/src/couchdb/couch_key_tree.erl b/src/couchdb/couch_key_tree.erl
index bc723cc2..e5d3549f 100644
--- a/src/couchdb/couch_key_tree.erl
+++ b/src/couchdb/couch_key_tree.erl
@@ -16,6 +16,8 @@
-export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
get_all_leafs_full/1,stem/2,map_leafs/2]).
+-include("couch_db.hrl").
+
% Tree::term() is really a tree(), but we don't want to require R13B04 yet
-type branch() :: {Key::term(), Value::term(), Tree::term()}.
-type path() :: {Start::pos_integer(), branch()}.
@@ -82,9 +84,9 @@ merge_at(OurTree, Place, [{Key, Value, SubTree}]) when Place < 0 ->
no ->
no
end;
-merge_at([{Key, Value, SubTree}|Sibs], 0, [{Key, _Value, InsertSubTree}]) ->
+merge_at([{Key, V1, SubTree}|Sibs], 0, [{Key, V2, InsertSubTree}]) ->
{Merged, Conflicts} = merge_simple(SubTree, InsertSubTree),
- {ok, [{Key, Value, Merged} | Sibs], Conflicts};
+ {ok, [{Key, value_pref(V1, V2), Merged} | Sibs], Conflicts};
merge_at([{OurKey, _, _} | _], 0, [{Key, _, _}]) when OurKey > Key ->
% siblings keys are ordered, no point in continuing
no;
@@ -103,9 +105,10 @@ merge_simple([], B) ->
{B, false};
merge_simple(A, []) ->
{A, false};
-merge_simple([{Key, Value, SubA} | NextA], [{Key, _, SubB} | NextB]) ->
+merge_simple([{Key, V1, SubA} | NextA], [{Key, V2, SubB} | NextB]) ->
{MergedSubTree, Conflict1} = merge_simple(SubA, SubB),
{MergedNextTree, Conflict2} = merge_simple(NextA, NextB),
+ Value = value_pref(V1, V2),
{[{Key, Value, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
merge_simple([{A, _, _} = Tree | Next], [{B, _, _} | _] = Insert) when A < B ->
{Merged, _} = merge_simple(Next, Insert),
@@ -157,14 +160,18 @@ remove_leafs(Trees, Keys) ->
% filter out any that are in the keys list.
{FilteredPaths, RemovedKeys} = filter_leafs(Paths, Keys, [], []),
+ SortedPaths = lists:sort(
+ [{Pos + 1 - length(Path), Path} || {Pos, Path} <- FilteredPaths]
+ ),
+
% convert paths back to trees
NewTree = lists:foldl(
- fun({PathPos, Path},TreeAcc) ->
+ fun({StartPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
+ {NewTrees, _} = merge(TreeAcc, {StartPos, SingleTree}),
NewTrees
- end, [], FilteredPaths),
+ end, [], SortedPaths),
{NewTree, RemovedKeys}.
@@ -314,19 +321,35 @@ map_leafs_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
stem(Trees, Limit) ->
- % flatten each branch in a tree into a tree path
- Paths = get_all_leafs_full(Trees),
-
- Paths2 = [{Pos, lists:sublist(Path, Limit)} || {Pos, Path} <- Paths],
+ % flatten each branch in a tree into a tree path, sort by starting rev #
+ Paths = lists:sort(lists:map(fun({Pos, Path}) ->
+ StemmedPath = lists:sublist(Path, Limit),
+ {Pos + 1 - length(StemmedPath), StemmedPath}
+ end, get_all_leafs_full(Trees))),
% convert paths back to trees
lists:foldl(
- fun({PathPos, Path},TreeAcc) ->
+ fun({StartPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
+ {NewTrees, _} = merge(TreeAcc, {StartPos, SingleTree}),
NewTrees
- end, [], Paths2).
+ end, [], Paths).
+
+
+value_pref(Tuple, _) when is_tuple(Tuple),
+ (tuple_size(Tuple) == 3 orelse tuple_size(Tuple) == 4) ->
+ Tuple;
+value_pref(_, Tuple) when is_tuple(Tuple),
+ (tuple_size(Tuple) == 3 orelse tuple_size(Tuple) == 4) ->
+ Tuple;
+value_pref(?REV_MISSING, Other) ->
+ Other;
+value_pref(Other, ?REV_MISSING) ->
+ Other;
+value_pref(Last, _) ->
+ Last.
+
% Tests moved to test/etap/06?-*.t
diff --git a/src/couchdb/couch_rep.erl b/src/couchdb/couch_rep.erl
index 5c9fbce6..fd323f7f 100644
--- a/src/couchdb/couch_rep.erl
+++ b/src/couchdb/couch_rep.erl
@@ -883,26 +883,50 @@ update_rep_doc({Props} = _RepDoc, KVs) ->
update_rep_doc(RepDb, #doc{body = {RepDocBody}} = RepDoc, KVs) ->
NewRepDocBody = lists:foldl(
- fun({<<"_replication_state">> = K, _V} = KV, Body) ->
- Body1 = lists:keystore(K, 1, Body, KV),
- {Mega, Secs, _} = erlang:now(),
- UnixTime = Mega * 1000000 + Secs,
- lists:keystore(
- <<"_replication_state_time">>, 1,
- Body1, {<<"_replication_state_time">>, UnixTime});
+ fun({<<"_replication_state">> = K, State} = KV, Body) ->
+ case couch_util:get_value(K, Body) of
+ State ->
+ Body;
+ _ ->
+ Body1 = lists:keystore(K, 1, Body, KV),
+ lists:keystore(
+ <<"_replication_state_time">>, 1,
+ Body1, {<<"_replication_state_time">>, timestamp()})
+ end;
({K, _V} = KV, Body) ->
lists:keystore(K, 1, Body, KV)
end,
RepDocBody,
KVs
),
- % might not succeed - when the replication doc is deleted right
- % before this update (not an error)
- couch_db:update_doc(
- RepDb,
- RepDoc#doc{body = {NewRepDocBody}},
- []
- ).
+ case NewRepDocBody of
+ RepDocBody ->
+ ok;
+ _ ->
+ % might not succeed - when the replication doc is deleted right
+ % before this update (not an error)
+ couch_db:update_doc(RepDb, RepDoc#doc{body = {NewRepDocBody}}, [])
+ end.
+
+% RFC3339 timestamps.
+% Note: doesn't include the time seconds fraction (RFC3339 says it's optional).
+timestamp() ->
+ {{Year, Month, Day}, {Hour, Min, Sec}} = calendar:now_to_local_time(now()),
+ UTime = erlang:universaltime(),
+ LocalTime = calendar:universal_time_to_local_time(UTime),
+ DiffSecs = calendar:datetime_to_gregorian_seconds(LocalTime) -
+ calendar:datetime_to_gregorian_seconds(UTime),
+ zone(DiffSecs div 3600, (DiffSecs rem 3600) div 60),
+ iolist_to_binary(
+ io_lib:format("~4..0w-~2..0w-~2..0wT~2..0w:~2..0w:~2..0w~s",
+ [Year, Month, Day, Hour, Min, Sec,
+ zone(DiffSecs div 3600, (DiffSecs rem 3600) div 60)])).
+
+zone(Hr, Min) when Hr >= 0, Min >= 0 ->
+ io_lib:format("+~2..0w:~2..0w", [Hr, Min]);
+zone(Hr, Min) ->
+ io_lib:format("-~2..0w:~2..0w", [abs(Hr), abs(Min)]).
+
maybe_set_triggered({RepProps} = RepDoc, RepId) ->
case couch_util:get_value(<<"_replication_state">>, RepProps) of
diff --git a/src/couchdb/couch_rep_writer.erl b/src/couchdb/couch_rep_writer.erl
index 12d6dec5..2b722e8e 100644
--- a/src/couchdb/couch_rep_writer.erl
+++ b/src/couchdb/couch_rep_writer.erl
@@ -64,7 +64,7 @@ write_bulk_docs(_Db, []) ->
[];
write_bulk_docs(#http_db{headers = Headers} = Db, Docs) ->
JsonDocs = [
- couch_doc:to_json_obj(Doc, [revs, att_gzip_length]) || Doc <- Docs
+ couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs
],
Request = Db#http_db{
resource = "_bulk_docs",
@@ -84,7 +84,7 @@ write_multi_part_doc(#http_db{headers=Headers} = Db, #doc{atts=Atts} = Doc) ->
JsonBytes = ?JSON_ENCODE(
couch_doc:to_json_obj(
Doc,
- [follows, att_encoding_info, attachments]
+ [follows, att_encoding_info, attachments, revs]
)
),
Boundary = couch_uuids:random(),
diff --git a/src/couchdb/couch_replication_manager.erl b/src/couchdb/couch_replication_manager.erl
index 6101c9c5..6537c8b2 100644
--- a/src/couchdb/couch_replication_manager.erl
+++ b/src/couchdb/couch_replication_manager.erl
@@ -253,7 +253,7 @@ process_update(State, {Change}) ->
rep_user_ctx({RepDoc}) ->
case get_value(<<"user_ctx">>, RepDoc) of
undefined ->
- #user_ctx{roles = [<<"_admin">>]};
+ #user_ctx{};
{UserCtx} ->
#user_ctx{
name = get_value(<<"name">>, UserCtx, null),
@@ -307,6 +307,10 @@ start_replication(Server, {RepProps} = RepDoc, RepId, UserCtx, MaxRetries) ->
ok = gen_server:call(Server, {triggered, RepId}, infinity),
couch_rep:get_result(Pid, RepId, RepDoc, UserCtx);
Error ->
+ couch_rep:update_rep_doc(
+ RepDoc,
+ [{<<"_replication_state">>, <<"error">>},
+ {<<"_replication_id">>, ?l2b(element(1, RepId))}]),
keep_retrying(
Server, RepId, RepDoc, UserCtx, Error, ?INITIAL_WAIT, MaxRetries)
end.
diff --git a/src/erlang-oauth/oauth_uri.erl b/src/erlang-oauth/oauth_uri.erl
index 3bdc9076..5023f983 100644
--- a/src/erlang-oauth/oauth_uri.erl
+++ b/src/erlang-oauth/oauth_uri.erl
@@ -6,14 +6,6 @@
-import(lists, [concat/1]).
--define(is_uppercase_alpha(C), C >= $A, C =< $Z).
--define(is_lowercase_alpha(C), C >= $a, C =< $z).
--define(is_alpha(C), ?is_uppercase_alpha(C); ?is_lowercase_alpha(C)).
--define(is_digit(C), C >= $0, C =< $9).
--define(is_alphanumeric(C), ?is_alpha(C); ?is_digit(C)).
--define(is_unreserved(C), ?is_alphanumeric(C); C =:= $-; C =:= $_; C =:= $.; C =:= $~).
--define(is_hex(C), ?is_digit(C); C >= $A, C =< $F).
-
normalize(URI) ->
case http_uri:parse(URI) of
@@ -66,23 +58,41 @@ intersperse(_, [X]) -> [X];
intersperse(Sep, [X|Xs]) ->
[X, Sep|intersperse(Sep, Xs)].
-decode(Chars) ->
- decode(Chars, []).
-
-decode([], Decoded) ->
- lists:reverse(Decoded);
-decode([$%,A,B|Etc], Decoded) when ?is_hex(A), ?is_hex(B) ->
- decode(Etc, [erlang:list_to_integer([A,B], 16)|Decoded]);
-decode([C|Etc], Decoded) when ?is_unreserved(C) ->
- decode(Etc, [C|Decoded]).
-
-encode(Chars) ->
- encode(Chars, []).
-
-encode([], Encoded) ->
- lists:flatten(lists:reverse(Encoded));
-encode([C|Etc], Encoded) when ?is_unreserved(C) ->
- encode(Etc, [C|Encoded]);
-encode([C|Etc], Encoded) ->
- Value = io_lib:format("%~2.2.0s", [erlang:integer_to_list(C, 16)]),
- encode(Etc, [Value|Encoded]).
+-define(is_alphanum(C), C >= $A, C =< $Z; C >= $a, C =< $z; C >= $0, C =< $9).
+
+encode(Term) when is_integer(Term) ->
+ integer_to_list(Term);
+encode(Term) when is_atom(Term) ->
+ encode(atom_to_list(Term));
+encode(Term) when is_list(Term) ->
+ encode(lists:reverse(Term, []), []).
+
+encode([X | T], Acc) when ?is_alphanum(X); X =:= $-; X =:= $_; X =:= $.; X =:= $~ ->
+ encode(T, [X | Acc]);
+encode([X | T], Acc) ->
+ NewAcc = [$%, dec2hex(X bsr 4), dec2hex(X band 16#0f) | Acc],
+ encode(T, NewAcc);
+encode([], Acc) ->
+ Acc.
+
+decode(Str) when is_list(Str) ->
+ decode(Str, []).
+
+decode([$%, A, B | T], Acc) ->
+ decode(T, [(hex2dec(A) bsl 4) + hex2dec(B) | Acc]);
+decode([X | T], Acc) ->
+ decode(T, [X | Acc]);
+decode([], Acc) ->
+ lists:reverse(Acc, []).
+
+-compile({inline, [{dec2hex, 1}, {hex2dec, 1}]}).
+
+dec2hex(N) when N >= 10 andalso N =< 15 ->
+ N + $A - 10;
+dec2hex(N) when N >= 0 andalso N =< 9 ->
+ N + $0.
+
+hex2dec(C) when C >= $A andalso C =< $F ->
+ C - $A + 10;
+hex2dec(C) when C >= $0 andalso C =< $9 ->
+ C - $0.