author     Robert Newson <rnewson@apache.org>  2010-08-18 11:41:10 +0000
committer  Robert Newson <rnewson@apache.org>  2010-08-18 11:41:10 +0000
commit     d18d7036788acf2d5ddab608d0352158139de189 (patch)
tree       4474cce378e152c36bd4484a673776ab3abe15be
parent     6ddcdf17ef2a5e55f0962211c7c878aa5e74ffe7 (diff)
COUCHDB-161 - support Range header for attachments.
Attachments are upgraded to support the Range header on compaction.

git-svn-id: https://svn.apache.org/repos/asf/couchdb/trunk@986629 13f79535-47bb-0310-9956-ffa450edef68
-rw-r--r--  share/www/script/couch_tests.js             |   1
-rw-r--r--  share/www/script/test/attachment_ranges.js  | 108
-rw-r--r--  src/couchdb/couch_doc.erl                   |   5
-rw-r--r--  src/couchdb/couch_file.erl                  |   2
-rw-r--r--  src/couchdb/couch_httpd.erl                 |   2
-rw-r--r--  src/couchdb/couch_httpd_db.erl              |  46
-rw-r--r--  src/couchdb/couch_stream.erl                |  41
-rwxr-xr-x  test/etap/050-stream.t                      |   6
8 files changed, 200 insertions, 11 deletions
diff --git a/share/www/script/couch_tests.js b/share/www/script/couch_tests.js
index c048521c..0b8f9288 100644
--- a/share/www/script/couch_tests.js
+++ b/share/www/script/couch_tests.js
@@ -36,6 +36,7 @@ loadTest("attachments.js");
loadTest("attachments_multipart.js");
loadTest("attachment_names.js");
loadTest("attachment_paths.js");
+loadTest("attachment_ranges.js");
loadTest("attachment_views.js");
loadTest("auth_cache.js");
loadTest("batch_save.js");
diff --git a/share/www/script/test/attachment_ranges.js b/share/www/script/test/attachment_ranges.js
new file mode 100644
index 00000000..65c1f475
--- /dev/null
+++ b/share/www/script/test/attachment_ranges.js
@@ -0,0 +1,108 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+couchTests.attachment_ranges = function(debug) {
+ var db = new CouchDB("test_suite_db", {
+ "X-Couch-Full-Commit": "false"
+ });
+ db.deleteDb();
+ db.createDb();
+
+ if (debug) debugger;
+
+ var binAttDoc = {
+ _id: "bin_doc",
+ _attachments: {
+ "foo.txt": {
+ content_type: "application/octet-stream",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ }
+ }
+
+ var save_response = db.save(binAttDoc);
+ T(save_response.ok);
+
+ // Fetching the whole entity with an explicit range is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-28"
+ }
+ });
+ TEquals(206, xhr.status);
+ TEquals("This is a base64 encoded text", xhr.responseText);
+ TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("29", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching the whole entity without an end offset is a 200.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-"
+ }
+ });
+ TEquals(200, xhr.status);
+ TEquals("This is a base64 encoded text", xhr.responseText);
+ TEquals("29", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching from an offset to the end of the entity is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=2-"
+ }
+ });
+ TEquals(206, xhr.status);
+ TEquals("is is a base64 encoded text", xhr.responseText);
+ TEquals("bytes 2-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("27", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching past the end of the entity is a 416.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-29"
+ }
+ });
+ TEquals(416, xhr.status);
+
+ // Fetching the first part of the entity is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-3"
+ }
+ });
+ TEquals(206, xhr.status);
+ TEquals("This", xhr.responseText);
+ TEquals("4", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 0-3/29", xhr.getResponseHeader("Content-Range"));
+
+ // Fetching the middle of the entity is also a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=10-15"
+ }
+ });
+ TEquals(206, xhr.status);
+ TEquals("base64", xhr.responseText);
+ TEquals("6", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 10-15/29", xhr.getResponseHeader("Content-Range"));
+
+ // Fetching the end of the entity with a suffix range is also a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=-3"
+ }
+ });
+ TEquals(206, xhr.status);
+ TEquals("text", xhr.responseText);
+ TEquals("4", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 25-28/29", xhr.getResponseHeader("Content-Range"));
+
+
+};
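
The test above drives the new behaviour through CouchDB's browser test runner. For a quick manual check against a running server, the same ranged GET can be issued from an Erlang shell with OTP's httpc client; this is only an illustrative sketch, and the host, port, database and attachment names mirror the test fixture rather than anything in the patch itself.

%% Sketch only: fetch bytes 10-15 of the attachment created by the test.
fetch_range() ->
    inets:start(),                      %% httpc lives in the inets application
    Url = "http://127.0.0.1:5984/test_suite_db/bin_doc/foo.txt",
    ReqHeaders = [{"Range", "bytes=10-15"}],
    {ok, {{_, 206, _}, RespHeaders, Body}} =
        httpc:request(get, {Url, ReqHeaders}, [], []),
    %% Body should be "base64" and RespHeaders should include
    %% "content-range: bytes 10-15/29", matching the assertions above.
    {proplists:get_value("content-range", RespHeaders), Body}.
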
diff --git a/src/couchdb/couch_doc.erl b/src/couchdb/couch_doc.erl
index d15cd7de..5009f8fc 100644
--- a/src/couchdb/couch_doc.erl
+++ b/src/couchdb/couch_doc.erl
@@ -13,7 +13,7 @@
-module(couch_doc).
-export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,rev_to_str/1,revs_to_strs/1]).
--export([att_foldl/3,att_foldl_decode/3,get_validate_doc_fun/1]).
+-export([att_foldl/3,range_att_foldl/5,att_foldl_decode/3,get_validate_doc_fun/1]).
-export([from_json_obj/1,to_json_obj/2,has_stubs/1, merge_stubs/2]).
-export([validate_docid/1]).
-export([doc_from_multi_part_stream/2]).
@@ -307,6 +307,9 @@ att_foldl(#att{data={Fd,Sp},md5=Md5}, Fun, Acc) ->
att_foldl(#att{data=DataFun,att_len=Len}, Fun, Acc) when is_function(DataFun) ->
fold_streamed_data(DataFun, Len, Fun, Acc).
+range_att_foldl(#att{data={Fd,Sp}}, From, To, Fun, Acc) ->
+ couch_stream:range_foldl(Fd, Sp, From, To, Fun, Acc).
+
att_foldl_decode(#att{data={Fd,Sp},md5=Md5,encoding=Enc}, Fun, Acc) ->
couch_stream:foldl_decode(Fd, Sp, Md5, Enc, Fun, Acc);
att_foldl_decode(#att{data=Fun2,att_len=Len, encoding=identity}, Fun, Acc) ->
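
The new range_att_foldl/5 is a thin wrapper around couch_stream:range_foldl/6; note that the HTTP handler further down calls it with an inclusive start and an exclusive end offset (To1 + 1). A minimal sketch of a caller collecting the selected bytes into one binary, assuming an #att{} record whose data field is an on-disk {Fd, StreamInfo} pair; range_to_binary/3 is illustrative and not part of the patch.

%% Sketch only: From is inclusive, To is exclusive, as at the call site below.
range_to_binary(Att, From, To) ->
    Segs = couch_doc:range_att_foldl(Att, From, To,
                                     fun(Seg, Acc) -> [Seg | Acc] end, []),
    iolist_to_binary(lists:reverse(Segs)).
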
diff --git a/src/couchdb/couch_file.erl b/src/couchdb/couch_file.erl
index 0a891712..f809ce1e 100644
--- a/src/couchdb/couch_file.erl
+++ b/src/couchdb/couch_file.erl
@@ -508,6 +508,8 @@ load_header(Fd, Block) ->
-spec read_raw_iolist_int(#file{}, Pos::non_neg_integer(), Len::non_neg_integer()) ->
{Data::iolist(), CurPos::non_neg_integer()}.
+read_raw_iolist_int(Fd, {Pos, _Size}, Len) -> % 0110 UPGRADE CODE
+ read_raw_iolist_int(Fd, Pos, Len);
read_raw_iolist_int(#file{fd=Fd, tail_append_begin=TAB}, Pos, Len) ->
BlockOffset = Pos rem ?SIZE_BLOCK,
TotalBytes = calculate_total_read_len(BlockOffset, Len),
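
Streams written before this patch recorded each chunk as a bare file offset; streams written after it record {Offset, Size} pairs, which is what lets a reader seek to the requested byte range without reading every chunk. The "0110 UPGRADE CODE" clauses here and in couch_stream.erl accept both shapes, and compaction rewrites old attachments into the new format, which is why, per the commit message, attachments only gain Range support after compaction. A sketch of the idea, with an illustrative helper name that does not appear in the patch:

%% Sketch only: normalise either pointer shape to a plain offset.
pointer_offset({Pos, _Size}) -> Pos;                %% post-upgrade pointer
pointer_offset(Pos) when is_integer(Pos) -> Pos.    %% pre-upgrade pointer
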
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index cf83605f..bb97cb97 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -702,6 +702,8 @@ error_info(file_exists) ->
"created, the file already exists.">>};
error_info({bad_ctype, Reason}) ->
{415, <<"bad_content_type">>, Reason};
+error_info(requested_range_not_satisfiable) ->
+ {416, <<"requested_range_not_satisfiable">>, <<"Requested range not satisfiable">>};
error_info({error, illegal_database_name}) ->
{400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), "
"digits (0-9), and any of the characters _, $, (, ), +, -, and / "
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index 3c7e57c4..ab14a7d1 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -20,7 +20,7 @@
-import(couch_httpd,
[send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
- start_json_response/2,start_json_response/3,
+ send_response/4,start_json_response/2,start_json_response/3,
send_chunk/2,last_chunk/1,end_json_response/1,
start_chunked_response/3, absolute_uri/2, send/2,
start_response_length/4]).
@@ -862,7 +862,7 @@ couch_doc_open(Db, DocId, Rev, Options) ->
% Attachment request handlers
-db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
+db_attachment_req(#httpd{method='GET',mochi_req=MochiReq}=Req, Db, DocId, FileNameParts) ->
FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")),
#doc_query_args{
rev=Rev,
@@ -925,8 +925,46 @@ db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
AttFun(Att, fun(Seg, _) -> send_chunk(Resp, Seg) end, {ok, Resp}),
last_chunk(Resp);
_ ->
- {ok, Resp} = start_response_length(Req, 200, Headers, Len),
- AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp})
+ #att{data={_,StreamInfo}} = Att, %% layering violation
+ SupportsRange = case StreamInfo of
+ [{_,_}|_] -> true;
+ _ -> false
+ end,
+ Ranges = MochiReq:get(range),
+ HasSingleRange = case Ranges of
+ [_] -> true;
+ _ -> false
+ end,
+ Headers1 = case SupportsRange of
+ false -> [{<<"Accept-Ranges">>, <<"none">>}] ++ Headers;
+ true -> [{<<"Accept-Ranges">>, <<"bytes">>}] ++ Headers
+ end,
+ if
+ Enc == identity andalso SupportsRange == true andalso HasSingleRange == true ->
+ [{From, To}] = Ranges,
+ {From1, To1} = case {From, To} of
+ {none, To} ->
+ {Len - To - 1, Len - 1};
+ {From, none} ->
+ {From, Len - 1};
+ _ ->
+ {From, To}
+ end,
+ if
+ From < 0 orelse To1 >= Len ->
+ throw(requested_range_not_satisfiable);
+ true ->
+ ok
+ end,
+ Headers2 = [{<<"Content-Range">>,
+ ?l2b(io_lib:format("bytes ~B-~B/~B", [From1, To1, Len]))}]
+ ++ Headers1,
+ {ok, Resp} = start_response_length(Req, 206, Headers2, To1 - From1 + 1),
+ couch_doc:range_att_foldl(Att, From1, To1 + 1, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp});
+ true ->
+ {ok, Resp} = start_response_length(Req, 200, Headers1, Len),
+ AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp})
+ end
end
end
)
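
The handler only honours a Range request when the attachment is stored with the new {Pos, Size} pointers, the encoding is identity, and the request carries exactly one range; everything else falls back to a plain 200 response. The start/end normalisation reads more easily as a standalone function; the sketch below simply restates the case expression above (mochiweb parses "bytes=-N" as {none, N} and "bytes=N-" as {N, none}) and is not part of the patch.

%% Sketch only, mirroring the normalisation above; Len is the attachment length.
normalize_range({none, To}, Len) -> {Len - To - 1, Len - 1};  %% suffix range, e.g. bytes=-3
normalize_range({From, none}, Len) -> {From, Len - 1};        %% open-ended, e.g. bytes=2-
normalize_range({From, To}, _Len) -> {From, To}.              %% explicit range, e.g. bytes=10-15

With Len = 29 (the test attachment), bytes=-3 maps to {25, 28} and the handler answers 206 with "Content-Range: bytes 25-28/29", while bytes=0-29 fails the To1 >= Len check and is rejected with 416, exactly as the tests expect.
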
diff --git a/src/couchdb/couch_stream.erl b/src/couchdb/couch_stream.erl
index 04c17770..9209cfec 100644
--- a/src/couchdb/couch_stream.erl
+++ b/src/couchdb/couch_stream.erl
@@ -24,7 +24,7 @@
-define(DEFAULT_STREAM_CHUNK, 16#00100000). % 1 meg chunks when streaming data
--export([open/1, open/3, close/1, write/2, foldl/4, foldl/5, foldl_decode/6,
+-export([open/1, open/3, close/1, write/2, foldl/4, foldl/5, range_foldl/6, foldl_decode/6,
old_foldl/5,old_copy_to_new_stream/4]).
-export([copy_to_new_stream/3,old_read_term/2]).
-export([init/1, terminate/2, handle_call/3]).
@@ -112,22 +112,57 @@ foldl_decode(Fd, PosList, Md5, Enc, Fun, Acc) ->
foldl(_Fd, [], Md5, Md5Acc, _Fun, Acc) ->
Md5 = couch_util:md5_final(Md5Acc),
Acc;
+foldl(Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) -> % 0110 UPGRADE CODE
+ foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc);
foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
{ok, Bin} = couch_file:pread_iolist(Fd, Pos),
Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, Bin)),
Fun(Bin, Acc);
+foldl(Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
+ foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
{ok, Bin} = couch_file:pread_iolist(Fd, Pos),
foldl(Fd, Rest, Md5, couch_util:md5_update(Md5Acc, Bin), Fun, Fun(Bin, Acc)).
+range_foldl(Fd, PosList, From, To, Fun, Acc) ->
+ range_foldl(Fd, PosList, From, To, 0, Fun, Acc).
+
+range_foldl(_Fd, _PosList, _From, To, Off, _Fun, Acc) when Off >= To ->
+ Acc;
+range_foldl(Fd, [{_Pos, Size}|Rest], From, To, Off, Fun, Acc) when From > Off + Size ->
+ range_foldl(Fd, Rest, From, To, Off + Size, Fun, Acc);
+range_foldl(Fd, [{Pos, Size}|Rest], From, To, Off, Fun, Acc) ->
+ {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
+ Bin1 = if
+ From =< Off andalso To >= Off + Size -> Bin; %% the whole block is covered
+ true ->
+ PrefixLen = clip(From - Off, 0, Size),
+ PostfixLen = clip(Off + Size - To, 0, Size),
+ MatchLen = Size - PrefixLen - PostfixLen,
+ <<_Prefix:PrefixLen/binary,Match:MatchLen/binary,_Postfix:PostfixLen/binary>> = iolist_to_binary(Bin),
+ Match
+ end,
+ range_foldl(Fd, Rest, From, To, Off + Size, Fun, Fun(Bin1, Acc)).
+
+clip(Value, Lo, Hi) ->
+ if
+ Value < Lo -> Lo;
+ Value > Hi -> Hi;
+ true -> Value
+ end.
+
foldl_decode(_DecFun, _Fd, [], Md5, Md5Acc, _Fun, Acc) ->
Md5 = couch_util:md5_final(Md5Acc),
Acc;
+foldl_decode(DecFun, Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) ->
+ foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc);
foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
{ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, EncBin)),
Bin = DecFun(EncBin),
Fun(Bin, Acc);
+foldl_decode(DecFun, Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
+ foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
{ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
Bin = DecFun(EncBin),
@@ -227,7 +262,7 @@ handle_call({write, Bin}, _From, Stream) ->
{ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
WrittenLen2 = WrittenLen + iolist_size(WriteBin2),
Md5_2 = couch_util:md5_update(Md5, WriteBin2),
- Written2 = [Pos|Written]
+ Written2 = [{Pos, iolist_size(WriteBin2)}|Written]
end,
{reply, ok, Stream#stream{
@@ -265,7 +300,7 @@ handle_call(close, _From, Stream) ->
{lists:reverse(Written), WrittenLen, IdenLen, Md5Final, IdenMd5Final};
_ ->
{ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
- StreamInfo = lists:reverse(Written, [Pos]),
+ StreamInfo = lists:reverse(Written, [{Pos, iolist_size(WriteBin2)}]),
StreamLen = WrittenLen + iolist_size(WriteBin2),
{StreamInfo, StreamLen, IdenLen, Md5Final, IdenMd5Final}
end,
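
range_foldl/6 walks the {Pos, Size} list, skipping whole chunks that end before the requested start, slicing chunks that straddle either boundary with the clip/3 helper, and stopping once the running offset reaches the exclusive end. A small worked example of the slicing arithmetic, assuming a 4096-byte chunk at stream offset 8192 and a request for bytes 9000-9999 (so To = 10000, exclusive); demo_slice/0 is illustrative only and would need to live next to clip/3 inside couch_stream:

%% Sketch only: reproduce the per-chunk slicing arithmetic used above.
demo_slice() ->
    {Off, Size, From, To} = {8192, 4096, 9000, 10000},
    PrefixLen  = clip(From - Off, 0, Size),        %% 808 bytes skipped at the front
    PostfixLen = clip(Off + Size - To, 0, Size),   %% 2288 bytes dropped at the back
    MatchLen   = Size - PrefixLen - PostfixLen,    %% 1000 bytes handed to Fun
    {PrefixLen, PostfixLen, MatchLen}.             %% => {808, 2288, 1000}
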
diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t
index 545dd524..d30b524a 100755
--- a/test/etap/050-stream.t
+++ b/test/etap/050-stream.t
@@ -43,7 +43,7 @@ test() ->
"Writing an empty binary does nothing."),
{Ptrs, Length, _, _, _} = couch_stream:close(Stream),
- etap:is(Ptrs, [0], "Close returns the file pointers."),
+ etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
etap:is(Length, 8, "Close also returns the number of bytes written."),
etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
@@ -59,7 +59,7 @@ test() ->
"Successfully wrote 80 0 bits."),
{Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
- etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."),
+ etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
etap:is(Length2, 20, "Length written is 160 bytes."),
AllBits = iolist_to_binary([OneBits,ZeroBits]),
@@ -80,7 +80,7 @@ test() ->
% + 4 bytes for the term_to_binary adding a length header
% + 1 byte every 4K for tail append headers
SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
- etap:is(Ptrs3, [ExpPtr2, SecondPtr], "Pointers every 4K bytes."),
+ etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
couch_file:close(Fd),
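
The etap expectations change because couch_stream:close/1 now returns {Offset, Size} pairs instead of bare offsets. A minimal sketch of what the updated assertions exercise, assuming Fd is an open couch_file and that the two writes fit in a single chunk; demo_stream/1 is illustrative and not part of the test:

%% Sketch only: write two segments and inspect the pointer shape on close.
demo_stream(Fd) ->
    {ok, Stream} = couch_stream:open(Fd),
    ok = couch_stream:write(Stream, <<"food">>),
    ok = couch_stream:write(Stream, <<"foob">>),
    {Ptrs, Len, _, _, _} = couch_stream:close(Stream),
    %% With 8 bytes in one chunk this yields [{Pos, 8}] and Len =:= 8.
    {Ptrs, Len}.
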