Author:    Adam Kocoloski <adam@cloudant.com>  2011-10-04 13:11:12 -0700
Committer: Adam Kocoloski <adam@cloudant.com>  2011-10-04 13:11:12 -0700
commit     da7284ed2844926c7df0efef12ba30d8e2e6d039 (patch)
tree       10fa2d5cf35bfae4d48fc632924cec98ee02a22a
parent     b57aadd742e4f8cc925ba4f005ae658c43a7c5b4 (diff)
parent     4e19639a64d3033e1cc7c22a0b7404d277643c78 (diff)

Merge pull request #67 from cloudant/12645-merge_latest_1.1.x

-rw-r--r--  apps/couch/CHANGES                                        |   3
-rw-r--r--  apps/couch/INSTALL.Windows                                |   6
-rw-r--r--  apps/couch/NEWS                                           |   3
-rw-r--r--  apps/couch/THANKS                                         |   4
-rw-r--r--  apps/couch/include/couch_db.hrl                           |  17
-rw-r--r--  apps/couch/src/couch_btree.erl                            |  30
-rw-r--r--  apps/couch/src/couch_changes.erl                          |   2
-rw-r--r--  apps/couch/src/couch_db.erl                               |  32
-rw-r--r--  apps/couch/src/couch_doc.erl                              |  20
-rw-r--r--  apps/couch/src/couch_httpd.erl                            |  55
-rw-r--r--  apps/couch/src/couch_httpd_db.erl                         |   6
-rw-r--r--  apps/couch/src/couch_httpd_show.erl                       |  30
-rw-r--r--  apps/couch/src/couch_httpd_vhost.erl                      |  12
-rw-r--r--  apps/couch/src/couch_httpd_view.erl                       |  80
-rw-r--r--  apps/couch/src/couch_key_tree.erl                         |  17
-rw-r--r--  apps/couch/src/couch_log.erl                              |  28
-rw-r--r--  apps/couch/src/couch_os_process.erl                       |   5
-rw-r--r--  apps/couch/src/couch_query_servers.erl                    |   4
-rw-r--r--  apps/couch/src/couch_rep.erl                              |  57
-rw-r--r--  apps/couch/src/couch_rep_changes_feed.erl                 |  27
-rw-r--r--  apps/couch/src/couch_rep_reader.erl                       |  12
-rw-r--r--  apps/couch/src/couch_rep_writer.erl                       |  16
-rw-r--r--  apps/couch/src/couch_replication_manager.erl              |   2
-rw-r--r--  apps/couch/src/couch_view.erl                             |  12
-rw-r--r--  apps/couch/src/couch_view_compactor.erl                   |  29
-rw-r--r--  apps/couch/src/couch_view_group.erl                       | 167
-rw-r--r--  apps/couch/src/couch_view_updater.erl                     |  30
-rw-r--r--  apps/couch/src/test_util.erl                              |  28
-rwxr-xr-x  apps/couch/test/etap/020-btree-basics.t                   |  13
-rw-r--r--  apps/couch/test/javascript/run.tpl                        |   2
-rw-r--r--  couchjs/c_src/SConscript                                  |  54
-rw-r--r--  couchjs/c_src/http.c                                      | 382
-rw-r--r--  couchjs/c_src/http.h                                      |  12
-rw-r--r--  couchjs/c_src/main.c                                      | 334
-rw-r--r--  couchjs/c_src/sm.h                                        |  13
-rw-r--r--  couchjs/c_src/sm170.c                                     | 378
-rw-r--r--  couchjs/c_src/sm180.c                                     | 387
-rw-r--r--  couchjs/c_src/sm185.c                                     | 401
-rw-r--r--  couchjs/c_src/utf8.c                                      |  18
-rw-r--r--  couchjs/c_src/util.c                                      | 235
-rw-r--r--  couchjs/c_src/util.h                                      |  34
-rw-r--r--  couchjs/js/mimeparse.js                                   |   2
-rw-r--r--  couchjs/js/render.js                                      |  10
-rw-r--r--  couchjs/js/util.js                                        |  11
-rw-r--r--  rel/overlay/etc/default.ini                               |   1
-rw-r--r--  rel/overlay/share/www/script/couch_test_runner.js         |  15
-rw-r--r--  rel/overlay/share/www/script/futon.browse.js              |   3
-rw-r--r--  rel/overlay/share/www/script/test/all_docs.js             |   7
-rw-r--r--  rel/overlay/share/www/script/test/basics.js               |  19
-rw-r--r--  rel/overlay/share/www/script/test/changes.js              |  26
-rw-r--r--  rel/overlay/share/www/script/test/design_docs.js          |  39
-rw-r--r--  rel/overlay/share/www/script/test/etags_views.js          |   8
-rw-r--r--  rel/overlay/share/www/script/test/jsonp.js                |   2
-rw-r--r--  rel/overlay/share/www/script/test/recreate_doc.js         |  65
-rw-r--r--  rel/overlay/share/www/script/test/show_documents.js       |  26
-rw-r--r--  rel/overlay/share/www/script/test/update_documents.js     |  38
-rw-r--r--  rel/overlay/share/www/script/test/view_collation_raw.js   |   9
-rwxr-xr-x  test/etap/072-cleanup.t                                   |   8
58 files changed, 2409 insertions(+), 877 deletions(-)
diff --git a/apps/couch/CHANGES b/apps/couch/CHANGES
index 54a2e03c..64b4f3c1 100644
--- a/apps/couch/CHANGES
+++ b/apps/couch/CHANGES
@@ -6,6 +6,9 @@ Version 1.1.1
This version has not been released yet.
+* ETags for views include current sequence if include_docs=true.
+* JSONP responses now send "text/javascript" for Content-Type.
+
Version 1.1.0
-------------
diff --git a/apps/couch/INSTALL.Windows b/apps/couch/INSTALL.Windows
index d2082734..d661f1da 100644
--- a/apps/couch/INSTALL.Windows
+++ b/apps/couch/INSTALL.Windows
@@ -8,8 +8,8 @@ Dependencies
You will need the following installed:
- * Erlang OTP (=14B01) (http://erlang.org/)
- * ICU (http://icu.sourceforge.net/)
+ * Erlang OTP (>=14B03) (http://erlang.org/)
+ * ICU (=4.4.*) (http://icu.sourceforge.net/)
* OpenSSL (http://www.openssl.org/)
* Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/)
* libcurl (http://curl.haxx.se/libcurl/)
@@ -21,7 +21,7 @@ General Notes
* When installing Erlang, you must build it from source.
- The CouchDB build makes use of a number of the Erlang build scripts.
+The CouchDB build requires a number of the Erlang build scripts.
* When installing ICU, select the binaries built with Visual Studio 2008.
diff --git a/apps/couch/NEWS b/apps/couch/NEWS
index 97eb58e7..d7dc7cf1 100644
--- a/apps/couch/NEWS
+++ b/apps/couch/NEWS
@@ -12,6 +12,9 @@ Version 1.1.1
This version has not been released yet.
+* ETags for views include current sequence if include_docs=true.
+* JSONP responses now send "text/javascript" for Content-Type.
+
Version 1.1.0
-------------
diff --git a/apps/couch/THANKS b/apps/couch/THANKS
index aae7991c..470c3937 100644
--- a/apps/couch/THANKS
+++ b/apps/couch/THANKS
@@ -80,6 +80,8 @@ suggesting improvements or submitting changes. Some of these people are:
* Sam Bisbee <sam@sbisbee.com>
* Nathan Vander Wilt <natevw@yahoo.com>
* Caolan McMahon <caolan.mcmahon@googlemail.com>
-
+ * Alexander Shorin <kxepal@gmail.com>
+ * Christopher Bonhage <queezey@me.com>
+ * Christian Carter <cdcarter@gmail.com>
For a list of authors see the `AUTHORS` file.
diff --git a/apps/couch/include/couch_db.hrl b/apps/couch/include/couch_db.hrl
index a96d5d4f..1c425e8d 100644
--- a/apps/couch/include/couch_db.hrl
+++ b/apps/couch/include/couch_db.hrl
@@ -25,8 +25,20 @@
-define(DEFAULT_ATTACHMENT_CONTENT_TYPE, <<"application/octet-stream">>).
--define(LOG_DEBUG(Format, Args), couch_log:debug(Format, Args)).
--define(LOG_INFO(Format, Args), couch_log:info(Format, Args)).
+-define(LOG_DEBUG(Format, Args),
+ case couch_log:debug_on() of
+ true ->
+ couch_log:debug(Format, Args);
+ false -> ok
+ end).
+
+-define(LOG_INFO(Format, Args),
+ case couch_log:info_on() of
+ true ->
+ couch_log:info(Format, Args);
+ false -> ok
+ end).
+
-define(LOG_ERROR(Format, Args), couch_log:error(Format, Args)).
-record(rev_info,
@@ -210,7 +222,6 @@
-record(group, {
sig=nil,
- dbname,
fd=nil,
name,
def_lang,
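
The reworked ?LOG_DEBUG and ?LOG_INFO macros move the level check to the call site, so their arguments are only evaluated when the corresponding level is enabled. A minimal sketch of a caller (expensive_summary/1 is a hypothetical helper used only to illustrate the point):

    log_change(DocInfo) ->
        %% Under the new macro, couch_log:debug_on() is consulted before the
        %% argument list is built, so expensive_summary/1 never runs while
        %% debug logging is switched off.
        ?LOG_DEBUG("processed change: ~s", [expensive_summary(DocInfo)]),
        ok.
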
diff --git a/apps/couch/src/couch_btree.erl b/apps/couch/src/couch_btree.erl
index 3f2e86d8..52fcaece 100644
--- a/apps/couch/src/couch_btree.erl
+++ b/apps/couch/src/couch_btree.erl
@@ -15,6 +15,7 @@
-export([open/2, open/3, query_modify/4, add/2, add_remove/3]).
-export([fold/4, full_reduce/1, final_reduce/2, foldl/3, foldl/4]).
-export([fold_reduce/4, lookup/2, get_state/1, set_options/2]).
+-export([less/3]).
-record(btree,
{fd,
@@ -109,9 +110,17 @@ full_reduce(#btree{root={_P, Red}, reduce=Reduce}) ->
% wraps a 2 arity function with the proper 3 arity function
convert_fun_arity(Fun) when is_function(Fun, 2) ->
- fun(KV, _Reds, AccIn) -> Fun(KV, AccIn) end;
+ fun
+ (visit, KV, _Reds, AccIn) -> Fun(KV, AccIn);
+ (traverse, _K, _Red, AccIn) -> {ok, AccIn}
+ end;
convert_fun_arity(Fun) when is_function(Fun, 3) ->
- Fun. % Already arity 3
+ fun
+ (visit, KV, Reds, AccIn) -> Fun(KV, Reds, AccIn);
+ (traverse, _K, _Red, AccIn) -> {ok, AccIn}
+ end;
+convert_fun_arity(Fun) when is_function(Fun, 4) ->
+ Fun. % Already arity 4
make_key_in_end_range_function(Bt, fwd, Options) ->
case couch_util:get_value(end_key_gt, Options) of
@@ -614,12 +623,17 @@ stream_node(Bt, Reds, {Pointer, _Reds}, InRange, Dir, Fun, Acc) ->
stream_kp_node(_Bt, _Reds, [], _InRange, _Dir, _Fun, Acc) ->
{ok, Acc};
-stream_kp_node(Bt, Reds, [{_Key, {Pointer, Red}} | Rest], InRange, Dir, Fun, Acc) ->
- case stream_node(Bt, Reds, {Pointer, Red}, InRange, Dir, Fun, Acc) of
+stream_kp_node(Bt, Reds, [{Key, {Pointer, Red}} | Rest], InRange, Dir, Fun, Acc) ->
+ case Fun(traverse, Key, Red, Acc) of
{ok, Acc2} ->
- stream_kp_node(Bt, [Red | Reds], Rest, InRange, Dir, Fun, Acc2);
- {stop, LastReds, Acc2} ->
- {stop, LastReds, Acc2}
+ case stream_node(Bt, Reds, {Pointer, Red}, InRange, Dir, Fun, Acc2) of
+ {ok, Acc3} ->
+ stream_kp_node(Bt, [Red | Reds], Rest, InRange, Dir, Fun, Acc3);
+ {stop, LastReds, Acc3} ->
+ {stop, LastReds, Acc3}
+ end;
+ {skip, Acc2} ->
+ stream_kp_node(Bt, [Red | Reds], Rest, InRange, Dir, Fun, Acc2)
end.
drop_nodes(_Bt, Reds, _StartKey, []) ->
@@ -680,7 +694,7 @@ stream_kv_node2(Bt, Reds, PrevKVs, [{K,V} | RestKVs], InRange, Dir, Fun, Acc) ->
{stop, {PrevKVs, Reds}, Acc};
true ->
AssembledKV = assemble(Bt, K, V),
- case Fun(AssembledKV, {PrevKVs, Reds}, Acc) of
+ case Fun(visit, AssembledKV, {PrevKVs, Reds}, Acc) of
{ok, Acc2} ->
stream_kv_node2(Bt, Reds, [AssembledKV | PrevKVs], RestKVs, InRange, Dir, Fun, Acc2);
{stop, Acc2} ->
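
With convert_fun_arity/1 now producing arity-4 callbacks, a fold function can react to traverse events and prune whole key-pointer subtrees by returning {skip, Acc}, while existing arity-2/3 callbacks keep their old behaviour. A rough sketch, under the assumption that the btree's reduction carries a row count in its first element (as CouchDB's view btrees do):

    bounded_count(Bt, Limit) ->
        Fun = fun(traverse, _Key, Red, Count)
                        when Count + element(1, Red) =< Limit ->
                      %% this subtree cannot push us past the limit: take its
                      %% reduction and skip descending into it
                      {skip, Count + element(1, Red)};
                 (traverse, _Key, _Red, Count) ->
                      {ok, Count};
                 (visit, _KV, _Reds, Count) when Count < Limit ->
                      {ok, Count + 1};
                 (visit, _KV, _Reds, Count) ->
                      {stop, Count}
              end,
        {ok, _LastRed, Count} = couch_btree:fold(Bt, Fun, 0, []),
        Count.
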
diff --git a/apps/couch/src/couch_changes.erl b/apps/couch/src/couch_changes.erl
index 44d0ad46..4f2857b6 100644
--- a/apps/couch/src/couch_changes.erl
+++ b/apps/couch/src/couch_changes.erl
@@ -268,7 +268,6 @@ start_sending_changes(Callback, UserAcc, ResponseType) ->
send_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend) ->
#changes_args{
- style = Style,
include_docs = IncludeDocs,
conflicts = Conflicts,
limit = Limit,
@@ -278,7 +277,6 @@ send_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend) ->
} = Args,
couch_db:changes_since(
Db,
- Style,
StartSeq,
fun changes_enumerator/2,
[{dir, Dir}],
diff --git a/apps/couch/src/couch_db.erl b/apps/couch/src/couch_db.erl
index 96c49886..c01b0a35 100644
--- a/apps/couch/src/couch_db.erl
+++ b/apps/couch/src/couch_db.erl
@@ -23,7 +23,7 @@
-export([increment_update_seq/1,get_purge_seq/1,purge_docs/2,get_last_purged/1]).
-export([start_link/3,open_doc_int/3,ensure_full_commit/1,ensure_full_commit/2]).
-export([set_security/2,get_security/1]).
--export([changes_since/5,changes_since/6,read_doc/2,new_revid/1]).
+-export([changes_since/4,changes_since/5,read_doc/2,new_revid/1]).
-export([check_is_admin/1, check_is_reader/1, get_doc_count/1]).
-export([reopen/1, make_doc/5]).
@@ -293,8 +293,11 @@ get_design_docs(#db{name = <<"shards/", _/binary>> = ShardName}) ->
Response
end;
get_design_docs(#db{id_tree=Btree}=Db) ->
- {ok,_, Docs} = couch_btree:fold(Btree,
- fun(#full_doc_info{id= <<"_design/",_/binary>>}=FullDocInfo, _Reds, AccDocs) ->
+ {ok, _, Docs} = couch_view:fold(
+ #view{btree=Btree},
+ fun(#full_doc_info{deleted = true}, _Reds, AccDocs) ->
+ {ok, AccDocs};
+ (#full_doc_info{id= <<"_design/",_/binary>>}=FullDocInfo, _Reds, AccDocs) ->
{ok, Doc} = couch_db:open_doc_int(Db, FullDocInfo, []),
{ok, [Doc | AccDocs]};
(_, _Reds, AccDocs) ->
@@ -987,10 +990,10 @@ enum_docs_reduce_to_count(Reds) ->
fun couch_db_updater:btree_by_id_reduce/2, Reds),
Count.
-changes_since(Db, Style, StartSeq, Fun, Acc) ->
- changes_since(Db, Style, StartSeq, Fun, [], Acc).
-
-changes_since(Db, Style, StartSeq, Fun, Options, Acc) ->
+changes_since(Db, StartSeq, Fun, Acc) ->
+ changes_since(Db, StartSeq, Fun, [], Acc).
+
+changes_since(Db, StartSeq, Fun, Options, Acc) ->
Wrapper = fun(FullDocInfo, _Offset, Acc2) ->
case FullDocInfo of
#full_doc_info{} ->
@@ -998,17 +1001,7 @@ changes_since(Db, Style, StartSeq, Fun, Options, Acc) ->
#doc_info{} ->
DocInfo = FullDocInfo
end,
- #doc_info{revs=Revs} = DocInfo,
- DocInfo2 =
- case Style of
- main_only ->
- DocInfo;
- all_docs ->
- % remove revs before the seq
- DocInfo#doc_info{revs=[RevInfo ||
- #rev_info{seq=RevSeq}=RevInfo <- Revs, StartSeq < RevSeq]}
- end,
- Fun(DocInfo2, Acc2)
+ Fun(DocInfo, Acc2)
end,
{ok, _LastReduction, AccOut} = couch_btree:fold(Db#db.seq_tree, Wrapper,
Acc, [{start_key, couch_util:to_integer(StartSeq) + 1} | Options]),
@@ -1028,7 +1021,8 @@ enum_docs_since(Db, SinceSeq, InFun, Acc, Options) ->
{ok, enum_docs_since_reduce_to_count(LastReduction), AccOut}.
enum_docs(Db, InFun, InAcc, Options) ->
- {ok, LastReduce, OutAcc} = couch_btree:fold(Db#db.id_tree, InFun, InAcc, Options),
+ {ok, LastReduce, OutAcc} = couch_view:fold(
+ #view{btree=Db#db.id_tree}, InFun, InAcc, Options),
{ok, enum_docs_reduce_to_count(LastReduce), OutAcc}.
%%% Internal function %%%
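
Since the style argument is gone, changes_since/4,5 now hands every #doc_info{} straight to the caller and any revision filtering happens downstream (e.g. in couch_changes). A minimal sketch of the new calling convention, assuming the couch_db.hrl records are in scope:

    ids_changed_since(Db, Since) ->
        {ok, Ids} = couch_db:changes_since(Db, Since,
            fun(#doc_info{id = Id}, Acc) -> {ok, [Id | Acc]} end, []),
        lists:reverse(Ids).
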
diff --git a/apps/couch/src/couch_doc.erl b/apps/couch/src/couch_doc.erl
index 33d7e3cf..63ac0892 100644
--- a/apps/couch/src/couch_doc.erl
+++ b/apps/couch/src/couch_doc.erl
@@ -302,12 +302,20 @@ to_doc_info(FullDocInfo) ->
{DocInfo, _Path} = to_doc_info_path(FullDocInfo),
DocInfo.
-max_seq([], Max) ->
- Max;
-max_seq([#rev_info{seq=Seq}|Rest], Max) ->
- max_seq(Rest, if Max > Seq -> Max; true -> Seq end).
+max_seq(Tree, UpdateSeq) ->
+ FoldFun = fun({_Pos, _Key}, Value, _Type, MaxOldSeq) ->
+ case Value of
+ {_Deleted, _DiskPos, OldTreeSeq} ->
+ erlang:max(MaxOldSeq, OldTreeSeq);
+ #leaf{seq=LeafSeq} ->
+ erlang:max(MaxOldSeq, LeafSeq);
+ _ ->
+ MaxOldSeq
+ end
+ end,
+ couch_key_tree:fold(FoldFun, UpdateSeq, Tree).
-to_doc_info_path(#full_doc_info{id=Id,rev_tree=Tree}) ->
+to_doc_info_path(#full_doc_info{id=Id,rev_tree=Tree,update_seq=FDISeq}) ->
RevInfosAndPath =
[{#rev_info{deleted=Del,body_sp=Bp,seq=Seq,rev={Pos,RevId}}, Path} ||
{#leaf{deleted=Del, ptr=Bp, seq=Seq},{Pos, [RevId|_]}=Path} <-
@@ -320,7 +328,7 @@ to_doc_info_path(#full_doc_info{id=Id,rev_tree=Tree}) ->
end, RevInfosAndPath),
[{_RevInfo, WinPath}|_] = SortedRevInfosAndPath,
RevInfos = [RevInfo || {RevInfo, _Path} <- SortedRevInfosAndPath],
- {#doc_info{id=Id, high_seq=max_seq(RevInfos, 0), revs=RevInfos}, WinPath}.
+ {#doc_info{id=Id, high_seq=max_seq(Tree, FDISeq), revs=RevInfos}, WinPath}.
diff --git a/apps/couch/src/couch_httpd.erl b/apps/couch/src/couch_httpd.erl
index 8fb2687c..602bdf2b 100644
--- a/apps/couch/src/couch_httpd.erl
+++ b/apps/couch/src/couch_httpd.erl
@@ -469,16 +469,24 @@ body_length(Req) ->
Unknown -> {unknown_transfer_encoding, Unknown}
end.
-body(#httpd{mochi_req=MochiReq, req_body=ReqBody}) ->
- case ReqBody of
+body(#httpd{mochi_req=MochiReq, req_body=undefined} = Req) ->
+ case body_length(Req) of
undefined ->
- % Maximum size of document PUT request body (4GB)
MaxSize = list_to_integer(
couch_config:get("couchdb", "max_document_size", "4294967296")),
MochiReq:recv_body(MaxSize);
- _Else ->
- ReqBody
- end.
+ chunked ->
+ ChunkFun = fun({0, _Footers}, Acc) ->
+ lists:reverse(Acc);
+ ({_Len, Chunk}, Acc) ->
+ [Chunk | Acc]
+ end,
+ recv_chunked(Req, 8192, ChunkFun, []);
+ Len ->
+ MochiReq:recv_body(Len)
+ end;
+body(#httpd{req_body=ReqBody}) ->
+ ReqBody.
json_body(Httpd) ->
?JSON_DECODE(body(Httpd)).
@@ -619,25 +627,25 @@ send_json(Req, Code, Value) ->
send_json(Req, Code, [], Value).
send_json(Req, Code, Headers, Value) ->
+ initialize_jsonp(Req),
DefaultHeaders = [
{"Content-Type", negotiate_content_type(Req)},
{"Cache-Control", "must-revalidate"}
],
- Body = [start_jsonp(Req), ?JSON_ENCODE(Value), end_jsonp(), $\n],
+ Body = [start_jsonp(), ?JSON_ENCODE(Value), end_jsonp(), $\n],
send_response(Req, Code, DefaultHeaders ++ Headers, Body).
start_json_response(Req, Code) ->
start_json_response(Req, Code, []).
start_json_response(Req, Code, Headers) ->
+ initialize_jsonp(Req),
DefaultHeaders = [
{"Content-Type", negotiate_content_type(Req)},
{"Cache-Control", "must-revalidate"}
],
- start_jsonp(Req), % Validate before starting chunked.
- %start_chunked_response(Req, Code, DefaultHeaders ++ Headers).
{ok, Resp} = start_chunked_response(Req, Code, DefaultHeaders ++ Headers),
- case start_jsonp(Req) of
+ case start_jsonp() of
[] -> ok;
Start -> send_chunk(Resp, Start)
end,
@@ -647,7 +655,7 @@ end_json_response(Resp) ->
send_chunk(Resp, end_jsonp() ++ [$\n]),
last_chunk(Resp).
-start_jsonp(Req) ->
+initialize_jsonp(Req) ->
case get(jsonp) of
undefined -> put(jsonp, qs_value(Req, "callback", no_jsonp));
_ -> ok
@@ -660,14 +668,9 @@ start_jsonp(Req) ->
% make sure jsonp is configured on (default off)
case couch_config:get("httpd", "allow_jsonp", "false") of
"true" ->
- validate_callback(CallBack),
- CallBack ++ "(";
+ validate_callback(CallBack);
_Else ->
- % this could throw an error message, but instead we just ignore the
- % jsonp parameter
- % throw({bad_request, <<"JSONP must be configured before using.">>})
- put(jsonp, no_jsonp),
- []
+ put(jsonp, no_jsonp)
end
catch
Error ->
@@ -676,6 +679,13 @@ start_jsonp(Req) ->
end
end.
+start_jsonp() ->
+ case get(jsonp) of
+ no_jsonp -> [];
+ [] -> [];
+ CallBack -> CallBack ++ "("
+ end.
+
end_jsonp() ->
Resp = case get(jsonp) of
no_jsonp -> [];
@@ -836,7 +846,14 @@ send_redirect(Req, Path) ->
Headers = [{"Location", couch_httpd:absolute_uri(Req, Path)}],
send_response(Req, 301, Headers, <<>>).
-negotiate_content_type(#httpd{mochi_req=MochiReq}) ->
+negotiate_content_type(Req) ->
+ case get(jsonp) of
+ no_jsonp -> negotiate_content_type1(Req);
+ [] -> negotiate_content_type1(Req);
+ _Callback -> "text/javascript"
+ end.
+
+negotiate_content_type1(#httpd{mochi_req=MochiReq}) ->
%% Determine the appropriate Content-Type header for a JSON response
%% depending on the Accept header in the request. A request that explicitly
%% lists the correct JSON MIME type will get that type, otherwise the
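
Taken together, initialize_jsonp/1 validates and caches the callback name, start_jsonp/0 and end_jsonp/0 emit the wrapper, and negotiate_content_type/1 consults the same state to pick the header. A hedged sketch of the observable effect (handle_ping_req/1 is a hypothetical handler):

    handle_ping_req(#httpd{method = 'GET'} = Req) ->
        %% With ?callback=cb in the query string and [httpd] allow_jsonp = true,
        %% the body is wrapped as cb({"ok":true}) and the Content-Type becomes
        %% text/javascript; without a callback the JSON type is negotiated from
        %% the Accept header as before.
        couch_httpd:send_json(Req, 200, {[{ok, true}]}).
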
diff --git a/apps/couch/src/couch_httpd_db.erl b/apps/couch/src/couch_httpd_db.erl
index 71204598..0bf97e26 100644
--- a/apps/couch/src/couch_httpd_db.erl
+++ b/apps/couch/src/couch_httpd_db.erl
@@ -128,7 +128,7 @@ handle_changes_req1(Req, Db) ->
handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, Db) ->
ok = couch_db:check_is_admin(Db),
couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_view_compactor:start_compact(DbName, Id),
+ {ok, _} = couch_view_compactor:start_compact(DbName, Id),
send_json(Req, 202, {[{ok, true}]});
handle_compact_req(#httpd{method='POST'}=Req, Db) ->
@@ -477,6 +477,7 @@ db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
db_attachment_req(Req, Db, DocId, FileNameParts).
all_docs_view(Req, Db, Keys) ->
+ RawCollator = fun(A, B) -> A < B end,
#view_query_args{
start_key = StartKey,
start_docid = StartDocId,
@@ -486,7 +487,8 @@ all_docs_view(Req, Db, Keys) ->
skip = SkipCount,
direction = Dir,
inclusive_end = Inclusive
- } = QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, map),
+ } = QueryArgs
+ = couch_httpd_view:parse_view_params(Req, Keys, map, RawCollator),
{ok, Info} = couch_db:get_db_info(Db),
CurrentEtag = couch_httpd:make_etag(Info),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
diff --git a/apps/couch/src/couch_httpd_show.erl b/apps/couch/src/couch_httpd_show.erl
index 59f74e1c..58f046e4 100644
--- a/apps/couch/src/couch_httpd_show.erl
+++ b/apps/couch/src/couch_httpd_show.erl
@@ -106,13 +106,15 @@ get_fun_key(DDoc, Type, Name) ->
% send_method_not_allowed(Req, "POST,PUT,DELETE,ETC");
handle_doc_update_req(#httpd{
- path_parts=[_, _, _, _, UpdateName, DocId]
+ path_parts=[_, _, _, _, UpdateName, DocId|Rest]
}=Req, Db, DDoc) ->
- Doc = try couch_httpd_db:couch_doc_open(Db, DocId, nil, [conflicts])
+ DocParts = [DocId|Rest],
+ DocId1 = ?l2b(string:join([?b2l(P)|| P <- DocParts], "/")),
+ Doc = try couch_httpd_db:couch_doc_open(Db, DocId1, nil, [conflicts])
catch
_ -> nil
end,
- send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId);
+ send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId1);
handle_doc_update_req(#httpd{
path_parts=[_, _, _, _, UpdateName]
@@ -125,7 +127,7 @@ handle_doc_update_req(Req, _Db, _DDoc) ->
send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
JsonReq = couch_httpd_external:json_req_obj(Req, Db, DocId),
JsonDoc = couch_query_servers:json_doc(Doc),
- {Code, JsonResp1} = case couch_query_servers:ddoc_prompt(DDoc,
+ JsonResp1 = case couch_query_servers:ddoc_prompt(DDoc,
[<<"updates">>, UpdateName], [JsonDoc, JsonReq]) of
[<<"up">>, {NewJsonDoc}, {JsonResp}] ->
Options = case couch_httpd:header_value(Req, "X-Couch-Full-Commit",
@@ -138,16 +140,14 @@ send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
NewDoc = couch_doc:from_json_obj({NewJsonDoc}),
{ok, NewRev} = couch_db:update_doc(Db, NewDoc, Options),
NewRevStr = couch_doc:rev_to_str(NewRev),
- JsonRespWithRev = {[{<<"headers">>,
- {[{<<"X-Couch-Update-NewRev">>, NewRevStr}]}} | JsonResp]},
- {201, JsonRespWithRev};
- [<<"up">>, _Other, JsonResp] ->
- {200, JsonResp}
+ {[{<<"code">>, 201}, {<<"headers">>,
+ {[{<<"X-Couch-Update-NewRev">>, NewRevStr}]}} | JsonResp]};
+ [<<"up">>, _Other, {JsonResp}] ->
+ {[{<<"code">>, 200} | JsonResp]}
end,
-
- JsonResp2 = couch_util:json_apply_field({<<"code">>, Code}, JsonResp1),
+
% todo set location field
- couch_httpd_external:send_external_response(Req, JsonResp2).
+ couch_httpd_external:send_external_response(Req, JsonResp1).
% view-list request with view and list from same design doc.
@@ -190,14 +190,14 @@ handle_view_list_req(Req, _Db, _DDoc) ->
handle_view_list(Req, Db, DDoc, LName, {ViewDesignName, ViewName}, Keys) ->
ViewDesignId = <<"_design/", ViewDesignName/binary>>,
{ViewType, View, Group, QueryArgs} = couch_httpd_view:load_view(Req, Db, {ViewDesignId, ViewName}, Keys),
- Etag = list_etag(Req, Db, Group, View, {couch_httpd:doc_etag(DDoc), Keys}),
+ Etag = list_etag(Req, Db, Group, View, QueryArgs, {couch_httpd:doc_etag(DDoc), Keys}),
couch_httpd:etag_respond(Req, Etag, fun() ->
output_list(ViewType, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group)
end).
-list_etag(#httpd{user_ctx=UserCtx}=Req, Db, Group, View, More) ->
+list_etag(#httpd{user_ctx=UserCtx}=Req, Db, Group, View, QueryArgs, More) ->
Accept = couch_httpd:header_value(Req, "Accept"),
- couch_httpd_view:view_etag(Db, Group, View, {More, Accept, UserCtx#user_ctx.roles}).
+ couch_httpd_view:view_etag(Db, Group, View, QueryArgs, {More, Accept, UserCtx#user_ctx.roles}).
output_list(map, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group);
diff --git a/apps/couch/src/couch_httpd_vhost.erl b/apps/couch/src/couch_httpd_vhost.erl
index 9bfb5951..03dd02ae 100644
--- a/apps/couch/src/couch_httpd_vhost.erl
+++ b/apps/couch/src/couch_httpd_vhost.erl
@@ -216,15 +216,19 @@ code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+append_path("/"=_Target, "/"=_Path) ->
+ "/";
+append_path(Target, Path) ->
+ Target ++ Path.
% default redirect vhost handler
redirect_to_vhost(MochiReq, VhostTarget) ->
Path = MochiReq:get(raw_path),
- Target = VhostTarget ++ Path,
+ Target = append_path(VhostTarget, Path),
?LOG_DEBUG("Vhost Target: '~p'~n", [Target]),
-
+
Headers = mochiweb_headers:enter("x-couchdb-vhost-path", Path,
MochiReq:get(headers)),
@@ -356,8 +360,8 @@ split_host_port(HostAsString) ->
{split_host(HostAsString), '*'};
N ->
HostPart = string:substr(HostAsString, 1, N-1),
- case (catch erlang:list_to_integer(HostAsString, N+1,
- length(HostAsString))) of
+ case (catch erlang:list_to_integer(string:substr(HostAsString,
+ N+1, length(HostAsString)))) of
{'EXIT', _} ->
{split_host(HostAsString), '*'};
Port ->
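
append_path/2 exists only to avoid the double slash that plain concatenation produced when both the vhost target and the request path are "/"; every other combination is unchanged:

    "/"           = append_path("/", "/"),
    "/couchdb/db" = append_path("/couchdb", "/db").
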
diff --git a/apps/couch/src/couch_httpd_view.erl b/apps/couch/src/couch_httpd_view.erl
index b71fc2c6..082a5039 100644
--- a/apps/couch/src/couch_httpd_view.erl
+++ b/apps/couch/src/couch_httpd_view.erl
@@ -15,9 +15,9 @@
-export([handle_view_req/3,handle_temp_view_req/2]).
--export([parse_view_params/3]).
+-export([parse_view_params/4]).
-export([make_view_fold_fun/7, finish_view_fold/4, finish_view_fold/5, view_row_obj/4]).
--export([view_etag/3, view_etag/4, make_reduce_fold_funs/6]).
+-export([view_etag/5, make_reduce_fold_funs/6]).
-export([design_doc_view/5, parse_bool_param/1, doc_member/3]).
-export([make_key_options/1, load_view/4]).
@@ -34,18 +34,19 @@ design_doc_view(Req, Db, DName, ViewName, Keys) ->
Reduce = get_reduce_type(Req),
Result = case couch_view:get_map_view(Db, DesignId, ViewName, Stale) of
{ok, View, Group} ->
- QueryArgs = parse_view_params(Req, Keys, map),
+ QueryArgs = parse_view_params(Req, Keys, map, view_collator(View)),
output_map_view(Req, View, Group, Db, QueryArgs, Keys);
{not_found, Reason} ->
case couch_view:get_reduce_view(Db, DesignId, ViewName, Stale) of
{ok, ReduceView, Group} ->
+ Collator = view_collator(ReduceView),
case Reduce of
false ->
- QueryArgs = parse_view_params(Req, Keys, red_map),
+ QueryArgs = parse_view_params(Req, Keys, red_map, Collator),
MapView = couch_view:extract_map_view(ReduceView),
output_map_view(Req, MapView, Group, Db, QueryArgs, Keys);
_ ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
+ QueryArgs = parse_view_params(Req, Keys, reduce, Collator),
output_reduce_view(Req, Db, ReduceView, Group, QueryArgs, Keys)
end;
_ ->
@@ -90,19 +91,19 @@ handle_temp_view_req(#httpd{method='POST'}=Req, Db) ->
Reduce = get_reduce_type(Req),
case couch_util:get_value(<<"reduce">>, Props, null) of
null ->
- QueryArgs = parse_view_params(Req, Keys, map),
{ok, View, Group} = couch_view:get_temp_map_view(Db, Language,
DesignOptions, MapSrc),
+ QueryArgs = parse_view_params(Req, Keys, map, view_collator(View)),
output_map_view(Req, View, Group, Db, QueryArgs, Keys);
_ when Reduce =:= false ->
- QueryArgs = parse_view_params(Req, Keys, red_map),
{ok, View, Group} = couch_view:get_temp_map_view(Db, Language,
DesignOptions, MapSrc),
+ QueryArgs = parse_view_params(Req, Keys, red_map, view_collator(View)),
output_map_view(Req, View, Group, Db, QueryArgs, Keys);
RedSrc ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
{ok, View, Group} = couch_view:get_temp_reduce_view(Db, Language,
DesignOptions, MapSrc, RedSrc),
+ QueryArgs = parse_view_params(Req, Keys, reduce, view_collator(View)),
output_reduce_view(Req, Db, View, Group, QueryArgs, Keys)
end;
@@ -114,7 +115,7 @@ output_map_view(Req, View, Group, Db, QueryArgs, nil) ->
limit = Limit,
skip = SkipCount
} = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View),
+ CurrentEtag = view_etag(Db, Group, View, QueryArgs),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, RowCount} = couch_view:get_row_count(View),
FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, Group#group.current_seq, RowCount, #view_fold_helper_funs{reduce_count=fun couch_view:reduce_to_count/1}),
@@ -130,7 +131,7 @@ output_map_view(Req, View, Group, Db, QueryArgs, Keys) ->
limit = Limit,
skip = SkipCount
} = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View, Keys),
+ CurrentEtag = view_etag(Db, Group, View, QueryArgs, Keys),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, RowCount} = couch_view:get_row_count(View),
FoldAccInit = {Limit, SkipCount, undefined, []},
@@ -155,7 +156,7 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, nil) ->
skip = Skip,
group_level = GroupLevel
} = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View),
+ CurrentEtag = view_etag(Db, Group, View, QueryArgs),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
QueryArgs, CurrentEtag, Group#group.current_seq,
@@ -173,7 +174,7 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) ->
skip = Skip,
group_level = GroupLevel
} = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View, Keys),
+ CurrentEtag = view_etag(Db, Group, View, QueryArgs, Keys),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
QueryArgs, CurrentEtag, Group#group.current_seq,
@@ -209,18 +210,19 @@ load_view(Req, Db, {ViewDesignId, ViewName}, Keys) ->
Reduce = get_reduce_type(Req),
case couch_view:get_map_view(Db, ViewDesignId, ViewName, Stale) of
{ok, View, Group} ->
- QueryArgs = parse_view_params(Req, Keys, map),
+ QueryArgs = parse_view_params(Req, Keys, map, view_collator(View)),
{map, View, Group, QueryArgs};
{not_found, _Reason} ->
case couch_view:get_reduce_view(Db, ViewDesignId, ViewName, Stale) of
{ok, ReduceView, Group} ->
+ Collator = view_collator(ReduceView),
case Reduce of
false ->
- QueryArgs = parse_view_params(Req, Keys, map_red),
+ QueryArgs = parse_view_params(Req, Keys, map_red, Collator),
MapView = couch_view:extract_map_view(ReduceView),
{map, MapView, Group, QueryArgs};
_ ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
+ QueryArgs = parse_view_params(Req, Keys, reduce, Collator),
{reduce, ReduceView, Group, QueryArgs}
end;
{not_found, Reason} ->
@@ -228,12 +230,30 @@ load_view(Req, Db, {ViewDesignId, ViewName}, Keys) ->
end
end.
+view_collator({reduce, _N, _Lang, View}) ->
+ view_collator(View);
+
+view_collator({temp_reduce, View}) ->
+ view_collator(View);
+
+view_collator(#view{btree=Btree}) ->
+ % Return an "is-less-than" predicate by calling into the btree's
+ % collator. For raw collation, couch_btree compares arbitrary
+ % Erlang terms, but for normal (ICU) collation, it expects
+ % {Json, Id} tuples.
+ fun
+ ({_JsonA, _IdA}=A, {_JsonB, _IdB}=B) ->
+ couch_btree:less(Btree, A, B);
+ (JsonA, JsonB) ->
+ couch_btree:less(Btree, {JsonA, null}, {JsonB, null})
+ end.
+
% query_parse_error could be removed
% we wouldn't need to pass the view type, it'd just parse params.
% I'm not sure what to do about the error handling, but
% it might simplify things to have a parse_view_params function
% that doesn't throw().
-parse_view_params(Req, Keys, ViewType) ->
+parse_view_params(Req, Keys, ViewType, LessThan) ->
QueryList = couch_httpd:qs(Req),
QueryParams =
lists:foldl(fun({K, V}, Acc) ->
@@ -247,7 +267,7 @@ parse_view_params(Req, Keys, ViewType) ->
QueryArgs = lists:foldl(fun({K, V}, Args2) ->
validate_view_query(K, V, Args2)
end, Args, lists:reverse(QueryParams)), % Reverse to match QS order.
- warn_on_empty_key_range(QueryArgs),
+ warn_on_empty_key_range(QueryArgs, LessThan),
GroupLevel = QueryArgs#view_query_args.group_level,
case {ViewType, GroupLevel, IsMultiGet} of
{reduce, exact, true} ->
@@ -328,15 +348,15 @@ parse_view_param("callback", _) ->
parse_view_param(Key, Value) ->
[{extra, {Key, Value}}].
-warn_on_empty_key_range(#view_query_args{start_key=undefined}) ->
+warn_on_empty_key_range(#view_query_args{start_key=undefined}, _Lt) ->
ok;
-warn_on_empty_key_range(#view_query_args{end_key=undefined}) ->
+warn_on_empty_key_range(#view_query_args{end_key=undefined}, _Lt) ->
ok;
-warn_on_empty_key_range(#view_query_args{start_key=A, end_key=A}) ->
+warn_on_empty_key_range(#view_query_args{start_key=A, end_key=A}, _Lt) ->
ok;
warn_on_empty_key_range(#view_query_args{
- start_key=StartKey, end_key=EndKey, direction=Dir}) ->
- case {Dir, couch_view:less_json(StartKey, EndKey)} of
+ start_key=StartKey, end_key=EndKey, direction=Dir}, LessThan) ->
+ case {Dir, LessThan(StartKey, EndKey)} of
{fwd, false} ->
throw({query_parse_error,
<<"No rows can match your key range, reverse your ",
@@ -640,14 +660,16 @@ send_json_reduce_row(Resp, {Key, Value}, RowFront) ->
send_chunk(Resp, RowFront ++ ?JSON_ENCODE({[{key, Key}, {value, Value}]})),
{ok, ",\r\n"}.
-view_etag(Db, Group, View) ->
- view_etag(Db, Group, View, nil).
+view_etag(Db, Group, View, QueryArgs) ->
+ view_etag(Db, Group, View, QueryArgs, nil).
-view_etag(Db, Group, {reduce, _, _, View}, Extra) ->
- view_etag(Db, Group, View, Extra);
-view_etag(Db, Group, {temp_reduce, View}, Extra) ->
- view_etag(Db, Group, View, Extra);
-view_etag(_Db, #group{sig=Sig}, #view{update_seq=UpdateSeq, purge_seq=PurgeSeq}, Extra) ->
+view_etag(Db, Group, {reduce, _, _, View}, QueryArgs, Extra) ->
+ view_etag(Db, Group, View, QueryArgs, Extra);
+view_etag(Db, Group, {temp_reduce, View}, QueryArgs, Extra) ->
+ view_etag(Db, Group, View, QueryArgs, Extra);
+view_etag(_Db, #group{sig=Sig, current_seq=CurrentSeq}, _View, #view_query_args{include_docs=true}, Extra) ->
+ couch_httpd:make_etag({Sig, CurrentSeq, Extra});
+view_etag(_Db, #group{sig=Sig}, #view{update_seq=UpdateSeq, purge_seq=PurgeSeq}, _QueryArgs, Extra) ->
couch_httpd:make_etag({Sig, UpdateSeq, PurgeSeq, Extra}).
% the view row has an error
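
view_collator/1 hands parse_view_params/4 an is-less-than predicate built from the view's own btree, so warn_on_empty_key_range/2 respects raw collation as well as the default ICU collation. Illustrative calls against the returned predicate (values chosen so both collations agree):

    LessThan = view_collator(View),
    true = LessThan(<<"apple">>, <<"banana">>),                    % bare JSON keys
    true = LessThan({<<"a">>, <<"doc1">>}, {<<"a">>, <<"doc2">>}). % {Key, DocId} pairs
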
diff --git a/apps/couch/src/couch_key_tree.erl b/apps/couch/src/couch_key_tree.erl
index 2f3c6abf..5e24e0f7 100644
--- a/apps/couch/src/couch_key_tree.erl
+++ b/apps/couch/src/couch_key_tree.erl
@@ -50,7 +50,7 @@
-export([merge/3, find_missing/2, get_key_leafs/2,
get_full_key_paths/2, get/2, compute_data_size/1]).
-export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
- get_all_leafs_full/1,stem/2,map_leafs/2]).
+ get_all_leafs_full/1,stem/2,map_leafs/2, fold/3]).
-include("couch_db.hrl").
@@ -373,6 +373,21 @@ tree_fold_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree], Acc) ->
Acc2
end.
+fold(_Fun, Acc, []) ->
+ Acc;
+fold(Fun, Acc0, [{Pos, Tree}|Rest]) ->
+ Acc1 = fold_simple(Fun, Acc0, Pos, [Tree]),
+ fold(Fun, Acc1, Rest).
+
+fold_simple(_Fun, Acc, _Pos, []) ->
+ Acc;
+fold_simple(Fun, Acc0, Pos, [{Key, Value, SubTree} | RestTree]) ->
+ Type = if SubTree == [] -> leaf; true -> branch end,
+ Acc1 = Fun({Pos, Key}, Value, Type, Acc0),
+ Acc2 = fold_simple(Fun, Acc1, Pos+1, SubTree),
+ fold_simple(Fun, Acc2, Pos, RestTree).
+
+
map(_Fun, []) ->
[];
map(Fun, [{Pos, Tree}|Rest]) ->
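
fold/3 visits every node of every revision tree and tags each callback with leaf or branch; couch_doc:max_seq/2 above is its first user. A small sketch with the same interface that simply counts leaves:

    count_leafs_via_fold(Trees) ->
        couch_key_tree:fold(
            fun(_PosKey, _Value, leaf, Acc)   -> Acc + 1;
               (_PosKey, _Value, branch, Acc) -> Acc
            end, 0, Trees).
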
diff --git a/apps/couch/src/couch_log.erl b/apps/couch/src/couch_log.erl
index 9bac7450..362d092d 100644
--- a/apps/couch/src/couch_log.erl
+++ b/apps/couch/src/couch_log.erl
@@ -25,22 +25,12 @@
-define(LEVEL_TMI, 0).
debug(Format, Args) ->
- case debug_on() of
- false ->
- ok;
- true ->
- {ConsoleMsg, FileMsg} = get_log_messages(self(), debug, Format, Args),
- gen_event:sync_notify(error_logger, {couch_debug, ConsoleMsg, FileMsg})
- end.
+ {ConsoleMsg, FileMsg} = get_log_messages(self(), debug, Format, Args),
+ gen_event:sync_notify(error_logger, {couch_debug, ConsoleMsg, FileMsg}).
info(Format, Args) ->
- case info_on() of
- false ->
- ok;
- true ->
- {ConsoleMsg, FileMsg} = get_log_messages(self(), info, Format, Args),
- gen_event:sync_notify(error_logger, {couch_info, ConsoleMsg, FileMsg})
- end.
+ {ConsoleMsg, FileMsg} = get_log_messages(self(), info, Format, Args),
+ gen_event:sync_notify(error_logger, {couch_info, ConsoleMsg, FileMsg}).
error(Format, Args) ->
{ConsoleMsg, FileMsg} = get_log_messages(self(), error, Format, Args),
@@ -180,6 +170,15 @@ get_log_messages(Pid, Level, Format, Args) ->
read(Bytes, Offset) ->
LogFileName = couch_config:get("log", "file"),
LogFileSize = filelib:file_size(LogFileName),
+ MaxChunkSize = list_to_integer(
+ couch_config:get("httpd", "log_max_chunk_size", "1000000")),
+ case Bytes > MaxChunkSize of
+ true ->
+ throw({bad_request, "'bytes' cannot exceed " ++
+ integer_to_list(MaxChunkSize)});
+ false ->
+ ok
+ end,
{ok, Fd} = file:open(LogFileName, [read]),
Start = lists:max([LogFileSize - Bytes, 0]) + Offset,
@@ -188,4 +187,5 @@ read(Bytes, Offset) ->
% TODO: make streaming
{ok, Chunk} = file:pread(Fd, Start, LogFileSize),
+ ok = file:close(Fd),
Chunk.
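
read/2 now refuses requests larger than the configured chunk limit and closes the file descriptor when it is done. Illustrative behaviour with the default limit of 1000000 bytes:

    Chunk = couch_log:read(500000, 0),   % allowed
    %% couch_log:read(5000000, 0) throws
    %% {bad_request, "'bytes' cannot exceed 1000000"}
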
diff --git a/apps/couch/src/couch_os_process.erl b/apps/couch/src/couch_os_process.erl
index 7fe8aa89..2a6d92a7 100644
--- a/apps/couch/src/couch_os_process.erl
+++ b/apps/couch/src/couch_os_process.erl
@@ -173,10 +173,7 @@ handle_info({Port, {exit_status, 0}}, #os_proc{port=Port}=OsProc) ->
{stop, normal, OsProc};
handle_info({Port, {exit_status, Status}}, #os_proc{port=Port}=OsProc) ->
?LOG_ERROR("OS Process died with status: ~p", [Status]),
- {stop, {exit_status, Status}, OsProc};
-handle_info(Msg, OsProc) ->
- ?LOG_DEBUG("OS Proc: Unknown info: ~p", [Msg]),
- {noreply, OsProc}.
+ {stop, {exit_status, Status}, OsProc}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
diff --git a/apps/couch/src/couch_query_servers.erl b/apps/couch/src/couch_query_servers.erl
index be7c465b..9714a0ca 100644
--- a/apps/couch/src/couch_query_servers.erl
+++ b/apps/couch/src/couch_query_servers.erl
@@ -17,9 +17,9 @@
-export([filter_docs/5]).
-export([with_ddoc_proc/2, proc_prompt/2, ddoc_prompt/3, ddoc_proc_prompt/3, json_doc/1]).
--export([get_os_process/1, ret_os_process/1]).
-% -export([test/0]).
+% For 210-os-proc-pool.t
+-export([get_os_process/1, ret_os_process/1]).
-include("couch_db.hrl").
diff --git a/apps/couch/src/couch_rep.erl b/apps/couch/src/couch_rep.erl
index 2d011aab..46bcb282 100644
--- a/apps/couch/src/couch_rep.erl
+++ b/apps/couch/src/couch_rep.erl
@@ -20,6 +20,7 @@
-export([start_replication/4, end_replication/1, get_result/4]).
-include("couch_db.hrl").
+-include("couch_js_functions.hrl").
-include_lib("ibrowse/include/ibrowse.hrl").
-define(REP_ID_VERSION, 2).
@@ -110,20 +111,20 @@ checkpoint(Server) ->
gen_server:cast(Server, do_checkpoint).
get_result(Server, {BaseId, _Extension}, {Props} = PostBody, UserCtx) ->
- case couch_util:get_value(<<"continuous">>, Props, false) of
- true ->
- {ok, {continuous, ?l2b(BaseId)}};
- false ->
- try gen_server:call(Server, get_result, infinity) of
- retry -> replicate(PostBody, UserCtx);
- Else -> Else
- catch
- exit:{noproc, {gen_server, call, [Server, get_result , infinity]}} ->
- %% oops, this replication just finished -- restart it.
- replicate(PostBody, UserCtx);
- exit:{normal, {gen_server, call, [Server, get_result , infinity]}} ->
- %% we made the call during terminate
- replicate(PostBody, UserCtx)
+ case couch_util:get_value(<<"continuous">>, Props, false) of
+ true ->
+ {ok, {continuous, ?l2b(BaseId)}};
+ false ->
+ try gen_server:call(Server, get_result, infinity) of
+ retry -> replicate(PostBody, UserCtx);
+ Else -> Else
+ catch
+ exit:{noproc, {gen_server, call, [Server, get_result, infinity]}} ->
+ %% oops, this replication just finished -- restart it.
+ replicate(PostBody, UserCtx);
+ exit:{normal, {gen_server, call, [Server, get_result, infinity]}} ->
+ %% we made the call during terminate
+ replicate(PostBody, UserCtx)
end
end.
@@ -154,13 +155,13 @@ do_init([{BaseId, _Ext} = RepId, {PostProps}, UserCtx, Module] = InitArgs) ->
[SourceLog, TargetLog] = find_replication_logs(
[Source, Target], BaseId, {PostProps}, UserCtx),
- {StartSeq, History} = compare_replication_logs(SourceLog, TargetLog),
+ {StartSeq, History} = compare_replication_logs(SourceLog, TargetLog),
- {ok, ChangesFeed} =
+ {ok, ChangesFeed} =
couch_rep_changes_feed:start_link(self(), Source, StartSeq, PostProps),
- {ok, MissingRevs} =
+ {ok, MissingRevs} =
couch_rep_missing_revs:start_link(self(), Target, ChangesFeed, PostProps),
- {ok, Reader} =
+ {ok, Reader} =
couch_rep_reader:start_link(self(), Source, MissingRevs, PostProps),
{ok, Writer} =
couch_rep_writer:start_link(self(), Target, Reader, PostProps),
@@ -545,7 +546,7 @@ filter_code(Filter, Props, UserCtx) ->
DocErrorMsg = io_lib:format(
"Couldn't open document `_design/~s` from source "
"database `~s`: ~s",
- [dbname(Source), DDocName, couch_util:to_binary(DocError)]),
+ [DDocName, dbname(Source), couch_util:to_binary(DocError)]),
throw({error, iolist_to_binary(DocErrorMsg)})
end,
Code = couch_util:get_nested_json_value(
@@ -649,18 +650,18 @@ open_db(<<"https://",_/binary>>=Url, _, ProxyParams, CreateTarget) ->
open_db({[{<<"url">>,Url}]}, [], ProxyParams, CreateTarget);
open_db(<<DbName/binary>>, UserCtx, _ProxyParams, CreateTarget) ->
try
- case CreateTarget of
- true ->
- ok = couch_httpd:verify_is_server_admin(UserCtx),
- couch_server:create(DbName, [{user_ctx, UserCtx}]);
+ case CreateTarget of
+ true ->
+ ok = couch_httpd:verify_is_server_admin(UserCtx),
+ couch_server:create(DbName, [{user_ctx, UserCtx}]);
false ->
ok
- end,
+ end,
- case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- couch_db:monitor(Db),
- Db;
+ case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
+ {ok, Db} ->
+ couch_db:monitor(Db),
+ Db;
{not_found, no_db_file} ->
throw({db_not_found, DbName})
end
diff --git a/apps/couch/src/couch_rep_changes_feed.erl b/apps/couch/src/couch_rep_changes_feed.erl
index 36fe82aa..7a9573d6 100644
--- a/apps/couch/src/couch_rep_changes_feed.erl
+++ b/apps/couch/src/couch_rep_changes_feed.erl
@@ -154,7 +154,7 @@ init([Parent, #http_db{headers = Headers0} = Source, Since, PostProps]) ->
end;
{ibrowse_async_headers, ReqId, Code, _} ->
{stop, {changes_error_code, list_to_integer(Code)}}
- after 10000 ->
+ after 30000 ->
{stop, changes_timeout}
end;
@@ -491,13 +491,30 @@ purge_req_messages(ReqId) ->
ok
end.
-queue_changes_row(Row, #state{doc_ids = nil, count = Count, rows = Rows}) ->
- {queue:in(Row, Rows), Count + 1};
+queue_changes_row(Row, #state{doc_ids = nil} = State) ->
+ maybe_queue_row(Row, State);
queue_changes_row({RowProps} = Row,
- #state{doc_ids = Ids, count = Count, rows = Rows}) ->
+ #state{doc_ids = Ids, count = Count, rows = Rows} = State) ->
case lists:member(get_value(<<"id">>, RowProps), Ids) of
true ->
- {queue:in(Row, Rows), Count + 1};
+ maybe_queue_row(Row, State);
false ->
{Rows, Count}
end.
+
+maybe_queue_row({Props} = Row, #state{count = Count, rows = Rows} = State) ->
+ case get_value(<<"id">>, Props) of
+ <<>> ->
+ [_, Db | _] = State#state.init_args,
+ ?LOG_ERROR("Replicator: ignoring document with empty ID in source "
+ "database `~s` (_changes sequence ~p)",
+ [dbname(Db), couch_util:get_value(<<"seq">>, Props)]),
+ {Rows, Count};
+ _ ->
+ {queue:in(Row, Rows), Count + 1}
+ end.
+
+dbname(#http_db{url = Url}) ->
+ couch_util:url_strip_password(Url);
+dbname(#db{name = Name}) ->
+ Name.
diff --git a/apps/couch/src/couch_rep_reader.erl b/apps/couch/src/couch_rep_reader.erl
index 0d344e5c..1e8ca074 100644
--- a/apps/couch/src/couch_rep_reader.erl
+++ b/apps/couch/src/couch_rep_reader.erl
@@ -244,7 +244,7 @@ reader_loop(ReaderServer, Parent, Source, MissingRevsServer) ->
case couch_rep_missing_revs:next(MissingRevsServer) of
complete ->
exit(complete);
- {HighSeq, IdsRevs} ->
+ {_HighSeq, IdsRevs} ->
% to be safe, make sure Results are sorted by source_seq
SortedIdsRevs = lists:keysort(2, IdsRevs),
RequestSeqs = [S || {_,S,_} <- SortedIdsRevs],
@@ -255,8 +255,8 @@ reader_loop(ReaderServer, Parent, Source, MissingRevsServer) ->
infinity) || {Id,Seq,Revs} <- SortedIdsRevs],
reader_loop(ReaderServer, Parent, Source, MissingRevsServer);
_Local ->
- {ok, Source1} = gen_server:call(Parent, get_source_db, infinity),
- Source2 = maybe_reopen_db(Source1, HighSeq),
+ {ok, Source2} = couch_db:open(
+ Source#db.name, [{user_ctx, Source#db.user_ctx}]),
lists:foreach(fun({Id,Seq,Revs}) ->
{ok, Docs} = couch_db:open_doc_revs(Source2, Id, Revs, [latest]),
JustTheDocs = [Doc || {ok, Doc} <- Docs],
@@ -268,12 +268,6 @@ reader_loop(ReaderServer, Parent, Source, MissingRevsServer) ->
end
end.
-maybe_reopen_db(#db{update_seq=OldSeq} = Db, HighSeq) when HighSeq > OldSeq ->
- {ok, NewDb} = couch_db:open(Db#db.name, [{user_ctx, Db#db.user_ctx}]),
- NewDb;
-maybe_reopen_db(Db, _HighSeq) ->
- Db.
-
spawn_document_request(Source, Id, Seq, Revs) ->
Server = self(),
SpawnFun = fun() ->
diff --git a/apps/couch/src/couch_rep_writer.erl b/apps/couch/src/couch_rep_writer.erl
index 2b722e8e..40323925 100644
--- a/apps/couch/src/couch_rep_writer.erl
+++ b/apps/couch/src/couch_rep_writer.erl
@@ -26,7 +26,8 @@ writer_loop(Parent, Reader) ->
ok;
{HighSeq, Docs} ->
DocCount = length(Docs),
- {ok, Target} = gen_server:call(Parent, get_target_db, infinity),
+ {ok, Target0} = gen_server:call(Parent, get_target_db, infinity),
+ Target = open_db(Target0),
try write_docs(Target, Docs) of
{ok, []} ->
Parent ! {update_stats, docs_written, DocCount};
@@ -38,6 +39,8 @@ writer_loop(Parent, Reader) ->
{attachment_request_failed, Err} ->
?LOG_DEBUG("writer failed to write an attachment ~p", [Err]),
exit({attachment_request_failed, Err, Docs})
+ after
+ close_db(Target)
end,
Parent ! {writer_checkpoint, HighSeq},
couch_rep_att:cleanup(),
@@ -163,3 +166,14 @@ write_docs_1({Props}) ->
ErrId = couch_util:to_existing_atom(couch_util:get_value(<<"error">>, Props)),
Reason = couch_util:get_value(<<"reason">>, Props),
{{Id, Rev}, {ErrId, Reason}}.
+
+open_db(#db{name = Name, user_ctx = UserCtx}) ->
+ {ok, Db} = couch_db:open(Name, [{user_ctx, UserCtx}]),
+ Db;
+open_db(HttpDb) ->
+ HttpDb.
+
+close_db(#db{} = Db) ->
+ couch_db:close(Db);
+close_db(_HttpDb) ->
+ ok.
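
The writer now opens a fresh handle on a local target database for each batch and closes it even when write_docs/2 throws; for HTTP targets open_db/close_db are pass-throughs. The same pattern in isolation (with_local_db/2 is a hypothetical illustration, not part of the module):

    with_local_db(#db{name = Name, user_ctx = UserCtx}, Fun) ->
        {ok, Db} = couch_db:open(Name, [{user_ctx, UserCtx}]),
        try Fun(Db) after couch_db:close(Db) end;
    with_local_db(HttpDb, Fun) ->
        Fun(HttpDb).
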
diff --git a/apps/couch/src/couch_replication_manager.erl b/apps/couch/src/couch_replication_manager.erl
index 3f7cc27c..3715cea1 100644
--- a/apps/couch/src/couch_replication_manager.erl
+++ b/apps/couch/src/couch_replication_manager.erl
@@ -317,7 +317,7 @@ process_update(State, {Change}) ->
<<"error">> ->
case ets:lookup(?DOC_TO_REP, DocId) of
[] ->
- maybe_start_replication(State, DocId, JsonRepDoc);
+ maybe_start_replication(State, DocId, JsonRepDoc);
_ ->
State
end
diff --git a/apps/couch/src/couch_view.erl b/apps/couch/src/couch_view.erl
index 05174245..8d479d7e 100644
--- a/apps/couch/src/couch_view.erl
+++ b/apps/couch/src/couch_view.erl
@@ -260,8 +260,16 @@ fold_fun(Fun, [KV|Rest], {KVReds, Reds}, Acc) ->
fold(#view{btree=Btree}, Fun, Acc, Options) ->
WrapperFun =
- fun(KV, Reds, Acc2) ->
- fold_fun(Fun, expand_dups([KV],[]), Reds, Acc2)
+ fun(visit, KV, Reds, Acc2) ->
+ fold_fun(Fun, expand_dups([KV],[]), Reds, Acc2);
+ (traverse, LK, Red, Acc2)
+ when is_function(Fun, 4) ->
+ Fun(traverse, LK, Red, Acc2);
+ (traverse, _LK, Red, {_, Skip, _, _} = Acc2)
+ when Skip >= element(1, Red) ->
+ {skip, setelement(2, Acc2, Skip - element(1, Red))};
+ (traverse, _, _, Acc2) ->
+ {ok, Acc2}
end,
{ok, _LastReduce, _AccResult} = couch_btree:fold(Btree, WrapperFun, Acc, Options).
diff --git a/apps/couch/src/couch_view_compactor.erl b/apps/couch/src/couch_view_compactor.erl
index 69aaff00..8ea1dca2 100644
--- a/apps/couch/src/couch_view_compactor.erl
+++ b/apps/couch/src/couch_view_compactor.erl
@@ -20,14 +20,14 @@
%% @doc Compacts the views. GroupId must not include the _design/ prefix
start_compact(DbName, GroupId) ->
Pid = couch_view:get_group_server(DbName, <<"_design/",GroupId/binary>>),
- gen_server:cast(Pid, {start_compact, fun compact_group/2}).
+ gen_server:call(Pid, {start_compact, fun compact_group/3}).
%%=============================================================================
%% internal functions
%%=============================================================================
%% @spec compact_group(Group, NewGroup) -> ok
-compact_group(Group, EmptyGroup) ->
+compact_group(Group, EmptyGroup, DbName) ->
#group{
current_seq = Seq,
id_btree = IdBtree,
@@ -36,10 +36,8 @@ compact_group(Group, EmptyGroup) ->
} = Group,
#group{
- dbname = DbName,
fd = Fd,
id_btree = EmptyIdBtree,
- sig = Sig,
views = EmptyViews
} = EmptyGroup,
@@ -82,9 +80,26 @@ compact_group(Group, EmptyGroup) ->
views=NewViews,
current_seq=Seq
},
-
- Pid = ets:lookup_element(group_servers_by_sig, {DbName, Sig}, 2),
- gen_server:cast(Pid, {compact_done, NewGroup}).
+ maybe_retry_compact(Db, GroupId, NewGroup).
+
+maybe_retry_compact(#db{name = DbName} = Db, GroupId, NewGroup) ->
+ #group{sig = Sig, fd = NewFd} = NewGroup,
+ Header = {Sig, couch_view_group:get_index_header_data(NewGroup)},
+ ok = couch_file:write_header(NewFd, Header),
+ Pid = ets:lookup_element(group_servers_by_sig, {DbName, Sig}, 2),
+ case gen_server:call(Pid, {compact_done, NewGroup}) of
+ ok ->
+ couch_db:close(Db);
+ update ->
+ {ok, Db2} = couch_db:reopen(Db),
+ {_, Ref} = erlang:spawn_monitor(fun() ->
+ couch_view_updater:update(nil, NewGroup, Db2)
+ end),
+ receive
+ {'DOWN', Ref, _, _, {new_group, NewGroup2}} ->
+ maybe_retry_compact(Db2, GroupId, NewGroup2)
+ end
+ end.
%% @spec compact_view(View, EmptyView) -> CompactView
compact_view(View, EmptyView) ->
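
start_compact/2 is now a synchronous gen_server:call that returns the compactor pid, which is why the couch_httpd_db.erl hunk above matches {ok, _} instead of a bare ok. A minimal caller sketch:

    {ok, CompactorPid} = couch_view_compactor:start_compact(DbName, GroupId),
    ?LOG_INFO("view compaction of ~s/_design/~s running as ~p",
              [DbName, GroupId, CompactorPid]).
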
diff --git a/apps/couch/src/couch_view_group.erl b/apps/couch/src/couch_view_group.erl
index 11cb4c60..75644d6b 100644
--- a/apps/couch/src/couch_view_group.erl
+++ b/apps/couch/src/couch_view_group.erl
@@ -17,6 +17,9 @@
-export([start_link/1, request_group/2, trigger_group_update/2, request_group_info/1]).
-export([open_db_group/2, open_temp_group/5, design_doc_to_view_group/1,design_root/2]).
+%% Exports for the compactor
+-export([get_index_header_data/1]).
+
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
@@ -80,8 +83,7 @@ start_link(InitArgs) ->
init({{_, DbName, _} = InitArgs, ReturnPid, Ref}) ->
process_flag(trap_exit, true),
case prepare_group(InitArgs, false) of
- {ok, #group{fd=Fd, current_seq=Seq}=Group} ->
- {ok, Db} = couch_db:open(DbName, []),
+ {ok, Db, #group{fd=Fd, current_seq=Seq}=Group} ->
case Seq > couch_db:get_update_seq(Db) of
true ->
ReturnPid ! {Ref, self(), {error, invalid_view_seq}},
@@ -92,7 +94,7 @@ init({{_, DbName, _} = InitArgs, ReturnPid, Ref}) ->
{ok, #group_state{
db_name=DbName,
init_args=InitArgs,
- group=Group#group{dbname=DbName},
+ group=Group,
ref_counter=erlang:monitor(process,Fd)}}
end;
Error ->
@@ -118,16 +120,16 @@ init({{_, DbName, _} = InitArgs, ReturnPid, Ref}) ->
handle_call({request_group, RequestSeq}, From,
#group_state{
+ db_name=DbName,
group=#group{current_seq=Seq}=Group,
updater_pid=nil,
waiting_list=WaitList
}=State) when RequestSeq > Seq ->
Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group) end),
+ Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group, DbName) end),
{noreply, State#group_state{
updater_pid=Pid,
- group=Group,
waiting_list=[{From,RequestSeq}|WaitList]
}, infinity};
@@ -148,40 +150,28 @@ handle_call({request_group, RequestSeq}, From,
waiting_list=[{From, RequestSeq}|WaitList]
}, infinity};
-handle_call({start_compact, CompactFun}, _From, State) ->
- {noreply, NewState} = handle_cast({start_compact, CompactFun}, State),
- {reply, {ok, NewState#group_state.compactor_pid}, NewState};
-
handle_call(request_group_info, _From, State) ->
GroupInfo = get_group_info(State),
- {reply, {ok, GroupInfo}, State}.
+ {reply, {ok, GroupInfo}, State};
-handle_cast({update_group, RequestSeq},
- #group_state{
- group=#group{current_seq=Seq}=Group,
- updater_pid=nil}=State) when RequestSeq > Seq ->
- Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group) end),
- {noreply, State#group_state{updater_pid=Pid}};
-handle_cast({update_group, _RequestSeq}, State) ->
- {noreply, State};
-
-handle_cast({start_compact, CompactFun}, #group_state{compactor_pid=nil}
+handle_call({start_compact, CompactFun}, _From, #group_state{compactor_pid=nil}
= State) ->
#group_state{
- group = #group{dbname = DbName, name = GroupId, sig = GroupSig} = Group,
- init_args = {RootDir, _, _}
+ group = #group{name = GroupId, sig = GroupSig} = Group,
+ init_args = {RootDir, DbName, _}
} = State,
?LOG_INFO("View index compaction starting for ~s ~s", [DbName, GroupId]),
+ {ok, Db} = couch_db:open_int(DbName, []),
{ok, Fd} = open_index_file(compact, RootDir, DbName, GroupSig),
- NewGroup = reset_file(Fd, DbName, Group),
- Pid = spawn_link(fun() -> CompactFun(Group, NewGroup) end),
- {noreply, State#group_state{compactor_pid = Pid}};
-handle_cast({start_compact, _}, State) ->
+ NewGroup = reset_file(Db, Fd, DbName, Group),
+ couch_db:close(Db),
+ Pid = spawn_link(fun() -> CompactFun(Group, NewGroup, DbName) end),
+ {reply, {ok, Pid}, State#group_state{compactor_pid = Pid}};
+handle_call({start_compact, _}, _From, #group_state{compactor_pid=Pid} = State) ->
%% compact already running, this is a no-op
- {noreply, State};
+ {reply, {ok, Pid}, State};
-handle_cast({compact_done, #group{fd=NewFd, current_seq=NewSeq} = NewGroup},
+handle_call({compact_done, #group{fd=NewFd, current_seq=NewSeq} = NewGroup}, _From,
#group_state{group = #group{current_seq=OldSeq}} = State)
when NewSeq >= OldSeq ->
#group_state{
@@ -204,7 +194,7 @@ handle_cast({compact_done, #group{fd=NewFd, current_seq=NewSeq} = NewGroup},
unlink(UpdaterPid),
exit(UpdaterPid, view_compaction_complete),
Owner = self(),
- spawn_link(fun()-> couch_view_updater:update(Owner, NewGroup) end);
+ spawn_link(fun()-> couch_view_updater:update(Owner, NewGroup, DbName) end);
true ->
nil
end,
@@ -216,30 +206,30 @@ handle_cast({compact_done, #group{fd=NewFd, current_seq=NewSeq} = NewGroup},
erlang:demonitor(RefCounter),
self() ! delayed_commit,
- {noreply, State#group_state{
+ {reply, ok, State#group_state{
group=NewGroup,
ref_counter=erlang:monitor(process,NewFd),
compactor_pid=nil,
updater_pid=NewUpdaterPid
}};
-handle_cast({compact_done, NewGroup}, State) ->
+handle_call({compact_done, NewGroup}, _From, State) ->
#group_state{
group = #group{name = GroupId, current_seq = CurrentSeq},
init_args={_RootDir, DbName, _}
} = State,
?LOG_INFO("View index compaction still behind for ~s ~s -- current: ~p " ++
"compact: ~p", [DbName, GroupId, CurrentSeq, NewGroup#group.current_seq]),
- GroupServer = self(),
- Pid = spawn_link(fun() ->
- erlang:monitor(process, NewGroup#group.fd),
- {_,Ref} = erlang:spawn_monitor(fun() ->
- couch_view_updater:update(nil, NewGroup)
- end),
- receive {'DOWN', Ref, _, _, {new_group, NewGroup2}} ->
- gen_server:cast(GroupServer, {compact_done, NewGroup2})
- end
- end),
- {noreply, State#group_state{compactor_pid = Pid}};
+ {reply, update, State}.
+
+handle_cast({update_group, RequestSeq},
+ #group_state{
+ group=#group{current_seq=Seq}=Group,
+ updater_pid=nil}=State) when RequestSeq > Seq ->
+ Owner = self(),
+ Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group) end),
+ {noreply, State#group_state{updater_pid=Pid}};
+handle_cast({update_group, _RequestSeq}, State) ->
+ {noreply, State};
handle_cast({partial_update, Pid, NewGroup}, #group_state{updater_pid=Pid}
= State) ->
@@ -279,7 +269,7 @@ handle_info(delayed_commit, #group_state{db_name=DbName,group=Group}=State) ->
end;
handle_info({'EXIT', FromPid, {new_group, Group}},
- #group_state{
+ #group_state{db_name=DbName,
updater_pid=UpPid,
ref_counter=RefCounter,
waiting_list=WaitList,
@@ -293,22 +283,25 @@ handle_info({'EXIT', FromPid, {new_group, Group}},
{noreply, State#group_state{waiting_commit=true, waiting_list=[],
group=Group, updater_pid=nil}};
StillWaiting ->
- % we still have some waiters, reupdate the index
+ % we still have some waiters, reopen the database and reupdate the index
Owner = self(),
- Pid = spawn_link(fun() -> couch_view_updater:update(Owner, Group) end),
+ Pid = spawn_link(fun() -> couch_view_updater:update(Owner, Group, DbName) end),
{noreply, State#group_state{waiting_commit=true,
- waiting_list=StillWaiting, group=Group, updater_pid=Pid}}
+ waiting_list=StillWaiting, updater_pid=Pid}}
end;
handle_info({'EXIT', _, {new_group, _}}, State) ->
%% message from an old (probably pre-compaction) updater; ignore
{noreply, State};
-handle_info({'EXIT', FromPid, reset}, #group_state{init_args=InitArgs,
- updater_pid=FromPid}=State) ->
+handle_info({'EXIT', UpPid, reset},
+ #group_state{init_args=InitArgs, updater_pid=UpPid} = State) ->
case prepare_group(InitArgs, true) of
- {ok, ResetGroup} ->
+ {ok, Db, ResetGroup} ->
Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, ResetGroup) end),
+ couch_db:close(Db),
+ Pid = spawn_link(fun() ->
+ couch_view_updater:update(Owner, ResetGroup, Db#db.name)
+ end),
{noreply, State#group_state{
updater_pid=Pid,
group=ResetGroup}};
@@ -368,29 +361,32 @@ reply_all(#group_state{waiting_list=WaitList}=State, Reply) ->
[catch gen_server:reply(Pid, Reply) || {Pid, _} <- WaitList],
State#group_state{waiting_list=[]}.
-prepare_group({Root, DbName, #group{dbname=X}=G}, Reset) when X =/= DbName ->
- prepare_group({Root, DbName, G#group{dbname=DbName}}, Reset);
prepare_group({RootDir, DbName, #group{sig=Sig}=Group}, ForceReset)->
- case open_index_file(RootDir, DbName, Sig) of
- {ok, Fd} ->
- if ForceReset ->
- % this can happen if we missed a purge
- {ok, reset_file(Fd, DbName, Group)};
- true ->
- % 09 UPGRADE CODE
- ok = couch_file:upgrade_old_header(Fd, <<$r, $c, $k, 0>>),
- case (catch couch_file:read_header(Fd)) of
- {ok, {Sig, HeaderInfo}} ->
- % sigs match!
- {ok, init_group(Fd, Group, HeaderInfo)};
- _ ->
- % this happens on a new file
- {ok, reset_file(Fd, DbName, Group)}
- end
+ case couch_db:open_int(DbName, []) of
+ {ok, Db} ->
+ case open_index_file(RootDir, DbName, Sig) of
+ {ok, Fd} ->
+ if ForceReset ->
+ % this can happen if we missed a purge
+ {ok, Db, reset_file(Db, Fd, DbName, Group)};
+ true ->
+ % 09 UPGRADE CODE
+ ok = couch_file:upgrade_old_header(Fd, <<$r, $c, $k, 0>>),
+ case (catch couch_file:read_header(Fd)) of
+ {ok, {Sig, HeaderInfo}} ->
+ % sigs match!
+ {ok, Db, init_group(Db, Fd, Group, HeaderInfo)};
+ _ ->
+ % this happens on a new file
+ {ok, Db, reset_file(Db, Fd, DbName, Group)}
+ end
+ end;
+ Error ->
+ catch delete_index_file(RootDir, DbName, Sig),
+ Error
end;
- Error ->
- catch delete_index_file(RootDir, DbName, Sig),
- Error
+ Else ->
+ Else
end.
get_index_header_data(#group{current_seq=Seq, purge_seq=PurgeSeq,
@@ -497,7 +493,7 @@ open_db_group(DbName, GroupId) ->
end)
end),
receive {'DOWN', Ref, process, Pid, {ok, Doc}} ->
- {ok, design_doc_to_view_group(Doc)};
+ {ok, design_doc_to_view_group(Doc)};
{'DOWN', Ref, process, Pid, Error} ->
Error
end.
@@ -575,31 +571,26 @@ design_doc_to_view_group(#doc{id=Id,body={Fields}}) ->
end, 0, lists:sort(dict:to_list(DictBySrc))),
set_view_sig(#group{name=Id, lib=Lib, views=Views, def_lang=Language, design_options=DesignOptions}).
-reset_group(DbName, #group{views=Views}=Group) ->
+reset_group(#group{views=Views}=Group) ->
Views2 = [View#view{btree=nil} || View <- Views],
- Group#group{dbname=DbName,fd=nil,query_server=nil,current_seq=0,
+ Group#group{fd=nil,query_server=nil,current_seq=0,
id_btree=nil,views=Views2}.
-reset_file(Fd, DbName, #group{sig=Sig,name=Name} = Group) ->
+reset_file(Db, Fd, DbName, #group{sig=Sig,name=Name} = Group) ->
?LOG_DEBUG("Resetting group index \"~s\" in db ~s", [Name, DbName]),
ok = couch_file:truncate(Fd, 0),
ok = couch_file:write_header(Fd, {Sig, nil}),
- init_group(Fd, reset_group(DbName, Group), nil).
+ init_group(Db, Fd, reset_group(Group), nil).
delete_index_file(RootDir, DbName, GroupSig) ->
couch_file:delete(RootDir, index_file_name(RootDir, DbName, GroupSig)).
-init_group(Fd, #group{dbname=DbName, views=Views}=Group, nil) ->
- case couch_db:open(DbName, []) of
- {ok, Db} ->
- PurgeSeq = try couch_db:get_purge_seq(Db) after couch_db:close(Db) end,
- Header = #index_header{purge_seq=PurgeSeq, view_states=[{nil, 0, 0} || _ <- Views]},
- init_group(Fd, Group, Header);
- {not_found, no_db_file} ->
- ?LOG_ERROR("~p no_db_file ~p", [?MODULE, DbName]),
- exit(no_db_file)
- end;
-init_group(Fd, #group{def_lang=Lang,views=Views}=Group, IndexHeader) ->
+init_group(Db, Fd, #group{views=Views}=Group, nil) ->
+ init_group(Db, Fd, Group,
+ #index_header{seq=0, purge_seq=couch_db:get_purge_seq(Db),
+ id_btree_state=nil, view_states=[{nil, 0, 0} || _ <- Views]});
+init_group(_Db, Fd, #group{def_lang=Lang,views=Views}=
+ Group, IndexHeader) ->
#index_header{seq=Seq, purge_seq=PurgeSeq,
id_btree_state=IdBtreeState, view_states=ViewStates} = IndexHeader,
StateUpdate = fun
diff --git a/apps/couch/src/couch_view_updater.erl b/apps/couch/src/couch_view_updater.erl
index 90cb20d4..8238e3e5 100644
--- a/apps/couch/src/couch_view_updater.erl
+++ b/apps/couch/src/couch_view_updater.erl
@@ -12,29 +12,35 @@
-module(couch_view_updater).
--export([update/2, do_maps/4, do_writes/5, load_docs/3]).
+-export([update/3, do_maps/4, do_writes/5, load_docs/3]).
-include("couch_db.hrl").
--spec update(_, #group{}) -> no_return().
+-spec update(_, #group{}, Dbname::binary()) -> no_return().
-update(Owner, Group) ->
+update(Owner, Group, DbName) when is_binary(DbName) ->
+ {ok, Db} = couch_db:open_int(DbName, []),
+ try
+ update(Owner, Group, Db)
+ after
+ couch_db:close(Db)
+ end;
+
+update(Owner, Group, #db{name = DbName} = Db) ->
#group{
- dbname = DbName,
name = GroupName,
current_seq = Seq,
purge_seq = PurgeSeq
} = Group,
couch_task_status:add_task(<<"View Group Indexer">>, <<DbName/binary," ",GroupName/binary>>, <<"Starting index update">>),
- {ok, Db} = couch_db:open(DbName, []),
DbPurgeSeq = couch_db:get_purge_seq(Db),
Group2 =
if DbPurgeSeq == PurgeSeq ->
Group;
DbPurgeSeq == PurgeSeq + 1 ->
couch_task_status:update(<<"Removing purged entries from view index.">>),
- purge_index(Db, Group);
+ purge_index(Group, Db);
true ->
couch_task_status:update(<<"Resetting view index due to lost purge entries.">>),
exit(reset)
@@ -77,7 +83,7 @@ load_docs(DocInfo, _, {I, Db, MapQueue, DocOpts, IncludeDesign, Total} = Acc) ->
load_doc(Db, DocInfo, MapQueue, DocOpts, IncludeDesign),
{ok, setelement(1, Acc, I+1)}.
-purge_index(Db, #group{views=Views, id_btree=IdBtree}=Group) ->
+purge_index(#group{views=Views, id_btree=IdBtree}=Group, Db) ->
{ok, PurgedIdsRevs} = couch_db:get_last_purged(Db),
Ids = [Id || {Id, _Revs} <- PurgedIdsRevs],
{ok, Lookups, IdBtree2} = couch_btree:query_modify(IdBtree, Ids, [], Ids),
@@ -133,7 +139,7 @@ load_doc(Db, DI, MapQueue, DocOpts, IncludeDesign) ->
couch_work_queue:queue(MapQueue, {Seq, Doc})
end
end.
-
+
-spec do_maps(#group{}, pid(), pid(), any()) -> any().
do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) ->
case couch_work_queue:dequeue(MapQueue) of
@@ -162,10 +168,10 @@ do_writes(Parent, Owner, Group, WriteQueue, InitialBuild) ->
if Go =:= stop ->
Parent ! {new_group, Group2};
true ->
- case Owner of
- nil -> ok;
- _ -> ok = gen_server:cast(Owner, {partial_update, Parent, Group2})
- end,
+ case Owner of
+ nil -> ok;
+ _ -> ok = gen_server:cast(Owner, {partial_update, Parent, Group2})
+ end,
?MODULE:do_writes(Parent, Owner, Group2, WriteQueue, InitialBuild)
end
end.
diff --git a/apps/couch/src/test_util.erl b/apps/couch/src/test_util.erl
index 55b95139..f086bf94 100644
--- a/apps/couch/src/test_util.erl
+++ b/apps/couch/src/test_util.erl
@@ -14,6 +14,7 @@
-export([init_code_path/0]).
-export([source_file/1, build_file/1, config_files/0]).
+-export([request/3, request/4]).
init_code_path() ->
code:load_abs("apps/couch/test/etap/etap").
@@ -31,3 +32,30 @@ config_files() ->
source_file("test/etap/random_port.ini")
].
+request(Url, Headers, Method) ->
+ request(Url, Headers, Method, []).
+
+request(Url, Headers, Method, Body) ->
+ request(Url, Headers, Method, Body, 3).
+
+request(_Url, _Headers, _Method, _Body, 0) ->
+ {error, request_failed};
+request(Url, Headers, Method, Body, N) ->
+ case code:is_loaded(ibrowse) of
+ false ->
+ {ok, _} = ibrowse:start();
+ _ ->
+ ok
+ end,
+ case ibrowse:send_req(Url, Headers, Method, Body) of
+ {ok, Code0, RespHeaders, RespBody0} ->
+ Code = list_to_integer(Code0),
+ RespBody = iolist_to_binary(RespBody0),
+ {ok, Code, RespHeaders, RespBody};
+ {error, {'EXIT', {normal, _}}} ->
+ % Connection closed right after a successful request that
+ % used the same connection.
+ request(Url, Headers, Method, Body, N - 1);
+ Error ->
+ Error
+ end.
diff --git a/apps/couch/test/etap/020-btree-basics.t b/apps/couch/test/etap/020-btree-basics.t
index 996d240a..c65d79c2 100755
--- a/apps/couch/test/etap/020-btree-basics.t
+++ b/apps/couch/test/etap/020-btree-basics.t
@@ -128,6 +128,7 @@ test_btree(Btree, KeyValues) ->
ok = test_key_access(Btree, KeyValues),
ok = test_lookup_access(Btree, KeyValues),
ok = test_final_reductions(Btree, KeyValues),
+ ok = test_traversal_callbacks(Btree, KeyValues),
true.
test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
@@ -188,6 +189,18 @@ test_final_reductions(Btree, KeyValues) ->
KVLen = FoldLRed + FoldRRed,
ok.
+test_traversal_callbacks(Btree, KeyValues) ->
+ FoldFun =
+ fun
+ (visit, GroupedKey, Unreduced, Acc) ->
+ {ok, Acc andalso false};
+ (traverse, _LK, _Red, Acc) ->
+ {skip, Acc andalso true}
+ end,
+    % With 250 items the root is a kp_node. Always skipping should reduce to true.
+ {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
+ ok.
+
shuffle(List) ->
randomize(round(math:log(length(List)) + 0.5), List).
diff --git a/apps/couch/test/javascript/run.tpl b/apps/couch/test/javascript/run.tpl
index c5abe6e7..1389a4f9 100644
--- a/apps/couch/test/javascript/run.tpl
+++ b/apps/couch/test/javascript/run.tpl
@@ -27,4 +27,4 @@ cat $SCRIPT_DIR/json2.js \
$SCRIPT_DIR/test/*.js \
$JS_TEST_DIR/couch_http.js \
$JS_TEST_DIR/cli_runner.js \
- | $COUCHJS -
+ | $COUCHJS --http -
diff --git a/couchjs/c_src/SConscript b/couchjs/c_src/SConscript
index cfce5605..b014e2ec 100644
--- a/couchjs/c_src/SConscript
+++ b/couchjs/c_src/SConscript
@@ -19,7 +19,10 @@ def require_lib(name):
print 'Could not find required library', name
Exit(1)
-env = Environment(CCFLAGS='-g -O2 -DXP_UNIX')
+def runcmd(cmd):
+ return commands.getstatusoutput(cmd)
+
+env = Environment(CC="c++", CCFLAGS='-g -O2 -DXP_UNIX')
if os.uname()[0] == 'SunOS':
env['CC'] = '/usr/sfw/bin/gcc'
@@ -33,12 +36,15 @@ if os.uname()[0] == 'FreeBSD':
env['LIB_COMPAT'] = 'compat'
if os.path.exists('/usr/bin/pkg-config'):
- (status, output) = commands.getstatusoutput("/usr/bin/pkg-config mozilla-js --cflags")
- if status == 0:
- env['CCFLAGS'] += output
- (status, output) = commands.getstatusoutput("/usr/bin/pkg-config mozilla-js --libs-only-L")
- if status == 0:
- env.Append(LINKFLAGS=output)
+ for pkg in ["mozilla-js-185", "mozilla-js"]:
+ (s1, output) = runcmd("/usr/bin/pkg-config %s --cflags" % pkg)
+ if s1 == 0:
+ env.Append(CCFLAGS=" " + output)
+ (s2, output) = runcmd("/usr/bin/pkg-config %s --libs-only-L" % pkg)
+ if s2 == 0:
+ env.Append(LINKFLAGS=output)
+ if s1 == 0 or s2 == 0:
+ break
if not env.GetOption('clean'):
conf = Configure(env, config_h='config.h')
@@ -46,12 +52,13 @@ if not env.GetOption('clean'):
require_lib('m')
require_lib('pthread')
require_lib('curl')
+ require_lib('nspr4')
## check for SpiderMonkey development header
- if conf.CheckHeader('js/jsapi.h'):
- jsapi = 'js/jsapi.h'
- elif conf.CheckHeader('mozjs/jsapi.h'):
+ if conf.CheckHeader('mozjs/jsapi.h'):
jsapi = 'mozjs/jsapi.h'
+ elif conf.CheckHeader('js/jsapi.h'):
+ jsapi = 'js/jsapi.h'
elif conf.CheckHeader('jsapi.h'):
jsapi = 'jsapi.h'
else:
@@ -60,16 +67,25 @@ if not env.GetOption('clean'):
Exit(1)
## check for SpiderMonkey library as libjs or libmozjs
- if not conf.CheckLibWithHeader('mozjs', jsapi, 'c', autoadd=1):
- if not conf.CheckLibWithHeader('js', jsapi, 'c', autoadd=1):
- print 'Could not find JS library.', \
- 'Is Mozilla SpiderMonkey installed?'
- Exit(1)
+ if not conf.CheckLibWithHeader('mozjs185-1.0', jsapi, 'c', autoadd=1):
+ if not conf.CheckLibWithHeader('mozjs', jsapi, 'c', autoadd=1):
+ if not conf.CheckLibWithHeader('js', jsapi, 'c', autoadd=1):
+ print 'Could not find JS library.', \
+ 'Is Mozilla SpiderMonkey installed?'
+ Exit(1)
- ## SpiderMonkey 1.8 has this callback we use for memory management
- if conf.CheckDeclaration('JS_SetOperationCallback', '#include <%s>' % jsapi):
- conf.Define('USE_JS_SETOPCB')
+ ## Detect the version of SpiderMonkey we're using
+ jsheader = "#include <%s>" % jsapi
+ versions = [
+ ("JS_NewCompartmentAndGlobalObject", "SM185"),
+ ("JS_ThrowStopIteration", "SM180"),
+ ("JS_GetStringCharsAndLength", "SM170")
+ ]
+ for func, vsn in versions:
+ if conf.CheckDeclaration(func, jsheader):
+ conf.Define(vsn)
+ break
env = conf.Finish()
-env.Program('couchjs', ['main.c', 'http.c', 'utf8.c'])
+env.Program('couchjs', ['main.c', 'http.c', 'utf8.c', 'util.c'])
diff --git a/couchjs/c_src/http.c b/couchjs/c_src/http.c
index b781f0ef..aa21515c 100644
--- a/couchjs/c_src/http.c
+++ b/couchjs/c_src/http.c
@@ -13,29 +13,87 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
-
#include "config.h"
-#ifdef HAVE_JS_JSAPI_H
-#include <js/jsapi.h>
-#elif HAVE_MOZJS_JSAPI_H
-#include <mozjs/jsapi.h>
-#else
-#include <jsapi.h>
-#endif
+#include "sm.h"
+#include "utf8.h"
+
+// Soft dependency on cURL bindings because they're
+// only used when running the JS tests from the
+// command line, which is rare.
+#ifndef HAVE_LIBCURL
+
+void
+http_check_enabled()
+{
+ fprintf(stderr, "HTTP API was disabled at compile time.\n");
+ exit(3);
+}
+
+
+JSBool
+http_ctor(JSContext* cx, JSObject* req)
+{
+ return JS_FALSE;
+}
+
+
+void
+http_dtor(JSContext* cx, JSObject* req)
+{
+    return;
+}
+
+
+JSBool
+http_open(JSContext* cx, JSObject* req, jsval mth, jsval url, jsval snc)
+{
+ return JS_FALSE;
+}
+
+
+JSBool
+http_set_hdr(JSContext* cx, JSObject* req, jsval name, jsval val)
+{
+ return JS_FALSE;
+}
+
+
+JSBool
+http_send(JSContext* cx, JSObject* req, jsval body)
+{
+ return JS_FALSE;
+}
+
+
+int
+http_status(JSContext* cx, JSObject* req)
+{
+ return -1;
+}
+
+#else
#include <curl/curl.h>
-#include "utf8.h"
-#ifdef XP_WIN
+void
+http_check_enabled()
+{
+ return;
+}
+
+
// Map some of the string function names to things which exist on Windows
+#ifdef XP_WIN
#define strcasecmp _strcmpi
#define strncasecmp _strnicmp
#define snprintf _snprintf
#endif
+
typedef struct curl_slist CurlHeaders;
+
typedef struct {
int method;
char* url;
@@ -43,7 +101,9 @@ typedef struct {
jsint last_status;
} HTTPData;
-char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", NULL};
+
+const char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", NULL};
+
#define GET 0
#define HEAD 1
@@ -52,14 +112,17 @@ char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", NULL};
#define DELETE 4
#define COPY 5
+
static JSBool
go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t blen);
+
static JSString*
str_from_binary(JSContext* cx, char* data, size_t length);
-static JSBool
-constructor(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+
+JSBool
+http_ctor(JSContext* cx, JSObject* req)
{
HTTPData* http = NULL;
JSBool ret = JS_FALSE;
@@ -76,7 +139,7 @@ constructor(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
http->req_headers = NULL;
http->last_status = -1;
- if(!JS_SetPrivate(cx, obj, http))
+ if(!JS_SetPrivate(cx, req, http))
{
JS_ReportError(cx, "Failed to set private CouchHTTP data.");
goto error;
@@ -92,90 +155,76 @@ success:
return ret;
}
-static void
-destructor(JSContext* cx, JSObject* obj)
+
+void
+http_dtor(JSContext* cx, JSObject* obj)
{
HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- if(!http)
- {
- fprintf(stderr, "Unable to destroy invalid CouchHTTP instance.\n");
- }
- else
- {
+ if(http) {
if(http->url) free(http->url);
if(http->req_headers) curl_slist_free_all(http->req_headers);
free(http);
}
}
-static JSBool
-open(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+
+JSBool
+http_open(JSContext* cx, JSObject* req, jsval mth, jsval url, jsval snc)
{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
+ HTTPData* http = (HTTPData*) JS_GetPrivate(cx, req);
char* method = NULL;
- char* url = NULL;
- JSBool ret = JS_FALSE;
int methid;
+ JSBool ret = JS_FALSE;
- if(!http)
- {
+ if(!http) {
JS_ReportError(cx, "Invalid CouchHTTP instance.");
goto done;
}
- if(argv[0] == JSVAL_VOID)
- {
+ if(mth == JSVAL_VOID) {
JS_ReportError(cx, "You must specify a method.");
goto done;
}
- method = enc_string(cx, argv[0], NULL);
- if(!method)
- {
+ method = enc_string(cx, mth, NULL);
+ if(!method) {
JS_ReportError(cx, "Failed to encode method.");
goto done;
}
- for(methid = 0; METHODS[methid] != NULL; methid++)
- {
+ for(methid = 0; METHODS[methid] != NULL; methid++) {
if(strcasecmp(METHODS[methid], method) == 0) break;
}
- if(methid > COPY)
- {
+ if(methid > COPY) {
JS_ReportError(cx, "Invalid method specified.");
goto done;
}
http->method = methid;
- if(argv[1] == JSVAL_VOID)
- {
+ if(url == JSVAL_VOID) {
JS_ReportError(cx, "You must specify a URL.");
goto done;
}
- if(http->url)
- {
+ if(http->url != NULL) {
free(http->url);
http->url = NULL;
}
- http->url = enc_string(cx, argv[1], NULL);
- if(!http->url)
- {
+ http->url = enc_string(cx, url, NULL);
+ if(http->url == NULL) {
JS_ReportError(cx, "Failed to encode URL.");
goto done;
}
- if(argv[2] != JSVAL_VOID && argv[2] != JSVAL_FALSE)
- {
- JS_ReportError(cx, "Synchronous flag must be false if specified.");
+ if(snc != JSVAL_FALSE) {
+ JS_ReportError(cx, "Synchronous flag must be false.");
goto done;
}
- if(http->req_headers)
- {
+ if(http->req_headers) {
curl_slist_free_all(http->req_headers);
http->req_headers = NULL;
}
@@ -190,42 +239,42 @@ done:
return ret;
}
-static JSBool
-setheader(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+
+JSBool
+http_set_hdr(JSContext* cx, JSObject* req, jsval name, jsval val)
{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
+ HTTPData* http = (HTTPData*) JS_GetPrivate(cx, req);
char* keystr = NULL;
char* valstr = NULL;
char* hdrbuf = NULL;
size_t hdrlen = -1;
JSBool ret = JS_FALSE;
- if(!http)
- {
+ if(!http) {
JS_ReportError(cx, "Invalid CouchHTTP instance.");
goto done;
}
- if(argv[0] == JSVAL_VOID)
+ if(name == JSVAL_VOID)
{
JS_ReportError(cx, "You must speciy a header name.");
goto done;
}
- keystr = enc_string(cx, argv[0], NULL);
+ keystr = enc_string(cx, name, NULL);
if(!keystr)
{
JS_ReportError(cx, "Failed to encode header name.");
goto done;
}
- if(argv[1] == JSVAL_VOID)
+ if(val == JSVAL_VOID)
{
JS_ReportError(cx, "You must specify a header value.");
goto done;
}
- valstr = enc_string(cx, argv[1], NULL);
+ valstr = enc_string(cx, val, NULL);
if(!valstr)
{
JS_ReportError(cx, "Failed to encode header value.");
@@ -234,8 +283,7 @@ setheader(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
hdrlen = strlen(keystr) + strlen(valstr) + 3;
hdrbuf = (char*) malloc(hdrlen * sizeof(char));
- if(!hdrbuf)
- {
+ if(!hdrbuf) {
JS_ReportError(cx, "Failed to allocate header buffer.");
goto done;
}
@@ -249,121 +297,50 @@ done:
if(keystr) free(keystr);
if(valstr) free(valstr);
if(hdrbuf) free(hdrbuf);
-
return ret;
}
-static JSBool
-sendreq(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+JSBool
+http_send(JSContext* cx, JSObject* req, jsval body)
{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- char* body = NULL;
+ HTTPData* http = (HTTPData*) JS_GetPrivate(cx, req);
+ char* bodystr = NULL;
size_t bodylen = 0;
JSBool ret = JS_FALSE;
- if(!http)
- {
+ if(!http) {
JS_ReportError(cx, "Invalid CouchHTTP instance.");
goto done;
}
- if(argv[0] != JSVAL_VOID && argv[0] != JS_GetEmptyStringValue(cx))
- {
- body = enc_string(cx, argv[0], &bodylen);
- if(!body)
- {
+ if(body != JSVAL_VOID && body != JS_GetEmptyStringValue(cx)) {
+ bodystr = enc_string(cx, body, &bodylen);
+ if(!bodystr) {
JS_ReportError(cx, "Failed to encode body.");
goto done;
}
}
- ret = go(cx, obj, http, body, bodylen);
+ ret = go(cx, req, http, bodystr, bodylen);
done:
- if(body) free(body);
+ if(bodystr) free(bodystr);
return ret;
}
-static JSBool
-status(JSContext* cx, JSObject* obj, jsval idval, jsval* vp)
+int
+http_status(JSContext* cx, JSObject* req)
{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
+ HTTPData* http = (HTTPData*) JS_GetPrivate(cx, req);
- if(!http)
- {
+ if(!http) {
JS_ReportError(cx, "Invalid CouchHTTP instance.");
return JS_FALSE;
}
- if(INT_FITS_IN_JSVAL(http->last_status))
- {
- *vp = INT_TO_JSVAL(http->last_status);
- return JS_TRUE;
- }
- else
- {
- JS_ReportError(cx, "INTERNAL: Invalid last_status");
- return JS_FALSE;
- }
-}
-
-JSClass CouchHTTPClass = {
- "CouchHTTP",
- JSCLASS_HAS_PRIVATE
- | JSCLASS_CONSTRUCT_PROTOTYPE
- | JSCLASS_HAS_RESERVED_SLOTS(2),
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_EnumerateStub,
- JS_ResolveStub,
- JS_ConvertStub,
- destructor,
- JSCLASS_NO_OPTIONAL_MEMBERS
-};
-
-JSPropertySpec CouchHTTPProperties[] = {
- {"status", 0, JSPROP_READONLY, status, NULL},
- {0, 0, 0, 0, 0}
-};
-
-JSFunctionSpec CouchHTTPFunctions[] = {
- {"_open", open, 3, 0, 0},
- {"_setRequestHeader", setheader, 2, 0, 0},
- {"_send", sendreq, 1, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-JSObject*
-install_http(JSContext* cx, JSObject* glbl)
-{
- JSObject* klass = NULL;
- HTTPData* http = NULL;
-
- klass = JS_InitClass(
- cx,
- glbl,
- NULL,
- &CouchHTTPClass,
- constructor,
- 0,
- CouchHTTPProperties,
- CouchHTTPFunctions,
- NULL,
- NULL
- );
-
- if(!klass)
- {
- fprintf(stderr, "Failed to initialize CouchHTTP class.\n");
- return NULL;
- }
-
- return klass;
+ return http->last_status;
}
-
// Curl Helpers
typedef struct {
@@ -373,6 +350,7 @@ typedef struct {
char* sendbuf;
size_t sendlen;
size_t sent;
+ int sent_once;
char* recvbuf;
size_t recvlen;
size_t read;
@@ -404,13 +382,13 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
state.sendbuf = body;
state.sendlen = bodylen;
state.sent = 0;
+ state.sent_once = 0;
state.recvbuf = NULL;
state.recvlen = 0;
state.read = 0;
- if(HTTP_HANDLE == NULL)
- {
+ if(HTTP_HANDLE == NULL) {
HTTP_HANDLE = curl_easy_init();
curl_easy_setopt(HTTP_HANDLE, CURLOPT_READFUNCTION, send_body);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKFUNCTION,
@@ -425,14 +403,12 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
"CouchHTTP Client - Relax");
}
- if(!HTTP_HANDLE)
- {
+ if(!HTTP_HANDLE) {
JS_ReportError(cx, "Failed to initialize cURL handle.");
goto done;
}
- if(http->method < 0 || http->method > COPY)
- {
+ if(http->method < 0 || http->method > COPY) {
JS_ReportError(cx, "INTERNAL: Unknown method.");
goto done;
}
@@ -442,27 +418,21 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 1);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 0);
- if(http->method == HEAD)
- {
+ if(http->method == HEAD) {
curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOBODY, 1);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0);
- }
- else if(http->method == POST || http->method == PUT)
- {
+ } else if(http->method == POST || http->method == PUT) {
curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 1);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0);
}
- if(body && bodylen)
- {
+ if(body && bodylen) {
curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, bodylen);
- }
- else
- {
+ } else {
curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, 0);
}
- //curl_easy_setopt(HTTP_HANDLE, CURLOPT_VERBOSE, 1);
+ // curl_easy_setopt(HTTP_HANDLE, CURLOPT_VERBOSE, 1);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_URL, http->url);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_HTTPHEADER, http->req_headers);
@@ -471,39 +441,32 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEHEADER, &state);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEDATA, &state);
- if(curl_easy_perform(HTTP_HANDLE) != 0)
- {
+ if(curl_easy_perform(HTTP_HANDLE) != 0) {
JS_ReportError(cx, "Failed to execute HTTP request: %s", ERRBUF);
goto done;
}
- if(!state.resp_headers)
- {
+ if(!state.resp_headers) {
JS_ReportError(cx, "Failed to recieve HTTP headers.");
goto done;
}
tmp = OBJECT_TO_JSVAL(state.resp_headers);
if(!JS_DefineProperty(
- cx,
- obj,
+ cx, obj,
"_headers",
tmp,
- NULL,
- NULL,
+ NULL, NULL,
JSPROP_READONLY
- ))
- {
+ )) {
JS_ReportError(cx, "INTERNAL: Failed to set response headers.");
goto done;
}
- if(state.recvbuf) // Is good enough?
- {
+ if(state.recvbuf) {
state.recvbuf[state.read] = '\0';
jsbody = dec_string(cx, state.recvbuf, state.read+1);
- if(!jsbody)
- {
+ if(!jsbody) {
// If we can't decode the body as UTF-8 we forcefully
// convert it to a string by just forcing each byte
// to a jschar.
@@ -516,22 +479,17 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
}
}
tmp = STRING_TO_JSVAL(jsbody);
- }
- else
- {
+ } else {
tmp = JS_GetEmptyStringValue(cx);
}
if(!JS_DefineProperty(
- cx,
- obj,
+ cx, obj,
"responseText",
tmp,
- NULL,
- NULL,
+ NULL, NULL,
JSPROP_READONLY
- ))
- {
+ )) {
JS_ReportError(cx, "INTERNAL: Failed to set responseText.");
goto done;
}
@@ -549,15 +507,20 @@ send_body(void *ptr, size_t size, size_t nmem, void *data)
CurlState* state = (CurlState*) data;
size_t length = size * nmem;
size_t towrite = state->sendlen - state->sent;
- if(towrite == 0)
- {
+
+    // First time out of data: signal EOF. If cURL asks again it is
+    // retrying the request, so rewind and resend the whole body.
+ if(towrite == 0 && state->sent_once == 0) {
+ state->sent_once = 1;
return 0;
+ } else if(towrite == 0) {
+ state->sent = 0;
+ state->sent_once = 0;
+ towrite = state->sendlen;
}
if(length < towrite) towrite = length;
- //fprintf(stderr, "%lu %lu %lu %lu\n", state->bodyused, state->bodyread, length, towrite);
-
memcpy(ptr, state->sendbuf + state->sent, towrite);
state->sent += towrite;
@@ -581,15 +544,12 @@ recv_header(void *ptr, size_t size, size_t nmem, void *data)
char code[4];
char* header = (char*) ptr;
size_t length = size * nmem;
- size_t index = 0;
JSString* hdr = NULL;
jsuint hdrlen;
jsval hdrval;
- if(length > 7 && strncasecmp(header, "HTTP/1.", 7) == 0)
- {
- if(length < 12)
- {
+ if(length > 7 && strncasecmp(header, "HTTP/1.", 7) == 0) {
+ if(length < 12) {
return CURLE_WRITE_ERROR;
}
@@ -598,8 +558,7 @@ recv_header(void *ptr, size_t size, size_t nmem, void *data)
state->http->last_status = atoi(code);
state->resp_headers = JS_NewArrayObject(state->cx, 0, NULL);
- if(!state->resp_headers)
- {
+ if(!state->resp_headers) {
return CURLE_WRITE_ERROR;
}
@@ -607,26 +566,22 @@ recv_header(void *ptr, size_t size, size_t nmem, void *data)
}
// We get a notice at the \r\n\r\n after headers.
- if(length <= 2)
- {
+ if(length <= 2) {
return length;
}
// Append the new header to our array.
hdr = dec_string(state->cx, header, length);
- if(!hdr)
- {
+ if(!hdr) {
return CURLE_WRITE_ERROR;
}
- if(!JS_GetArrayLength(state->cx, state->resp_headers, &hdrlen))
- {
+ if(!JS_GetArrayLength(state->cx, state->resp_headers, &hdrlen)) {
return CURLE_WRITE_ERROR;
}
hdrval = STRING_TO_JSVAL(hdr);
- if(!JS_SetElement(state->cx, state->resp_headers, hdrlen, &hdrval))
- {
+ if(!JS_SetElement(state->cx, state->resp_headers, hdrlen, &hdrval)) {
return CURLE_WRITE_ERROR;
}
@@ -640,21 +595,19 @@ recv_body(void *ptr, size_t size, size_t nmem, void *data)
size_t length = size * nmem;
char* tmp = NULL;
- if(!state->recvbuf)
- {
+ if(!state->recvbuf) {
state->recvlen = 4096;
state->read = 0;
- state->recvbuf = JS_malloc(state->cx, state->recvlen);
+ state->recvbuf = (char*) JS_malloc(state->cx, state->recvlen);
}
- if(!state->recvbuf)
- {
+ if(!state->recvbuf) {
return CURLE_WRITE_ERROR;
}
// +1 so we can add '\0' back up in the go function.
while(length+1 > state->recvlen - state->read) state->recvlen *= 2;
- tmp = JS_realloc(state->cx, state->recvbuf, state->recvlen);
+ tmp = (char*) JS_realloc(state->cx, state->recvbuf, state->recvlen);
if(!tmp) return CURLE_WRITE_ERROR;
state->recvbuf = tmp;
@@ -672,8 +625,7 @@ str_from_binary(JSContext* cx, char* data, size_t length)
if(!conv) return NULL;
- for(i = 0; i < length; i++)
- {
+ for(i = 0; i < length; i++) {
conv[i] = (jschar) data[i];
}
@@ -682,3 +634,5 @@ str_from_binary(JSContext* cx, char* data, size_t length)
return ret;
}
+
+#endif /* HAVE_LIBCURL */
diff --git a/couchjs/c_src/http.h b/couchjs/c_src/http.h
index b5f8c70f..75a0d561 100644
--- a/couchjs/c_src/http.h
+++ b/couchjs/c_src/http.h
@@ -13,6 +13,14 @@
#ifndef COUCH_JS_HTTP_H
#define COUCH_JS_HTTP_H
-JSObject* install_http(JSContext* cx, JSObject* global);
+#include "sm.h"
-#endif
\ No newline at end of file
+void http_check_enabled();
+JSBool http_ctor(JSContext* cx, JSObject* req);
+void http_dtor(JSContext* cx, JSObject* req);
+JSBool http_open(JSContext* cx, JSObject* req, jsval mth, jsval url, jsval snc);
+JSBool http_set_hdr(JSContext* cx, JSObject* req, jsval name, jsval val);
+JSBool http_send(JSContext* cx, JSObject* req, jsval body);
+int http_status(JSContext* cx, JSObject* req);
+
+#endif
diff --git a/couchjs/c_src/main.c b/couchjs/c_src/main.c
index 25acaf55..209bb023 100644
--- a/couchjs/c_src/main.c
+++ b/couchjs/c_src/main.c
@@ -10,336 +10,12 @@
// License for the specific language governing permissions and limitations under
// the License.
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-
#include "config.h"
-#ifdef HAVE_JS_JSAPI_H
-#include <js/jsapi.h>
-#elif HAVE_MOZJS_JSAPI_H
-#include <mozjs/jsapi.h>
-#else
-#include <jsapi.h>
-#endif
-
-#include "utf8.h"
-#include "http.h"
-int gExitCode = 0;
-
-#ifdef JS_THREADSAFE
-#define SETUP_REQUEST(cx) \
- JS_SetContextThread(cx); \
- JS_BeginRequest(cx);
-#define FINISH_REQUEST(cx) \
- JS_EndRequest(cx); \
- JS_ClearContextThread(cx);
+#if defined(SM185)
+#include "sm185.c"
+#elif defined(SM180)
+#include "sm180.c"
#else
-#define SETUP_REQUEST(cx)
-#define FINISH_REQUEST(cx)
+#include "sm170.c"
#endif
-
-static JSBool
-evalcx(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JSString *str;
- JSObject *sandbox;
- JSContext *subcx;
- const jschar *src;
- size_t srclen;
- JSBool ret = JS_FALSE;
- jsval v;
-
- sandbox = NULL;
- if(!JS_ConvertArguments(cx, argc, argv, "S / o", &str, &sandbox))
- {
- return JS_FALSE;
- }
-
- subcx = JS_NewContext(JS_GetRuntime(cx), 8L * 1024L);
- if(!subcx)
- {
- JS_ReportOutOfMemory(cx);
- return JS_FALSE;
- }
-
- SETUP_REQUEST(subcx);
-
- src = JS_GetStringChars(str);
- srclen = JS_GetStringLength(str);
-
- if(!sandbox)
- {
- sandbox = JS_NewObject(subcx, NULL, NULL, NULL);
- if(!sandbox || !JS_InitStandardClasses(subcx, sandbox)) goto done;
- }
-
- if(srclen == 0)
- {
- *rval = OBJECT_TO_JSVAL(sandbox);
- }
- else
- {
- JS_EvaluateUCScript(subcx, sandbox, src, srclen, NULL, 0, rval);
- }
-
- ret = JS_TRUE;
-
-done:
- FINISH_REQUEST(subcx);
- JS_DestroyContext(subcx);
- return ret;
-}
-
-static JSBool
-gc(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JS_GC(cx);
- return JS_TRUE;
-}
-
-static JSBool
-print(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- uintN i;
- char *bytes;
-
- for(i = 0; i < argc; i++)
- {
- bytes = enc_string(cx, argv[i], NULL);
- if(!bytes) return JS_FALSE;
-
- fprintf(stdout, "%s%s", i ? " " : "", bytes);
- JS_free(cx, bytes);
- }
-
- fputc('\n', stdout);
- fflush(stdout);
- return JS_TRUE;
-}
-
-static JSBool
-quit(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JS_ConvertArguments(cx, argc, argv, "/ i", &gExitCode);
- return JS_FALSE;
-}
-
-static char*
-readfp(JSContext* cx, FILE* fp, size_t* buflen)
-{
- char* bytes = NULL;
- char* tmp = NULL;
- size_t used = 0;
- size_t byteslen = 256;
- size_t readlen = 0;
-
- bytes = JS_malloc(cx, byteslen);
- if(bytes == NULL) return NULL;
-
- while((readlen = js_fgets(bytes+used, byteslen-used, stdin)) > 0)
- {
- used += readlen;
-
- if(bytes[used-1] == '\n')
- {
- bytes[used-1] = '\0';
- break;
- }
-
- // Double our buffer and read more.
- byteslen *= 2;
- tmp = JS_realloc(cx, bytes, byteslen);
- if(!tmp)
- {
- JS_free(cx, bytes);
- return NULL;
- }
- bytes = tmp;
- }
-
- *buflen = used;
- return bytes;
-}
-
-static JSBool
-readline(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval) {
- jschar *chars;
- JSString *str;
- char* bytes;
- char* tmp;
- size_t byteslen;
-
- /* GC Occasionally */
- JS_MaybeGC(cx);
-
- bytes = readfp(cx, stdin, &byteslen);
- if(!bytes) return JS_FALSE;
-
- /* Treat the empty string specially */
- if(byteslen == 0)
- {
- *rval = JS_GetEmptyStringValue(cx);
- JS_free(cx, bytes);
- return JS_TRUE;
- }
-
- /* Shrink the buffer to the real size */
- tmp = JS_realloc(cx, bytes, byteslen);
- if(!tmp)
- {
- JS_free(cx, bytes);
- return JS_FALSE;
- }
- bytes = tmp;
-
- str = dec_string(cx, bytes, byteslen);
- JS_free(cx, bytes);
-
- if(!str) return JS_FALSE;
-
- *rval = STRING_TO_JSVAL(str);
-
- return JS_TRUE;
-}
-
-static JSBool
-seal(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval) {
- JSObject *target;
- JSBool deep = JS_FALSE;
-
- if (!JS_ConvertArguments(cx, argc, argv, "o/b", &target, &deep))
- return JS_FALSE;
- if (!target)
- return JS_TRUE;
- return JS_SealObject(cx, target, deep);
-}
-
-static void
-execute_script(JSContext *cx, JSObject *obj, const char *filename) {
- FILE *file;
- JSScript *script;
- jsval result;
-
- if(!filename || strcmp(filename, "-") == 0)
- {
- file = stdin;
- }
- else
- {
- file = fopen(filename, "r");
- if (!file)
- {
- fprintf(stderr, "could not open script file %s\n", filename);
- gExitCode = 1;
- return;
- }
- }
-
- script = JS_CompileFileHandle(cx, obj, filename, file);
- if(script)
- {
- JS_ExecuteScript(cx, obj, script, &result);
- JS_DestroyScript(cx, script);
- }
-}
-
-static void
-printerror(JSContext *cx, const char *mesg, JSErrorReport *report)
-{
- if(!report || !JSREPORT_IS_WARNING(report->flags))
- {
- fprintf(stderr, "%s\n", mesg);
- }
-}
-
-static JSFunctionSpec global_functions[] = {
- {"evalcx", evalcx, 0, 0, 0},
- {"gc", gc, 0, 0, 0},
- {"print", print, 0, 0, 0},
- {"quit", quit, 0, 0, 0},
- {"readline", readline, 0, 0, 0},
- {"seal", seal, 0, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-static JSClass global_class = {
- "GlobalClass",
- JSCLASS_GLOBAL_FLAGS,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_EnumerateStub,
- JS_ResolveStub,
- JS_ConvertStub,
- JS_FinalizeStub,
- JSCLASS_NO_OPTIONAL_MEMBERS
-};
-
-int
-main(int argc, const char * argv[])
-{
- JSRuntime* rt = NULL;
- JSContext* cx = NULL;
- JSObject* global = NULL;
- JSFunctionSpec* sp = NULL;
- int i = 0;
-
- rt = JS_NewRuntime(64L * 1024L * 1024L);
- if (!rt) return 1;
-
- cx = JS_NewContext(rt, 8L * 1024L);
- if (!cx) return 1;
-
- JS_SetErrorReporter(cx, printerror);
- JS_ToggleOptions(cx, JSOPTION_XML);
-
- SETUP_REQUEST(cx);
-
- global = JS_NewObject(cx, &global_class, NULL, NULL);
- if (!global) return 1;
- if (!JS_InitStandardClasses(cx, global)) return 1;
-
- for(sp = global_functions; sp->name != NULL; sp++)
- {
- if(!JS_DefineFunction(cx, global,
- sp->name, sp->call, sp->nargs, sp->flags))
- {
- fprintf(stderr, "Failed to create function: %s\n", sp->name);
- return 1;
- }
- }
-
- if(!install_http(cx, global))
- {
- return 1;
- }
-
- JS_SetGlobalObject(cx, global);
-
- if(argc > 2)
- {
- fprintf(stderr, "incorrect number of arguments\n\n");
- fprintf(stderr, "usage: %s <scriptfile>\n", argv[0]);
- return 2;
- }
-
- if(argc == 0)
- {
- execute_script(cx, global, NULL);
- }
- else
- {
- execute_script(cx, global, argv[1]);
- }
-
- FINISH_REQUEST(cx);
-
- JS_DestroyContext(cx);
- JS_DestroyRuntime(rt);
- JS_ShutDown();
-
- return gExitCode;
-}
diff --git a/couchjs/c_src/sm.h b/couchjs/c_src/sm.h
new file mode 100644
index 00000000..a40e054d
--- /dev/null
+++ b/couchjs/c_src/sm.h
@@ -0,0 +1,13 @@
+#ifndef COUCHJS_SM_H
+#define COUCHJS_SM_H
+
+#include "config.h"
+#ifdef HAVE_JS_JSAPI_H
+#include <js/jsapi.h>
+#elif HAVE_MOZJS_JSAPI_H
+#include <mozjs/jsapi.h>
+#else
+#include <jsapi.h>
+#endif
+
+#endif // included sm.h
diff --git a/couchjs/c_src/sm170.c b/couchjs/c_src/sm170.c
new file mode 100644
index 00000000..44cb5870
--- /dev/null
+++ b/couchjs/c_src/sm170.c
@@ -0,0 +1,378 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "http.h"
+#include "sm.h"
+#include "utf8.h"
+#include "util.h"
+
+
+#ifdef JS_THREADSAFE
+#define SETUP_REQUEST(cx) \
+ JS_SetContextThread(cx); \
+ JS_BeginRequest(cx);
+#define FINISH_REQUEST(cx) \
+ JS_EndRequest(cx); \
+ JS_ClearContextThread(cx);
+#else
+#define SETUP_REQUEST(cx)
+#define FINISH_REQUEST(cx)
+#endif
+
+
+static JSBool
+req_ctor(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ return http_ctor(cx, obj);
+}
+
+
+static void
+req_dtor(JSContext* cx, JSObject* obj)
+{
+ http_dtor(cx, obj);
+}
+
+
+static JSBool
+req_open(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JSBool ret = JS_FALSE;
+
+ if(argc == 2) {
+ ret = http_open(cx, obj, argv[0], argv[1], JSVAL_FALSE);
+ } else if(argc == 3) {
+ ret = http_open(cx, obj, argv[0], argv[1], argv[2]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.open");
+ }
+
+ *rval = JSVAL_VOID;
+ return ret;
+}
+
+
+static JSBool
+req_set_hdr(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JSBool ret = JS_FALSE;
+ if(argc == 2) {
+ ret = http_set_hdr(cx, obj, argv[0], argv[1]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.set_header");
+ }
+
+ *rval = JSVAL_VOID;
+ return ret;
+}
+
+
+static JSBool
+req_send(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JSBool ret = JS_FALSE;
+ if(argc == 1) {
+ ret = http_send(cx, obj, argv[0]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.send");
+ }
+
+ *rval = JSVAL_VOID;
+ return ret;
+}
+
+
+static JSBool
+req_status(JSContext* cx, JSObject* obj, jsval idval, jsval* rval)
+{
+ int status = http_status(cx, obj);
+ if(status < 0)
+ return JS_FALSE;
+
+ if(INT_FITS_IN_JSVAL(status)) {
+ *rval = INT_TO_JSVAL(status);
+ return JS_TRUE;
+ } else {
+ JS_ReportError(cx, "Invalid HTTP status.");
+ return JS_FALSE;
+ }
+}
+
+
+static JSBool
+evalcx(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
+{
+ JSString *str;
+ JSObject *sandbox;
+ JSContext *subcx;
+ const jschar *src;
+ size_t srclen;
+ JSBool ret = JS_FALSE;
+
+ sandbox = NULL;
+ if(!JS_ConvertArguments(cx, argc, argv, "S / o", &str, &sandbox)) {
+ return JS_FALSE;
+ }
+
+ subcx = JS_NewContext(JS_GetRuntime(cx), 8L * 1024L);
+ if(!subcx) {
+ JS_ReportOutOfMemory(cx);
+ return JS_FALSE;
+ }
+
+ SETUP_REQUEST(subcx);
+
+ src = JS_GetStringChars(str);
+ srclen = JS_GetStringLength(str);
+
+ if(!sandbox) {
+ sandbox = JS_NewObject(subcx, NULL, NULL, NULL);
+ if(!sandbox || !JS_InitStandardClasses(subcx, sandbox)) {
+ goto done;
+ }
+ }
+
+ if(srclen == 0) {
+ *rval = OBJECT_TO_JSVAL(sandbox);
+ } else {
+ JS_EvaluateUCScript(subcx, sandbox, src, srclen, NULL, 0, rval);
+ }
+
+ ret = JS_TRUE;
+
+done:
+ FINISH_REQUEST(subcx);
+ JS_DestroyContext(subcx);
+ return ret;
+}
+
+
+static JSBool
+gc(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JS_GC(cx);
+ *rval = JSVAL_VOID;
+ return JS_TRUE;
+}
+
+
+static JSBool
+print(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ couch_print(cx, argc, argv);
+ *rval = JSVAL_VOID;
+ return JS_TRUE;
+}
+
+
+static JSBool
+quit(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ int exit_code = 0;
+ JS_ConvertArguments(cx, argc, argv, "/i", &exit_code);
+ exit(exit_code);
+}
+
+
+static JSBool
+readline(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JSString* line;
+
+ /* GC Occasionally */
+ JS_MaybeGC(cx);
+
+ line = couch_readline(cx, stdin);
+ if(line == NULL) return JS_FALSE;
+
+ *rval = STRING_TO_JSVAL(line);
+ return JS_TRUE;
+}
+
+
+static JSBool
+seal(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ JSObject *target;
+ JSBool deep = JS_FALSE;
+
+ if(!JS_ConvertArguments(cx, argc, argv, "o/b", &target, &deep))
+ return JS_FALSE;
+
+ if(!target) {
+ *rval = JSVAL_VOID;
+ return JS_TRUE;
+ }
+
+    if(JS_SealObject(cx, target, deep) != JS_TRUE)
+ return JS_FALSE;
+
+ *rval = JSVAL_VOID;
+ return JS_TRUE;
+}
+
+
+JSClass CouchHTTPClass = {
+ "CouchHTTP",
+ JSCLASS_HAS_PRIVATE
+ | JSCLASS_CONSTRUCT_PROTOTYPE
+ | JSCLASS_HAS_RESERVED_SLOTS(2),
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ req_dtor,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+JSPropertySpec CouchHTTPProperties[] = {
+ {"status", 0, JSPROP_READONLY, req_status, NULL},
+ {0, 0, 0, 0, 0}
+};
+
+
+JSFunctionSpec CouchHTTPFunctions[] = {
+ {"_open", req_open, 3, 0, 0},
+ {"_setRequestHeader", req_set_hdr, 2, 0, 0},
+ {"_send", req_send, 1, 0, 0},
+ {0, 0, 0, 0, 0}
+};
+
+
+static JSClass global_class = {
+ "GlobalClass",
+ JSCLASS_GLOBAL_FLAGS,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ JS_FinalizeStub,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+static JSFunctionSpec global_functions[] = {
+ {"evalcx", evalcx, 0, 0, 0},
+ {"gc", gc, 0, 0, 0},
+ {"print", print, 0, 0, 0},
+ {"quit", quit, 0, 0, 0},
+ {"readline", readline, 0, 0, 0},
+ {"seal", seal, 0, 0, 0},
+ {0, 0, 0, 0, 0}
+};
+
+
+int
+main(int argc, const char* argv[])
+{
+ JSRuntime* rt = NULL;
+ JSContext* cx = NULL;
+ JSObject* global = NULL;
+ JSObject* klass = NULL;
+ JSScript* script;
+ JSString* scriptsrc;
+ jschar* schars;
+ size_t slen;
+ jsval sroot;
+ jsval result;
+
+ couch_args* args = couch_parse_args(argc, argv);
+
+ rt = JS_NewRuntime(64L * 1024L * 1024L);
+ if(rt == NULL)
+ return 1;
+
+ cx = JS_NewContext(rt, args->stack_size);
+ if(cx == NULL)
+ return 1;
+
+ JS_SetErrorReporter(cx, couch_error);
+ JS_ToggleOptions(cx, JSOPTION_XML);
+
+ SETUP_REQUEST(cx);
+
+ global = JS_NewObject(cx, &global_class, NULL, NULL);
+ if(global == NULL)
+ return 1;
+
+ JS_SetGlobalObject(cx, global);
+
+ if(!JS_InitStandardClasses(cx, global))
+ return 1;
+
+ if(couch_load_funcs(cx, global, global_functions) != JS_TRUE)
+ return 1;
+
+ if(args->use_http) {
+ http_check_enabled();
+
+ klass = JS_InitClass(
+ cx, global,
+ NULL,
+ &CouchHTTPClass, req_ctor,
+ 0,
+ CouchHTTPProperties, CouchHTTPFunctions,
+ NULL, NULL
+ );
+
+ if(!klass)
+ {
+ fprintf(stderr, "Failed to initialize CouchHTTP class.\n");
+ exit(2);
+ }
+ }
+
+ // Convert script source to jschars.
+ scriptsrc = dec_string(cx, args->script, strlen(args->script));
+ if(!scriptsrc)
+ return 1;
+
+ schars = JS_GetStringChars(scriptsrc);
+ slen = JS_GetStringLength(scriptsrc);
+
+ // Root it so GC doesn't collect it.
+ sroot = STRING_TO_JSVAL(scriptsrc);
+ if(JS_AddRoot(cx, &sroot) != JS_TRUE) {
+ fprintf(stderr, "Internal root error.\n");
+ return 1;
+ }
+
+ // Compile and run
+ script = JS_CompileUCScript(cx, global, schars, slen, args->script_name, 1);
+ if(!script) {
+ fprintf(stderr, "Failed to compile script.\n");
+ return 1;
+ }
+
+ JS_ExecuteScript(cx, global, script, &result);
+
+    // Remove the root; otherwise the runtime warns about it at shutdown.
+ JS_RemoveRoot(cx, &sroot);
+
+ FINISH_REQUEST(cx);
+ JS_DestroyContext(cx);
+ JS_DestroyRuntime(rt);
+ JS_ShutDown();
+
+ return 0;
+}
diff --git a/couchjs/c_src/sm180.c b/couchjs/c_src/sm180.c
new file mode 100644
index 00000000..7dc2a974
--- /dev/null
+++ b/couchjs/c_src/sm180.c
@@ -0,0 +1,387 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "http.h"
+#include "sm.h"
+#include "utf8.h"
+#include "util.h"
+
+
+#define SETUP_REQUEST(cx) \
+ JS_SetContextThread(cx); \
+ JS_BeginRequest(cx);
+#define FINISH_REQUEST(cx) \
+ JS_EndRequest(cx); \
+ JS_ClearContextThread(cx);
+
+
+static JSBool
+req_ctor(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
+{
+ return http_ctor(cx, obj);
+}
+
+
+static void
+req_dtor(JSContext* cx, JSObject* obj)
+{
+ http_dtor(cx, obj);
+}
+
+
+static JSBool
+req_open(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 2) {
+ ret = http_open(cx, obj, argv[0], argv[1], JSVAL_FALSE);
+ } else if(argc == 3) {
+ ret = http_open(cx, obj, argv[0], argv[1], argv[2]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.open");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_set_hdr(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 2) {
+ ret = http_set_hdr(cx, obj, argv[0], argv[1]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.set_header");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_send(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 1) {
+ ret = http_send(cx, obj, argv[0]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.send");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_status(JSContext* cx, JSObject* obj, jsval idval, jsval* vp)
+{
+ int status = http_status(cx, obj);
+ if(status < 0)
+ return JS_FALSE;
+
+ if(INT_FITS_IN_JSVAL(status)) {
+ JS_SET_RVAL(cx, vp, INT_TO_JSVAL(status));
+ return JS_TRUE;
+ } else {
+ JS_ReportError(cx, "Invalid HTTP status.");
+ return JS_FALSE;
+ }
+}
+
+
+static JSBool
+evalcx(JSContext *cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ JSString *str;
+ JSObject *sandbox;
+ JSContext *subcx;
+ const jschar *src;
+ size_t srclen;
+ jsval rval;
+ JSBool ret = JS_FALSE;
+
+ sandbox = NULL;
+ if(!JS_ConvertArguments(cx, argc, argv, "S / o", &str, &sandbox)) {
+ return JS_FALSE;
+ }
+
+ subcx = JS_NewContext(JS_GetRuntime(cx), 8L * 1024L);
+ if(!subcx) {
+ JS_ReportOutOfMemory(cx);
+ return JS_FALSE;
+ }
+
+ SETUP_REQUEST(subcx);
+
+ src = JS_GetStringChars(str);
+ srclen = JS_GetStringLength(str);
+
+ if(!sandbox) {
+ sandbox = JS_NewObject(subcx, NULL, NULL, NULL);
+ if(!sandbox || !JS_InitStandardClasses(subcx, sandbox)) {
+ goto done;
+ }
+ }
+
+ if(srclen == 0) {
+ JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(sandbox));
+ } else {
+ JS_EvaluateUCScript(subcx, sandbox, src, srclen, NULL, 0, &rval);
+ JS_SET_RVAL(cx, vp, rval);
+ }
+
+ ret = JS_TRUE;
+
+done:
+ FINISH_REQUEST(subcx);
+ JS_DestroyContext(subcx);
+ return ret;
+}
+
+
+static JSBool
+gc(JSContext* cx, uintN argc, jsval* vp)
+{
+ JS_GC(cx);
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+}
+
+
+static JSBool
+print(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ couch_print(cx, argc, argv);
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+}
+
+
+static JSBool
+quit(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ int exit_code = 0;
+ JS_ConvertArguments(cx, argc, argv, "/i", &exit_code);
+ exit(exit_code);
+}
+
+
+static JSBool
+readline(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSString* line;
+
+ /* GC Occasionally */
+ JS_MaybeGC(cx);
+
+ line = couch_readline(cx, stdin);
+ if(line == NULL) return JS_FALSE;
+
+ JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(line));
+ return JS_TRUE;
+}
+
+
+static JSBool
+seal(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ JSObject *target;
+ JSBool deep = JS_FALSE;
+
+ if(!JS_ConvertArguments(cx, argc, argv, "o/b", &target, &deep))
+ return JS_FALSE;
+
+ if(!target) {
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+ }
+
+ if(JS_SealObject(cx, target, deep) != JS_TRUE)
+ return JS_FALSE;
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+}
+
+
+JSClass CouchHTTPClass = {
+ "CouchHTTP",
+ JSCLASS_HAS_PRIVATE
+ | JSCLASS_CONSTRUCT_PROTOTYPE
+ | JSCLASS_HAS_RESERVED_SLOTS(2),
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ req_dtor,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+JSPropertySpec CouchHTTPProperties[] = {
+ {"status", 0, JSPROP_READONLY, req_status, NULL},
+ {0, 0, 0, 0, 0}
+};
+
+
+JSFunctionSpec CouchHTTPFunctions[] = {
+ JS_FS("_open", (JSNative) req_open, 3, JSFUN_FAST_NATIVE, 0),
+ JS_FS("_setRequestHeader", (JSNative) req_set_hdr, 2, JSFUN_FAST_NATIVE, 0),
+ JS_FS("_send", (JSNative) req_send, 1, JSFUN_FAST_NATIVE, 0),
+ JS_FS_END
+};
+
+
+static JSClass global_class = {
+ "GlobalClass",
+ JSCLASS_GLOBAL_FLAGS,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ JS_FinalizeStub,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+static JSFunctionSpec global_functions[] = {
+ JS_FS("evalcx", (JSNative) evalcx, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS("gc", (JSNative) gc, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS("print", (JSNative) print, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS("quit", (JSNative) quit, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS("readline", (JSNative) readline, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS("seal", (JSNative) seal, 0, JSFUN_FAST_NATIVE, 0),
+ JS_FS_END
+};
+
+
+int
+main(int argc, const char* argv[])
+{
+ JSRuntime* rt = NULL;
+ JSContext* cx = NULL;
+ JSObject* global = NULL;
+ JSObject* klass = NULL;
+ JSScript* script;
+ JSString* scriptsrc;
+ jschar* schars;
+ size_t slen;
+ jsval sroot;
+ jsval result;
+
+ couch_args* args = couch_parse_args(argc, argv);
+
+ rt = JS_NewRuntime(64L * 1024L * 1024L);
+ if(rt == NULL)
+ return 1;
+
+ cx = JS_NewContext(rt, args->stack_size);
+ if(cx == NULL)
+ return 1;
+
+ JS_SetErrorReporter(cx, couch_error);
+ JS_ToggleOptions(cx, JSOPTION_XML);
+
+ SETUP_REQUEST(cx);
+
+ global = JS_NewObject(cx, &global_class, NULL, NULL);
+ if(global == NULL)
+ return 1;
+
+ JS_SetGlobalObject(cx, global);
+
+ if(!JS_InitStandardClasses(cx, global))
+ return 1;
+
+ if(couch_load_funcs(cx, global, global_functions) != JS_TRUE)
+ return 1;
+
+ if(args->use_http) {
+ http_check_enabled();
+
+ klass = JS_InitClass(
+ cx, global,
+ NULL,
+ &CouchHTTPClass, req_ctor,
+ 0,
+ CouchHTTPProperties, CouchHTTPFunctions,
+ NULL, NULL
+ );
+
+ if(!klass)
+ {
+ fprintf(stderr, "Failed to initialize CouchHTTP class.\n");
+ exit(2);
+ }
+ }
+
+ // Convert script source to jschars.
+ scriptsrc = dec_string(cx, args->script, strlen(args->script));
+ if(!scriptsrc)
+ return 1;
+
+ schars = JS_GetStringChars(scriptsrc);
+ slen = JS_GetStringLength(scriptsrc);
+
+ // Root it so GC doesn't collect it.
+ sroot = STRING_TO_JSVAL(scriptsrc);
+ if(JS_AddRoot(cx, &sroot) != JS_TRUE) {
+ fprintf(stderr, "Internal root error.\n");
+ return 1;
+ }
+
+ // Compile and run
+ script = JS_CompileUCScript(cx, global, schars, slen, args->script_name, 1);
+ if(!script) {
+ fprintf(stderr, "Failed to compile script.\n");
+ return 1;
+ }
+
+ JS_ExecuteScript(cx, global, script, &result);
+
+    // Remove the root; otherwise the runtime warns about it at shutdown.
+ JS_RemoveRoot(cx, &sroot);
+
+ FINISH_REQUEST(cx);
+ JS_DestroyContext(cx);
+ JS_DestroyRuntime(rt);
+ JS_ShutDown();
+
+ return 0;
+}
diff --git a/couchjs/c_src/sm185.c b/couchjs/c_src/sm185.c
new file mode 100644
index 00000000..9815f15c
--- /dev/null
+++ b/couchjs/c_src/sm185.c
@@ -0,0 +1,401 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "http.h"
+#include "sm.h"
+#include "utf8.h"
+#include "util.h"
+
+
+#define SETUP_REQUEST(cx) \
+ JS_SetContextThread(cx); \
+ JS_BeginRequest(cx);
+#define FINISH_REQUEST(cx) \
+ JS_EndRequest(cx); \
+ JS_ClearContextThread(cx);
+
+
+static JSClass global_class = {
+ "GlobalClass",
+ JSCLASS_GLOBAL_FLAGS,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_StrictPropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ JS_FinalizeStub,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+static JSBool
+req_ctor(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSBool ret;
+ JSObject* obj = JS_NewObjectForConstructor(cx, vp);
+ if(!obj) {
+ JS_ReportError(cx, "Failed to create CouchHTTP instance.\n");
+ return JS_FALSE;
+ }
+ ret = http_ctor(cx, obj);
+ JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj));
+ return ret;
+}
+
+
+static void
+req_dtor(JSContext* cx, JSObject* obj)
+{
+ http_dtor(cx, obj);
+}
+
+
+static JSBool
+req_open(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 2) {
+ ret = http_open(cx, obj, argv[0], argv[1], JSVAL_FALSE);
+ } else if(argc == 3) {
+ ret = http_open(cx, obj, argv[0], argv[1], argv[2]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.open");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_set_hdr(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 2) {
+ ret = http_set_hdr(cx, obj, argv[0], argv[1]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.set_header");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_send(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSObject* obj = JS_THIS_OBJECT(cx, vp);
+ jsval* argv = JS_ARGV(cx, vp);
+ JSBool ret = JS_FALSE;
+
+ if(argc == 1) {
+ ret = http_send(cx, obj, argv[0]);
+ } else {
+ JS_ReportError(cx, "Invalid call to CouchHTTP.send");
+ }
+
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+static JSBool
+req_status(JSContext* cx, JSObject* obj, jsid pid, jsval* vp)
+{
+ int status = http_status(cx, obj);
+ if(status < 0)
+ return JS_FALSE;
+
+ JS_SET_RVAL(cx, vp, INT_TO_JSVAL(status));
+ return JS_TRUE;
+}
+
+
+static JSBool
+evalcx(JSContext *cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ JSString* str;
+ JSObject* sandbox;
+ JSObject* global;
+ JSContext* subcx;
+ JSCrossCompartmentCall* call = NULL;
+ const jschar* src;
+ size_t srclen;
+ jsval rval;
+ JSBool ret = JS_FALSE;
+
+ sandbox = NULL;
+ if(!JS_ConvertArguments(cx, argc, argv, "S / o", &str, &sandbox)) {
+ return JS_FALSE;
+ }
+
+ subcx = JS_NewContext(JS_GetRuntime(cx), 8L * 1024L);
+ if(!subcx) {
+ JS_ReportOutOfMemory(cx);
+ return JS_FALSE;
+ }
+
+ SETUP_REQUEST(subcx);
+
+ src = JS_GetStringCharsAndLength(cx, str, &srclen);
+
+ // Re-use the compartment associated with the main context,
+    // rather than creating a new compartment.
+ global = JS_GetGlobalObject(cx);
+ if(global == NULL) goto done;
+ call = JS_EnterCrossCompartmentCall(subcx, global);
+
+ if(!sandbox) {
+ sandbox = JS_NewGlobalObject(subcx, &global_class);
+ if(!sandbox || !JS_InitStandardClasses(subcx, sandbox)) {
+ goto done;
+ }
+ }
+
+ if(srclen == 0) {
+ JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(sandbox));
+ } else {
+ JS_EvaluateUCScript(subcx, sandbox, src, srclen, NULL, 0, &rval);
+ JS_SET_RVAL(cx, vp, rval);
+ }
+
+ ret = JS_TRUE;
+
+done:
+ JS_LeaveCrossCompartmentCall(call);
+ FINISH_REQUEST(subcx);
+ JS_DestroyContext(subcx);
+ return ret;
+}
+
+
+static JSBool
+gc(JSContext* cx, uintN argc, jsval* vp)
+{
+ JS_GC(cx);
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+}
+
+
+static JSBool
+print(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ couch_print(cx, argc, argv);
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+}
+
+
+static JSBool
+quit(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ int exit_code = 0;
+ JS_ConvertArguments(cx, argc, argv, "/i", &exit_code);
+ exit(exit_code);
+}
+
+
+static JSBool
+readline(JSContext* cx, uintN argc, jsval* vp)
+{
+ JSString* line;
+
+ /* GC Occasionally */
+ JS_MaybeGC(cx);
+
+ line = couch_readline(cx, stdin);
+ if(line == NULL) return JS_FALSE;
+
+ JS_SET_RVAL(cx, vp, STRING_TO_JSVAL(line));
+ return JS_TRUE;
+}
+
+
+static JSBool
+seal(JSContext* cx, uintN argc, jsval* vp)
+{
+ jsval* argv = JS_ARGV(cx, vp);
+ JSObject *target;
+ JSBool deep = JS_FALSE;
+ JSBool ret;
+
+ if(!JS_ConvertArguments(cx, argc, argv, "o/b", &target, &deep))
+ return JS_FALSE;
+
+ if(!target) {
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return JS_TRUE;
+ }
+
+
+ ret = deep ? JS_DeepFreezeObject(cx, target) : JS_FreezeObject(cx, target);
+ JS_SET_RVAL(cx, vp, JSVAL_VOID);
+ return ret;
+}
+
+
+JSClass CouchHTTPClass = {
+ "CouchHTTP",
+ JSCLASS_HAS_PRIVATE
+ | JSCLASS_CONSTRUCT_PROTOTYPE
+ | JSCLASS_HAS_RESERVED_SLOTS(2),
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_PropertyStub,
+ JS_StrictPropertyStub,
+ JS_EnumerateStub,
+ JS_ResolveStub,
+ JS_ConvertStub,
+ req_dtor,
+ JSCLASS_NO_OPTIONAL_MEMBERS
+};
+
+
+JSPropertySpec CouchHTTPProperties[] = {
+ {"status", 0, JSPROP_READONLY, req_status, NULL},
+ {0, 0, 0, 0, 0}
+};
+
+
+JSFunctionSpec CouchHTTPFunctions[] = {
+ JS_FS("_open", req_open, 3, 0),
+ JS_FS("_setRequestHeader", req_set_hdr, 2, 0),
+ JS_FS("_send", req_send, 1, 0),
+ JS_FS_END
+};
+
+
+static JSFunctionSpec global_functions[] = {
+ JS_FS("evalcx", evalcx, 0, 0),
+ JS_FS("gc", gc, 0, 0),
+ JS_FS("print", print, 0, 0),
+ JS_FS("quit", quit, 0, 0),
+ JS_FS("readline", readline, 0, 0),
+ JS_FS("seal", seal, 0, 0),
+ JS_FS_END
+};
+
+
+int
+main(int argc, const char* argv[])
+{
+ JSRuntime* rt = NULL;
+ JSContext* cx = NULL;
+ JSObject* global = NULL;
+ JSCrossCompartmentCall *call = NULL;
+ JSObject* klass = NULL;
+ JSObject* script;
+ JSString* scriptsrc;
+ const jschar* schars;
+ size_t slen;
+ jsval sroot;
+ jsval result;
+
+ couch_args* args = couch_parse_args(argc, argv);
+
+ rt = JS_NewRuntime(64L * 1024L * 1024L);
+ if(rt == NULL)
+ return 1;
+
+ cx = JS_NewContext(rt, args->stack_size);
+ if(cx == NULL)
+ return 1;
+
+ JS_SetErrorReporter(cx, couch_error);
+ JS_ToggleOptions(cx, JSOPTION_XML);
+
+ SETUP_REQUEST(cx);
+
+ global = JS_NewCompartmentAndGlobalObject(cx, &global_class, NULL);
+ if(global == NULL)
+ return 1;
+
+ call = JS_EnterCrossCompartmentCall(cx, global);
+
+ JS_SetGlobalObject(cx, global);
+
+ if(!JS_InitStandardClasses(cx, global))
+ return 1;
+
+ if(couch_load_funcs(cx, global, global_functions) != JS_TRUE)
+ return 1;
+
+ if(args->use_http) {
+ http_check_enabled();
+
+ klass = JS_InitClass(
+ cx, global,
+ NULL,
+ &CouchHTTPClass, req_ctor,
+ 0,
+ CouchHTTPProperties, CouchHTTPFunctions,
+ NULL, NULL
+ );
+
+ if(!klass)
+ {
+ fprintf(stderr, "Failed to initialize CouchHTTP class.\n");
+ exit(2);
+ }
+ }
+
+ // Convert script source to jschars.
+ scriptsrc = dec_string(cx, args->script, strlen(args->script));
+ if(!scriptsrc)
+ return 1;
+
+ schars = JS_GetStringCharsAndLength(cx, scriptsrc, &slen);
+
+ // Root it so GC doesn't collect it.
+ sroot = STRING_TO_JSVAL(scriptsrc);
+ if(JS_AddValueRoot(cx, &sroot) != JS_TRUE) {
+ fprintf(stderr, "Internal root error.\n");
+ return 1;
+ }
+
+ // Compile and run
+ script = JS_CompileUCScript(cx, global, schars, slen, args->script_name, 1);
+ if(!script) {
+ fprintf(stderr, "Failed to compile script.\n");
+ return 1;
+ }
+
+ JS_ExecuteScript(cx, global, script, &result);
+
+    // Remove the root; otherwise SpiderMonkey warns about a leaked root at shutdown.
+ JS_RemoveValueRoot(cx, &sroot);
+
+ JS_LeaveCrossCompartmentCall(call);
+ FINISH_REQUEST(cx);
+ JS_DestroyContext(cx);
+ JS_DestroyRuntime(rt);
+ JS_ShutDown();
+
+ return 0;
+}
diff --git a/couchjs/c_src/utf8.c b/couchjs/c_src/utf8.c
index 57928ba9..ace6badb 100644
--- a/couchjs/c_src/utf8.c
+++ b/couchjs/c_src/utf8.c
@@ -11,13 +11,7 @@
// the License.
#include "config.h"
-#ifdef HAVE_JS_JSAPI_H
-#include <js/jsapi.h>
-#elif HAVE_MOZJS_JSAPI_H
-#include <mozjs/jsapi.h>
-#else
-#include <jsapi.h>
-#endif
+#include "sm.h"
static int
enc_char(uint8 *utf8Buffer, uint32 ucs4Char)
@@ -128,7 +122,7 @@ char*
enc_string(JSContext* cx, jsval arg, size_t* buflen)
{
JSString* str = NULL;
- jschar* src = NULL;
+ const jschar* src = NULL;
char* bytes = NULL;
size_t srclen = 0;
size_t byteslen = 0;
@@ -136,12 +130,16 @@ enc_string(JSContext* cx, jsval arg, size_t* buflen)
str = JS_ValueToString(cx, arg);
if(!str) goto error;
+#ifdef SM185
+ src = JS_GetStringCharsAndLength(cx, str, &srclen);
+#else
src = JS_GetStringChars(str);
srclen = JS_GetStringLength(str);
+#endif
if(!enc_charbuf(src, srclen, NULL, &byteslen)) goto error;
- bytes = JS_malloc(cx, (byteslen) + 1);
+ bytes = (char*) JS_malloc(cx, (byteslen) + 1);
bytes[byteslen] = 0;
if(!enc_charbuf(src, srclen, bytes, &byteslen)) goto error;
@@ -273,7 +271,7 @@ dec_string(JSContext* cx, const char* bytes, size_t byteslen)
if(!dec_charbuf(bytes, byteslen, NULL, &charslen)) goto error;
- chars = JS_malloc(cx, (charslen + 1) * sizeof(jschar));
+ chars = (jschar*) JS_malloc(cx, (charslen + 1) * sizeof(jschar));
if(!chars) return NULL;
chars[charslen] = 0;
diff --git a/couchjs/c_src/util.c b/couchjs/c_src/util.c
new file mode 100644
index 00000000..f5ef3e82
--- /dev/null
+++ b/couchjs/c_src/util.c
@@ -0,0 +1,235 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+#include "utf8.h"
+
+
+char*
+slurp_file(char* buf, const char* file)
+{
+ FILE* fp;
+ char fbuf[16384];
+ char* tmp;
+ size_t nread = 0;
+ size_t buflen = 0;
+
+ if(strcmp(file, "-") == 0) {
+ fp = stdin;
+ } else {
+ fp = fopen(file, "r");
+ if(fp == NULL) {
+ fprintf(stderr, "Failed to read file: %s\n", file);
+ exit(3);
+ }
+ }
+
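+    // Read in 16 KiB chunks, appending to buf so multiple files concatenate.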
+ while((nread = fread(fbuf, 1, 16384, fp)) > 0) {
+ if(buf == NULL) {
+ buflen = nread;
+ buf = (char*) malloc(nread + 1);
+ if(buf == NULL) {
+ fprintf(stderr, "Out of memory.\n");
+ exit(3);
+ }
+ memcpy(buf, fbuf, buflen);
+ buf[buflen] = '\0';
+ } else {
+ buflen = strlen(buf);
+ tmp = (char*) malloc(buflen + nread + 1);
+ if(tmp == NULL) {
+ fprintf(stderr, "Out of memory.\n");
+ exit(3);
+ }
+ memcpy(tmp, buf, buflen);
+ memcpy(tmp+buflen, fbuf, nread);
+ tmp[buflen+nread] = '\0';
+ free(buf);
+ buf = tmp;
+ }
+ }
+    if(fp != stdin) fclose(fp);
+    return buf;
+}
+
+couch_args*
+couch_parse_args(int argc, const char* argv[])
+{
+ couch_args* args;
+ int i = 1;
+
+ args = (couch_args*) malloc(sizeof(couch_args));
+ if(args == NULL)
+ return NULL;
+
+ memset(args, '\0', sizeof(couch_args));
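+    // Default stack chunk size handed to JS_NewContext(); override with --stack-size.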
+ args->stack_size = 8L * 1024L;
+
+ while(i < argc) {
+ if(strcmp("--http", argv[i]) == 0) {
+ args->use_http = 1;
+ } else if(strcmp("--stack-size", argv[i]) == 0) {
+ args->stack_size = atoi(argv[i+1]);
+ if(args->stack_size <= 0) {
+ fprintf(stderr, "Invalid stack size.\n");
+ exit(2);
+ }
+ } else {
+ args->script = slurp_file(args->script, argv[i]);
+ if(args->script_name == NULL) {
+ if(strcmp(argv[i], "-") == 0) {
+ args->script_name = "<stdin>";
+ } else {
+ args->script_name = argv[i];
+ }
+ } else {
+ args->script_name = "<multiple_files>";
+ }
+ }
+ i++;
+ }
+
+ if(args->script_name == NULL || args->script == NULL) {
+ fprintf(stderr, "No script provided.\n");
+ exit(3);
+ }
+
+ return args;
+}
+
+
+int
+couch_fgets(char* buf, int size, FILE* fp)
+{
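+    // fgets()-style read: stops at newline or EOF, returns bytes stored (0 at EOF).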
+ int n, i, c;
+
+ if(size <= 0) return -1;
+ n = size - 1;
+
+ for(i = 0; i < n && (c = getc(fp)) != EOF; i++) {
+ buf[i] = c;
+ if(c == '\n') {
+ i++;
+ break;
+ }
+ }
+
+ buf[i] = '\0';
+ return i;
+}
+
+
+JSString*
+couch_readline(JSContext* cx, FILE* fp)
+{
+ JSString* str;
+ char* bytes = NULL;
+ char* tmp = NULL;
+ size_t used = 0;
+ size_t byteslen = 256;
+ size_t readlen = 0;
+
+ bytes = (char*) JS_malloc(cx, byteslen);
+ if(bytes == NULL) return NULL;
+
+ while((readlen = couch_fgets(bytes+used, byteslen-used, fp)) > 0) {
+ used += readlen;
+
+ if(bytes[used-1] == '\n') {
+ bytes[used-1] = '\0';
+ break;
+ }
+
+ // Double our buffer and read more.
+ byteslen *= 2;
+ tmp = (char*) JS_realloc(cx, bytes, byteslen);
+ if(!tmp) {
+ JS_free(cx, bytes);
+ return NULL;
+ }
+
+ bytes = tmp;
+ }
+
+ // Treat empty strings specially
+ if(used == 0) {
+ JS_free(cx, bytes);
+ return JSVAL_TO_STRING(JS_GetEmptyStringValue(cx));
+ }
+
+    // Shrink the buffer to the actual data size
+ tmp = (char*) JS_realloc(cx, bytes, used);
+ if(!tmp) {
+ JS_free(cx, bytes);
+ return NULL;
+ }
+ bytes = tmp;
+ byteslen = used;
+
+ str = dec_string(cx, bytes, byteslen);
+ JS_free(cx, bytes);
+ return str;
+}
+
+
+JSObject*
+couch_readfile(JSContext* cx, FILE* fp)
+{
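+    // Not implemented; couchjs loads script sources via slurp_file() instead.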
+ return NULL;
+}
+
+
+void
+couch_print(JSContext* cx, uintN argc, jsval* argv)
+{
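+    // Write each argument as a UTF-8 string, space separated, then a newline.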
+ char *bytes;
+ uintN i;
+
+ for(i = 0; i < argc; i++)
+ {
+ bytes = enc_string(cx, argv[i], NULL);
+ if(!bytes) return;
+
+ fprintf(stdout, "%s%s", i ? " " : "", bytes);
+ JS_free(cx, bytes);
+ }
+
+ fputc('\n', stdout);
+ fflush(stdout);
+}
+
+
+void
+couch_error(JSContext* cx, const char* mesg, JSErrorReport* report)
+{
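+    // Only report actual errors; warnings are silently ignored.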
+ if(!report || !JSREPORT_IS_WARNING(report->flags))
+ {
+ fprintf(stderr, "[couchjs] %s\n", mesg);
+ }
+}
+
+
+JSBool
+couch_load_funcs(JSContext* cx, JSObject* obj, JSFunctionSpec* funcs)
+{
+ JSFunctionSpec* f;
+ for(f = funcs; f->name != NULL; f++) {
+ if(!JS_DefineFunction(cx, obj, f->name, f->call, f->nargs, f->flags)) {
+ fprintf(stderr, "Failed to create function: %s\n", f->name);
+ return JS_FALSE;
+ }
+ }
+ return JS_TRUE;
+}
+
diff --git a/couchjs/c_src/util.h b/couchjs/c_src/util.h
new file mode 100644
index 00000000..54ccdaa5
--- /dev/null
+++ b/couchjs/c_src/util.h
@@ -0,0 +1,34 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#ifndef COUCHJS_UTIL_H
+#define COUCHJS_UTIL_H
+
+#include "sm.h"
+
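+// Parsed command line options for the couchjs interpreter:
+//   use_http     enable the CouchHTTP class (--http)
+//   stack_size   stack chunk size handed to JS_NewContext (--stack-size)
+//   script_name  name used in error reporting ("<stdin>", a file name, or
+//                "<multiple_files>")
+//   script       concatenated source of all script files (or stdin)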
+typedef struct {
+ int use_http;
+ int stack_size;
+ const char* script_name;
+ char* script;
+} couch_args;
+
+void couch_usage(void);
+couch_args* couch_parse_args(int argc, const char* argv[]);
+int couch_fgets(char* buf, int size, FILE* fp);
+JSString* couch_readline(JSContext* cx, FILE* fp);
+void couch_print(JSContext* cx, uintN argc, jsval* argv);
+void couch_error(JSContext* cx, const char* mesg, JSErrorReport* report);
+JSBool couch_load_funcs(JSContext* cx, JSObject* obj, JSFunctionSpec* funcs);
+
+
+#endif // Included util.h
diff --git a/couchjs/js/mimeparse.js b/couchjs/js/mimeparse.js
index 3642a194..42b600fa 100644
--- a/couchjs/js/mimeparse.js
+++ b/couchjs/js/mimeparse.js
@@ -97,7 +97,7 @@ var Mimeparse = (function() {
if ((type == targetType || type == "*" || targetType == "*") &&
(subtype == targetSubtype || subtype == "*" || targetSubtype == "*")) {
var matchCount = 0;
- for (param in targetParams) {
+ for (var param in targetParams) {
if (param != 'q' && params[param] && params[param] == targetParams[param]) {
matchCount += 1;
}
diff --git a/couchjs/js/render.js b/couchjs/js/render.js
index d207db41..93ff6332 100644
--- a/couchjs/js/render.js
+++ b/couchjs/js/render.js
@@ -220,10 +220,10 @@ var Render = (function() {
resetList();
Mime.resetProvides();
var resp = fun.apply(ddoc, args) || {};
+ resp = maybeWrapResponse(resp);
// handle list() style API
if (chunks.length && chunks.length > 0) {
- resp = maybeWrapResponse(resp);
resp.headers = resp.headers || {};
for(var header in startResp) {
resp.headers[header] = startResp[header]
@@ -233,8 +233,12 @@ var Render = (function() {
}
if (Mime.providesUsed) {
- resp = Mime.runProvides(args[1], ddoc);
- resp = applyContentType(maybeWrapResponse(resp), Mime.responseContentType);
+ var provided_resp = Mime.runProvides(args[1], ddoc) || {};
+ provided_resp = maybeWrapResponse(provided_resp);
+ resp.body = (resp.body || "") + chunks.join("");
+ resp.body += provided_resp.body || "";
+ resp = applyContentType(resp, Mime.responseContentType);
+ resetList();
}
var type = typeOf(resp);
diff --git a/couchjs/js/util.js b/couchjs/js/util.js
index e4386701..d498ee64 100644
--- a/couchjs/js/util.js
+++ b/couchjs/js/util.js
@@ -46,7 +46,7 @@ var resolveModule = function(names, mod, root) {
} else if (root) {
mod = {current : root};
}
- if (!mod.current[n]) {
+ if (mod.current[n] === undefined) {
throw ["error", "invalid_require_path", 'Object has no property "'+n+'". '+JSON.stringify(mod.current)];
}
return resolveModule(names, {
@@ -63,6 +63,11 @@ var Couch = {
},
compileFunction : function(source, ddoc) {
if (!source) throw(["error","not_found","missing function"]);
+    // Some newer SpiderMonkey releases refuse to evaluate an
+    // anonymous function expression at global scope. Wrapping
+    // the source in parentheses ensures the compiled function
+    // object is returned correctly.
+ source = "(" + source + ")";
try {
if (sandbox) {
if (ddoc) {
@@ -91,9 +96,9 @@ var Couch = {
}
sandbox.require = require;
}
- var functionObject = evalcx(source, sandbox);
+ var functionObject = evalcx("(" + source + ")", sandbox);
} else {
- var functionObject = eval(source);
+ var functionObject = eval("(" + source + ")");
}
} catch (err) {
throw(["error", "compilation_error", err.toSource() + " (" + source + ")"]);
diff --git a/rel/overlay/etc/default.ini b/rel/overlay/etc/default.ini
index cb032152..55371083 100644
--- a/rel/overlay/etc/default.ini
+++ b/rel/overlay/etc/default.ini
@@ -51,6 +51,7 @@ allow_jsonp = false
;server_options = [{backlog, 128}, {acceptor_pool_size, 16}]
; For more socket options, consult Erlang's module 'inet' man page.
;socket_options = [{recbuf, 262144}, {sndbuf, 262144}, {nodelay, true}]
+log_max_chunk_size = 1000000
[ssl]
port = 6984
diff --git a/rel/overlay/share/www/script/couch_test_runner.js b/rel/overlay/share/www/script/couch_test_runner.js
index 55a6533f..e14640b6 100644
--- a/rel/overlay/share/www/script/couch_test_runner.js
+++ b/rel/overlay/share/www/script/couch_test_runner.js
@@ -414,9 +414,22 @@ function waitForSuccess(fun, tag) {
function waitForRestart() {
var waiting = true;
- while (waiting) {
+ // Wait for the server to go down but don't
+ // wait too long because we might miss the
+ // unavailable period.
+ var count = 25;
+ while (waiting && count > 0) {
+ count--;
try {
CouchDB.request("GET", "/");
+ } catch(e) {
+ waiting = false;
+ }
+ }
+ // Wait for it to come back up
+ waiting = true;
+ while (waiting) {
+ try {
CouchDB.request("GET", "/");
waiting = false;
} catch(e) {
diff --git a/rel/overlay/share/www/script/futon.browse.js b/rel/overlay/share/www/script/futon.browse.js
index 0228b83e..5c10773a 100644
--- a/rel/overlay/share/www/script/futon.browse.js
+++ b/rel/overlay/share/www/script/futon.browse.js
@@ -1275,8 +1275,7 @@
return false;
}).prependTo($("a", li));
}
- },
-
+ }
});
function encodeAttachment(name) {
diff --git a/rel/overlay/share/www/script/test/all_docs.js b/rel/overlay/share/www/script/test/all_docs.js
index 1d83aa95..1afe701d 100644
--- a/rel/overlay/share/www/script/test/all_docs.js
+++ b/rel/overlay/share/www/script/test/all_docs.js
@@ -41,6 +41,13 @@ couchTests.all_docs = function(debug) {
var all = db.allDocs({startkey:"2"});
T(all.offset == 2);
+ // Confirm that queries may assume raw collation.
+ var raw = db.allDocs({
+ startkey: "org.couchdb.user:",
+ endkey: "org.couchdb.user;"
+ });
+ TEquals(0, raw.rows.length);
+
// check that the docs show up in the seq view in the order they were created
var changes = db.changes();
var ids = ["0","3","1","2"];
diff --git a/rel/overlay/share/www/script/test/basics.js b/rel/overlay/share/www/script/test/basics.js
index 30c27c11..5dcf9fa9 100644
--- a/rel/overlay/share/www/script/test/basics.js
+++ b/rel/overlay/share/www/script/test/basics.js
@@ -246,4 +246,23 @@ couchTests.basics = function(debug) {
result = JSON.parse(xhr.responseText);
TEquals("bad_request", result.error);
TEquals("You tried to DELETE a database with a ?=rev parameter. Did you mean to DELETE a document instead?", result.reason);
+
+  // On restart, a request to create a database that already exists must
+  // not overwrite the existing database file
+ db = new CouchDB("test_suite_foobar");
+ db.deleteDb();
+ xhr = CouchDB.request("PUT", "/" + db.name);
+ TEquals(201, xhr.status);
+
+ TEquals(true, db.save({"_id": "doc1"}).ok);
+ TEquals(true, db.ensureFullCommit().ok);
+
+ TEquals(1, db.info().doc_count);
+
+ restartServer();
+
+ xhr = CouchDB.request("PUT", "/" + db.name);
+ TEquals(412, xhr.status);
+
+ TEquals(1, db.info().doc_count);
};
diff --git a/rel/overlay/share/www/script/test/changes.js b/rel/overlay/share/www/script/test/changes.js
index ea22bfb3..284f1985 100644
--- a/rel/overlay/share/www/script/test/changes.js
+++ b/rel/overlay/share/www/script/test/changes.js
@@ -507,6 +507,32 @@ couchTests.changes = function(debug) {
CouchDB.request("GET", "/" + db.name + "/_changes");
TEquals(0, CouchDB.requestStats('httpd', 'clients_requesting_changes').current);
+ // COUCHDB-1256
+ T(db.deleteDb());
+ T(db.createDb());
+
+ T(db.save({"_id":"foo", "a" : 123}).ok);
+ T(db.save({"_id":"bar", "a" : 456}).ok);
+
+ options = {
+ headers: {"Content-Type": "application/json"},
+ body: JSON.stringify({"_rev":"1-cc609831f0ca66e8cd3d4c1e0d98108a", "a":456})
+ };
+ req = CouchDB.request("PUT", "/" + db.name + "/foo?new_edits=false", options);
+
+ req = CouchDB.request("GET", "/" + db.name + "/_changes?style=all_docs");
+ resp = JSON.parse(req.responseText);
+
+ TEquals(3, resp.last_seq);
+ TEquals(2, resp.results.length);
+
+ req = CouchDB.request("GET", "/" + db.name + "/_changes?style=all_docs&since=2");
+ resp = JSON.parse(req.responseText);
+
+ TEquals(3, resp.last_seq);
+ TEquals(1, resp.results.length);
+ TEquals(2, resp.results[0].changes.length);
+
// cleanup
db.deleteDb();
};
diff --git a/rel/overlay/share/www/script/test/design_docs.js b/rel/overlay/share/www/script/test/design_docs.js
index 702f0441..dd38858a 100644
--- a/rel/overlay/share/www/script/test/design_docs.js
+++ b/rel/overlay/share/www/script/test/design_docs.js
@@ -421,6 +421,45 @@ couchTests.design_docs = function(debug) {
run_on_modified_server(server_config, testFun);
+ // COUCHDB-1227 - if a design document is deleted, by adding a "_deleted"
+ // field with the boolean value true, its validate_doc_update functions
+ // should no longer have effect.
+ db.deleteDb();
+ db.createDb();
+ var ddoc = {
+ _id: "_design/test",
+ language: "javascript",
+ validate_doc_update: (function(newDoc, oldDoc, userCtx, secObj) {
+ if (newDoc.value % 2 == 0) {
+ throw({forbidden: "dont like even numbers"});
+ }
+ return true;
+ }).toString()
+ };
+
+ TEquals(true, db.save(ddoc).ok);
+ try {
+ db.save({_id: "doc1", value: 4});
+ T(false, "doc insertion should have failed");
+ } catch (x) {
+ TEquals("forbidden", x.error);
+ }
+
+ var doc = db.open("doc1");
+ TEquals(null, doc);
+ ddoc._deleted = true;
+ TEquals(true, db.save(ddoc).ok);
+
+ try {
+ TEquals(true, db.save({_id: "doc1", value: 4}).ok);
+ } catch (x) {
+ T(false, "doc insertion should have succeeded");
+ }
+
+ doc = db.open("doc1");
+ TEquals(true, doc !== null, "doc was not persisted");
+ TEquals(4, doc.value);
+
// cleanup
db.deleteDb();
db2.deleteDb();
diff --git a/rel/overlay/share/www/script/test/etags_views.js b/rel/overlay/share/www/script/test/etags_views.js
index 34116f71..f6a4e1a5 100644
--- a/rel/overlay/share/www/script/test/etags_views.js
+++ b/rel/overlay/share/www/script/test/etags_views.js
@@ -70,6 +70,14 @@ couchTests.etags_views = function(debug) {
xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
var etag1 = xhr.getResponseHeader("etag");
T(etag1 == etag);
+
+ // verify ETag always changes for include_docs=true on update
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView?include_docs=true");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(db.save({"_id":"doc2", "foo":"bar"}).ok);
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView?include_docs=true");
+ var etag2 = xhr.getResponseHeader("etag");
+ T(etag1 != etag2);
// Verify that purges affect etags
xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/fooView");
diff --git a/rel/overlay/share/www/script/test/jsonp.js b/rel/overlay/share/www/script/test/jsonp.js
index 9aba7189..d1bca94a 100644
--- a/rel/overlay/share/www/script/test/jsonp.js
+++ b/rel/overlay/share/www/script/test/jsonp.js
@@ -48,6 +48,7 @@ couchTests.jsonp = function(debug) {
// Test unchunked callbacks.
var xhr = CouchDB.request("GET", "/test_suite_db/0?callback=jsonp_no_chunk");
+ TEquals("text/javascript", xhr.getResponseHeader("Content-Type"));
T(xhr.status == 200);
jsonp_flag = 0;
eval(xhr.responseText);
@@ -70,6 +71,7 @@ couchTests.jsonp = function(debug) {
var url = "/test_suite_db/_design/test/_view/all_docs?callback=jsonp_chunk";
xhr = CouchDB.request("GET", url);
+ TEquals("text/javascript", xhr.getResponseHeader("Content-Type"));
T(xhr.status == 200);
jsonp_flag = 0;
eval(xhr.responseText);
diff --git a/rel/overlay/share/www/script/test/recreate_doc.js b/rel/overlay/share/www/script/test/recreate_doc.js
index 05843558..f9723793 100644
--- a/rel/overlay/share/www/script/test/recreate_doc.js
+++ b/rel/overlay/share/www/script/test/recreate_doc.js
@@ -77,4 +77,69 @@ couchTests.recreate_doc = function(debug) {
} catch (e) {
T(e.error == "conflict");
}
+
+ db.deleteDb();
+ db.createDb();
+
+ // Helper function to create a doc with multiple revisions
+ // that are compacted away to ?REV_MISSING.
+
+ var createDoc = function(docid) {
+ var ret = [{_id: docid, count: 0}];
+ T(db.save(ret[0]).ok);
+ for(var i = 0; i < 2; i++) {
+ ret[ret.length] = {
+ _id: docid,
+ _rev: ret[ret.length-1]._rev,
+ count: ret[ret.length-1].count+1
+ };
+ T(db.save(ret[ret.length-1]).ok);
+ }
+ db.compact();
+ while(db.info().compact_running) {}
+ return ret;
+ }
+
+ // Helper function to check that there are no duplicates
+ // in the changes feed and that it has proper update
+ // sequence ordering.
+
+ var checkChanges = function() {
+ // Assert that there are no duplicates in _changes.
+ var req = CouchDB.request("GET", "/test_suite_db/_changes");
+ var resp = JSON.parse(req.responseText);
+ var docids = {};
+ var prev_seq = -1;
+ for(var i = 0; i < resp.results.length; i++) {
+ row = resp.results[i];
+ T(row.seq > prev_seq, "Unordered _changes feed.");
+ T(docids[row.id] === undefined, "Duplicates in _changes feed.");
+ prev_seq = row.seq;
+ docids[row.id] = true;
+ }
+ };
+
+  // COUCHDB-1265 - Check that the changes feed remains correct
+  // after we try to break the update_seq tree.
+
+ // This first case is the one originally reported and "fixed"
+ // in COUCHDB-1265. Reinserting an old revision into the
+ // revision tree causes duplicates in the update_seq tree.
+
+ var revs = createDoc("a");
+ T(db.save(revs[1], {new_edits: false}).ok);
+ T(db.save(revs[revs.length-1]).ok);
+ checkChanges();
+
+  // The original fix for COUCHDB-1265 is not entirely correct
+  // because it didn't consider that a compaction might run after
+  // the revision tree was broken in this way.
+
+ revs = createDoc("b");
+ T(db.save(revs[1], {new_edits: false}).ok);
+ db.compact();
+ while(db.info().compact_running) {}
+ T(db.save(revs[revs.length-1]).ok);
+ checkChanges();
+
};
diff --git a/rel/overlay/share/www/script/test/show_documents.js b/rel/overlay/share/www/script/test/show_documents.js
index 55ed9698..cf73ed57 100644
--- a/rel/overlay/share/www/script/test/show_documents.js
+++ b/rel/overlay/share/www/script/test/show_documents.js
@@ -90,6 +90,24 @@ couchTests.show_documents = function(debug) {
start({"X-Couch-Test-Header": "Yeah"});
send("Hey");
}),
+ "list-api-provides" : stringFun(function(doc, req) {
+ provides("text", function(){
+ send("foo, ");
+ send("bar, ");
+ send("baz!");
+ })
+ }),
+ "list-api-provides-and-return" : stringFun(function(doc, req) {
+ provides("text", function(){
+ send("4, ");
+ send("5, ");
+ send("6, ");
+ return "7!";
+ })
+ send("1, ");
+ send("2, ");
+ return "3, ";
+ }),
"list-api-mix" : stringFun(function(doc, req) {
start({"X-Couch-Test-Header": "Yeah"});
send("Hey ");
@@ -395,6 +413,14 @@ couchTests.show_documents = function(debug) {
T(xhr.responseText == "Hey");
TEquals("Yeah", xhr.getResponseHeader("X-Couch-Test-Header"), "header should be cool");
+ // test list() compatible API with provides function
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api-provides/foo?format=text");
+ TEquals(xhr.responseText, "foo, bar, baz!", "should join chunks to response body");
+
+  // should preserve this result order: chunks + return value + provided chunks + provided return value
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api-provides-and-return/foo?format=text");
+ TEquals(xhr.responseText, "1, 2, 3, 4, 5, 6, 7!", "should not break 1..7 range");
+
xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api-mix/foo");
T(xhr.responseText == "Hey Dude");
TEquals("Yeah", xhr.getResponseHeader("X-Couch-Test-Header"), "header should be cool");
diff --git a/rel/overlay/share/www/script/test/update_documents.js b/rel/overlay/share/www/script/test/update_documents.js
index 49d3b68a..59af4597 100644
--- a/rel/overlay/share/www/script/test/update_documents.js
+++ b/rel/overlay/share/www/script/test/update_documents.js
@@ -75,6 +75,17 @@ couchTests.update_documents = function(debug) {
}),
"get-uuid" : stringFun(function(doc, req) {
return [null, req.uuid];
+ }),
+ "code-n-bump" : stringFun(function(doc,req) {
+ if (!doc.counter) doc.counter = 0;
+ doc.counter += 1;
+ var message = "<h1>bumped it!</h1>";
+ resp = {"code": 302, "body": message}
+ return [doc, resp];
+ }),
+ "resp-code" : stringFun(function(doc,req) {
+ resp = {"code": 302}
+ return [null, resp];
})
}
};
@@ -165,4 +176,31 @@ couchTests.update_documents = function(debug) {
T(xhr.status == 200);
T(xhr.responseText.length == 32);
+ // COUCHDB-1229 - allow slashes in doc ids for update handlers
+ // /db/_design/doc/_update/handler/doc/id
+
+ var doc = {
+ _id:"with/slash",
+ counter:1
+ };
+ db.save(doc);
+ xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/bump-counter/with/slash");
+  TEquals(201, xhr.status, "should return a 201 status");
+ TEquals("<h1>bumped it!</h1>", xhr.responseText, "should report bumping");
+
+ var doc = db.open("with/slash");
+ TEquals(2, doc.counter, "counter should be 2");
+
+ // COUCHDB-648 - the code in the JSON response should be honored
+
+ xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/code-n-bump/"+docid, {
+ headers : {"X-Couch-Full-Commit":"true"}
+ });
+ T(xhr.status == 302);
+ T(xhr.responseText == "<h1>bumped it!</h1>");
+ doc = db.open(docid);
+ T(doc.counter == 3);
+
+ xhr = CouchDB.request("POST", "/test_suite_db/_design/update/_update/resp-code/");
+ T(xhr.status == 302);
};
diff --git a/rel/overlay/share/www/script/test/view_collation_raw.js b/rel/overlay/share/www/script/test/view_collation_raw.js
index 31624cdb..779f7eb8 100644
--- a/rel/overlay/share/www/script/test/view_collation_raw.js
+++ b/rel/overlay/share/www/script/test/view_collation_raw.js
@@ -76,12 +76,19 @@ couchTests.view_collation_raw = function(debug) {
}
}
T(db.save(designDoc).ok);
+
+ // Confirm that everything collates correctly.
var rows = db.view("test/test").rows;
for (i=0; i<values.length; i++) {
T(equals(rows[i].key, values[i]));
}
- // everything has collated correctly. Now to check the descending output
+ // Confirm that couch allows raw semantics in key ranges.
+ rows = db.view("test/test", {startkey:"Z", endkey:"a"}).rows;
+ TEquals(1, rows.length);
+ TEquals("a", rows[0].key);
+
+ // Check the descending output.
rows = db.view("test/test", {descending: true}).rows;
for (i=0; i<values.length; i++) {
T(equals(rows[i].key, values[values.length - 1 -i]));
diff --git a/test/etap/072-cleanup.t b/test/etap/072-cleanup.t
index 61790bc6..6f97193d 100755
--- a/test/etap/072-cleanup.t
+++ b/test/etap/072-cleanup.t
@@ -41,7 +41,6 @@ main(_) ->
test() ->
{ok, _} = couch_server_sup:start_link(test_util:config_files()),
- ok = application:start(inets),
couch_server:delete(?TEST_DB, []),
timer:sleep(1000),
@@ -110,11 +109,8 @@ db_url() ->
binary_to_list(?TEST_DB).
query_view(DDoc, View) ->
- {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
- get,
- {db_url() ++ "/_design/" ++ DDoc ++ "/_view/" ++ View, []},
- [],
- [{sync, true}]),
+ {ok, Code, _Headers, _Body} = test_util:request(
+ db_url() ++ "/_design/" ++ DDoc ++ "/_view/" ++ View, [], get),
etap:is(Code, 200, "Built view index for " ++ DDoc ++ "."),
ok.