author     Robert Newson <rnewson@apache.org>        2011-05-17 11:20:48 +0000
committer  Robert Newson <rnewson@apache.org>        2011-05-17 11:20:48 +0000
commit     fa1dcf93a3661437e8751c2e1febcd63b6fc9075 (patch)
tree       786689afd34fdbd506bf1dd0e53b3de54de011d3 /1.1.x/src
parent     e8e4b0d293021fe90326a85828f3cfb087bf18b7 (diff)
parent     0670ae37c8af16014c71b15a0a4752e62b9dd9dd (diff)
tagging 1.1.0
git-svn-id: https://svn.apache.org/repos/asf/couchdb/tags/1.1.0@1104155 13f79535-47bb-0310-9956-ffa450edef68
Diffstat (limited to '1.1.x/src')
-rw-r--r--  1.1.x/src/Makefile.am  13
-rw-r--r--  1.1.x/src/couchdb/Makefile.am  209
-rw-r--r--  1.1.x/src/couchdb/couch.app.tpl.in  29
-rw-r--r--  1.1.x/src/couchdb/couch.erl  39
-rw-r--r--  1.1.x/src/couchdb/couch_app.erl  56
-rw-r--r--  1.1.x/src/couchdb/couch_auth_cache.erl  419
-rw-r--r--  1.1.x/src/couchdb/couch_btree.erl  679
-rw-r--r--  1.1.x/src/couchdb/couch_changes.erl  339
-rw-r--r--  1.1.x/src/couchdb/couch_config.erl  254
-rw-r--r--  1.1.x/src/couchdb/couch_config_writer.erl  86
-rw-r--r--  1.1.x/src/couchdb/couch_db.erl  1210
-rw-r--r--  1.1.x/src/couchdb/couch_db.hrl  278
-rw-r--r--  1.1.x/src/couchdb/couch_db_update_notifier.erl  73
-rw-r--r--  1.1.x/src/couchdb/couch_db_update_notifier_sup.erl  63
-rw-r--r--  1.1.x/src/couchdb/couch_db_updater.erl  896
-rw-r--r--  1.1.x/src/couchdb/couch_doc.erl  527
-rw-r--r--  1.1.x/src/couchdb/couch_event_sup.erl  73
-rw-r--r--  1.1.x/src/couchdb/couch_external_manager.erl  101
-rw-r--r--  1.1.x/src/couchdb/couch_external_server.erl  69
-rw-r--r--  1.1.x/src/couchdb/couch_file.erl  614
-rw-r--r--  1.1.x/src/couchdb/couch_httpd.erl  997
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_auth.erl  359
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_db.erl  1283
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_external.erl  169
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_misc_handlers.erl  284
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_oauth.erl  176
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_proxy.erl  431
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_rewrite.erl  434
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_show.erl  404
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_stats_handlers.erl  56
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_vhost.erl  403
-rw-r--r--  1.1.x/src/couchdb/couch_httpd_view.erl  755
-rw-r--r--  1.1.x/src/couchdb/couch_js_functions.hrl  226
-rw-r--r--  1.1.x/src/couchdb/couch_key_tree.erl  332
-rw-r--r--  1.1.x/src/couchdb/couch_log.erl  193
-rw-r--r--  1.1.x/src/couchdb/couch_native_process.erl  402
-rw-r--r--  1.1.x/src/couchdb/couch_os_daemons.erl  364
-rw-r--r--  1.1.x/src/couchdb/couch_os_process.erl  185
-rw-r--r--  1.1.x/src/couchdb/couch_query_servers.erl  589
-rw-r--r--  1.1.x/src/couchdb/couch_ref_counter.erl  111
-rw-r--r--  1.1.x/src/couchdb/couch_rep.erl  972
-rw-r--r--  1.1.x/src/couchdb/couch_rep_att.erl  119
-rw-r--r--  1.1.x/src/couchdb/couch_rep_changes_feed.erl  503
-rw-r--r--  1.1.x/src/couchdb/couch_rep_httpc.erl  317
-rw-r--r--  1.1.x/src/couchdb/couch_rep_missing_revs.erl  198
-rw-r--r--  1.1.x/src/couchdb/couch_rep_reader.erl  283
-rw-r--r--  1.1.x/src/couchdb/couch_rep_sup.erl  31
-rw-r--r--  1.1.x/src/couchdb/couch_rep_writer.erl  165
-rw-r--r--  1.1.x/src/couchdb/couch_replication_manager.erl  383
-rw-r--r--  1.1.x/src/couchdb/couch_server.erl  405
-rw-r--r--  1.1.x/src/couchdb/couch_server_sup.erl  220
-rw-r--r--  1.1.x/src/couchdb/couch_stats_aggregator.erl  297
-rw-r--r--  1.1.x/src/couchdb/couch_stats_collector.erl  136
-rw-r--r--  1.1.x/src/couchdb/couch_stream.erl  357
-rw-r--r--  1.1.x/src/couchdb/couch_task_status.erl  124
-rw-r--r--  1.1.x/src/couchdb/couch_util.erl  478
-rw-r--r--  1.1.x/src/couchdb/couch_uuids.erl  95
-rw-r--r--  1.1.x/src/couchdb/couch_view.erl  460
-rw-r--r--  1.1.x/src/couchdb/couch_view_compactor.erl  102
-rw-r--r--  1.1.x/src/couchdb/couch_view_group.erl  642
-rw-r--r--  1.1.x/src/couchdb/couch_view_updater.erl  265
-rw-r--r--  1.1.x/src/couchdb/couch_work_queue.erl  155
-rw-r--r--  1.1.x/src/couchdb/priv/Makefile.am  93
-rw-r--r--  1.1.x/src/couchdb/priv/couch_js/http.c  675
-rw-r--r--  1.1.x/src/couchdb/priv/couch_js/http.h  18
-rw-r--r--  1.1.x/src/couchdb/priv/couch_js/main.c  338
-rw-r--r--  1.1.x/src/couchdb/priv/couch_js/utf8.c  286
-rw-r--r--  1.1.x/src/couchdb/priv/couch_js/utf8.h  19
-rw-r--r--  1.1.x/src/couchdb/priv/icu_driver/couch_icu_driver.c  177
-rw-r--r--  1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable.sh  20
-rw-r--r--  1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable_win.c  145
-rw-r--r--  1.1.x/src/couchdb/priv/stat_descriptions.cfg.in  50
-rw-r--r--  1.1.x/src/erlang-oauth/Makefile.am  50
-rw-r--r--  1.1.x/src/erlang-oauth/oauth.app.in  20
-rw-r--r--  1.1.x/src/erlang-oauth/oauth.erl  107
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_hmac_sha1.erl  11
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_http.erl  22
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_plaintext.erl  10
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_rsa_sha1.erl  30
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_unix.erl  16
-rw-r--r--  1.1.x/src/erlang-oauth/oauth_uri.erl  88
-rw-r--r--  1.1.x/src/etap/Makefile.am  44
-rw-r--r--  1.1.x/src/etap/etap.erl  416
-rw-r--r--  1.1.x/src/etap/etap_application.erl  72
-rw-r--r--  1.1.x/src/etap/etap_can.erl  79
-rw-r--r--  1.1.x/src/etap/etap_exception.erl  66
-rw-r--r--  1.1.x/src/etap/etap_process.erl  42
-rw-r--r--  1.1.x/src/etap/etap_report.erl  343
-rw-r--r--  1.1.x/src/etap/etap_request.erl  89
-rw-r--r--  1.1.x/src/etap/etap_string.erl  47
-rw-r--r--  1.1.x/src/etap/etap_web.erl  65
-rw-r--r--  1.1.x/src/ibrowse/Makefile.am  49
-rw-r--r--  1.1.x/src/ibrowse/ibrowse.app.in  13
-rw-r--r--  1.1.x/src/ibrowse/ibrowse.erl  863
-rw-r--r--  1.1.x/src/ibrowse/ibrowse.hrl  21
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_app.erl  63
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_http_client.erl  1855
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_lb.erl  235
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_lib.erl  391
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_sup.erl  63
-rw-r--r--  1.1.x/src/ibrowse/ibrowse_test.erl  513
-rw-r--r--  1.1.x/src/mochiweb/Makefile.am  102
-rw-r--r--  1.1.x/src/mochiweb/internal.hrl  3
-rw-r--r--  1.1.x/src/mochiweb/mochifmt.erl  425
-rw-r--r--  1.1.x/src/mochiweb/mochifmt_records.erl  38
-rw-r--r--  1.1.x/src/mochiweb/mochifmt_std.erl  30
-rw-r--r--  1.1.x/src/mochiweb/mochiglobal.erl  107
-rw-r--r--  1.1.x/src/mochiweb/mochihex.erl  91
-rw-r--r--  1.1.x/src/mochiweb/mochijson.erl  531
-rw-r--r--  1.1.x/src/mochiweb/mochijson2.erl  802
-rw-r--r--  1.1.x/src/mochiweb/mochilists.erl  104
-rw-r--r--  1.1.x/src/mochiweb/mochilogfile2.erl  140
-rw-r--r--  1.1.x/src/mochiweb/mochinum.erl  331
-rw-r--r--  1.1.x/src/mochiweb/mochitemp.erl  310
-rw-r--r--  1.1.x/src/mochiweb/mochiutf8.erl  316
-rw-r--r--  1.1.x/src/mochiweb/mochiweb.app.in  32
-rw-r--r--  1.1.x/src/mochiweb/mochiweb.app.src  9
-rw-r--r--  1.1.x/src/mochiweb/mochiweb.erl  289
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_acceptor.erl  48
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_app.erl  27
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_charref.erl  308
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_cookies.erl  309
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_cover.erl  75
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_echo.erl  38
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_headers.erl  299
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_html.erl  1061
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_http.erl  273
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_io.erl  46
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_mime.erl  94
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_multipart.erl  824
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_request.erl  768
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_response.erl  64
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_skel.erl  86
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_socket.erl  84
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_socket_server.erl  272
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_sup.erl  41
-rw-r--r--  1.1.x/src/mochiweb/mochiweb_util.erl  973
-rw-r--r--  1.1.x/src/mochiweb/reloader.erl  161
138 files changed, 0 insertions, 37902 deletions
diff --git a/1.1.x/src/Makefile.am b/1.1.x/src/Makefile.am
deleted file mode 100644
index b9529f94..00000000
--- a/1.1.x/src/Makefile.am
+++ /dev/null
@@ -1,13 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-SUBDIRS = couchdb erlang-oauth etap ibrowse mochiweb
diff --git a/1.1.x/src/couchdb/Makefile.am b/1.1.x/src/couchdb/Makefile.am
deleted file mode 100644
index 92f6dcf6..00000000
--- a/1.1.x/src/couchdb/Makefile.am
+++ /dev/null
@@ -1,209 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-SUBDIRS = priv
-
-# devdocdir = $(localdocdir)/developer/couchdb
-couchlibdir = $(localerlanglibdir)/couch-$(version)
-couchincludedir = $(couchlibdir)/include
-couchebindir = $(couchlibdir)/ebin
-
-couchinclude_DATA = couch_db.hrl couch_js_functions.hrl
-couchebin_DATA = $(compiled_files)
-
-# dist_devdoc_DATA = $(doc_base) $(doc_modules)
-
-CLEANFILES = $(compiled_files) $(doc_base)
-
-# CLEANFILES = $(doc_modules) edoc-info
-
-source_files = \
- couch.erl \
- couch_app.erl \
- couch_auth_cache.erl \
- couch_btree.erl \
- couch_changes.erl \
- couch_config.erl \
- couch_config_writer.erl \
- couch_db.erl \
- couch_db_update_notifier.erl \
- couch_db_update_notifier_sup.erl \
- couch_doc.erl \
- couch_event_sup.erl \
- couch_external_manager.erl \
- couch_external_server.erl \
- couch_file.erl \
- couch_httpd.erl \
- couch_httpd_db.erl \
- couch_httpd_auth.erl \
- couch_httpd_oauth.erl \
- couch_httpd_external.erl \
- couch_httpd_show.erl \
- couch_httpd_view.erl \
- couch_httpd_misc_handlers.erl \
- couch_httpd_proxy.erl \
- couch_httpd_rewrite.erl \
- couch_httpd_stats_handlers.erl \
- couch_httpd_vhost.erl \
- couch_key_tree.erl \
- couch_log.erl \
- couch_native_process.erl \
- couch_os_daemons.erl \
- couch_os_process.erl \
- couch_query_servers.erl \
- couch_ref_counter.erl \
- couch_rep.erl \
- couch_rep_att.erl \
- couch_rep_changes_feed.erl \
- couch_rep_httpc.erl \
- couch_rep_missing_revs.erl \
- couch_rep_reader.erl \
- couch_rep_sup.erl \
- couch_rep_writer.erl \
- couch_replication_manager.erl \
- couch_server.erl \
- couch_server_sup.erl \
- couch_stats_aggregator.erl \
- couch_stats_collector.erl \
- couch_stream.erl \
- couch_task_status.erl \
- couch_util.erl \
- couch_uuids.erl \
- couch_view.erl \
- couch_view_compactor.erl \
- couch_view_updater.erl \
- couch_view_group.erl \
- couch_db_updater.erl \
- couch_work_queue.erl
-
-EXTRA_DIST = $(source_files) couch_db.hrl couch_js_functions.hrl
-
-compiled_files = \
- couch.app \
- couch.beam \
- couch_app.beam \
- couch_auth_cache.beam \
- couch_btree.beam \
- couch_changes.beam \
- couch_config.beam \
- couch_config_writer.beam \
- couch_db.beam \
- couch_db_update_notifier.beam \
- couch_db_update_notifier_sup.beam \
- couch_doc.beam \
- couch_event_sup.beam \
- couch_external_manager.beam \
- couch_external_server.beam \
- couch_file.beam \
- couch_httpd.beam \
- couch_httpd_db.beam \
- couch_httpd_auth.beam \
- couch_httpd_oauth.beam \
- couch_httpd_proxy.beam \
- couch_httpd_external.beam \
- couch_httpd_show.beam \
- couch_httpd_view.beam \
- couch_httpd_misc_handlers.beam \
- couch_httpd_rewrite.beam \
- couch_httpd_stats_handlers.beam \
- couch_httpd_vhost.beam \
- couch_key_tree.beam \
- couch_log.beam \
- couch_native_process.beam \
- couch_os_daemons.beam \
- couch_os_process.beam \
- couch_query_servers.beam \
- couch_ref_counter.beam \
- couch_rep.beam \
- couch_rep_att.beam \
- couch_rep_changes_feed.beam \
- couch_rep_httpc.beam \
- couch_rep_missing_revs.beam \
- couch_rep_reader.beam \
- couch_rep_sup.beam \
- couch_rep_writer.beam \
- couch_replication_manager.beam \
- couch_server.beam \
- couch_server_sup.beam \
- couch_stats_aggregator.beam \
- couch_stats_collector.beam \
- couch_stream.beam \
- couch_task_status.beam \
- couch_util.beam \
- couch_uuids.beam \
- couch_view.beam \
- couch_view_compactor.beam \
- couch_view_updater.beam \
- couch_view_group.beam \
- couch_db_updater.beam \
- couch_work_queue.beam
-
-# doc_base = \
-# erlang.png \
-# index.html \
-# modules-frame.html \
-# overview-summary.html \
-# packages-frame.html \
-# stylesheet.css
-
-# doc_modules = \
-# couch_btree.html \
-# couch_config.html \
-# couch_config_writer.html \
-# couch_db.html \
-# couch_db_update_notifier.html \
-# couch_db_update_notifier_sup.html \
-# couch_doc.html \
-# couch_event_sup.html \
-# couch_file.html \
-# couch_httpd.html \
-# couch_key_tree.html \
-# couch_log.html \
-# couch_query_servers.html \
-# couch_rep.html \
-# couch_rep_sup.html \
-# couch_server.html \
-# couch_server_sup.html \
-# couch_stream.html \
-# couch_util.html \
-# couch_view.html
-
-if WINDOWS
-couch.app: couch.app.tpl
- modules=`find . -name "couch*.erl" -exec basename {} .erl \; | tr '\n' ',' | sed "s/,$$//"`; \
- sed -e "s|%package_name%|@package_name@|g" \
- -e "s|%version%|@version@|g" \
- -e "s|@modules@|$$modules|g" \
- -e "s|%localconfdir%|../etc/couchdb|g" \
- -e "s|@defaultini@|default.ini|g" \
- -e "s|@localini@|local.ini|g" > \
- $@ < $<
-else
-couch.app: couch.app.tpl
- modules=`{ find . -name "*.erl" -exec basename {} .erl \; | tr '\n' ','; echo ''; } | sed "s/,$$//"`; \
- sed -e "s|%package_name%|@package_name@|g" \
- -e "s|%version%|@version@|g" \
- -e "s|@modules@|$$modules|g" \
- -e "s|%localconfdir%|@localconfdir@|g" \
- -e "s|@defaultini@|default.ini|g" \
- -e "s|@localini@|local.ini|g" > \
- $@ < $<
- chmod +x $@
-endif
-
-# $(dist_devdoc_DATA): edoc-info
-
-# $(ERL) -noshell -run edoc_run files [\"$<\"]
-
-%.beam: %.erl couch_db.hrl couch_js_functions.hrl
- $(ERLC) $(ERLC_FLAGS) ${TEST} $<;
-
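
The couch.app rule above splices the module list into the template by comma-joining the basename of every .erl file. A rough Erlang equivalent of that substitution step, offered only as a sketch (run from src/couchdb; output abbreviated):

    %% Compute the @modules@ value the way the find/tr/sed pipeline does:
    %% take the basename of each .erl file and comma-join the results.
    Modules = string:join(
        [filename:basename(F, ".erl") || F <- filelib:wildcard("*.erl")],
        ",").
    %% => "couch,couch_app,couch_auth_cache,...,couch_work_queue"
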
diff --git a/1.1.x/src/couchdb/couch.app.tpl.in b/1.1.x/src/couchdb/couch.app.tpl.in
deleted file mode 100644
index 36b0b34c..00000000
--- a/1.1.x/src/couchdb/couch.app.tpl.in
+++ /dev/null
@@ -1,29 +0,0 @@
-{application, couch, [
- {description, "@package_name@"},
- {vsn, "@version@"},
- {modules, [@modules@]},
- {registered, [
- couch_config,
- couch_db_update,
- couch_db_update_notifier_sup,
- couch_external_manager,
- couch_httpd,
- couch_log,
- couch_primary_services,
- couch_query_servers,
- couch_rep_sup,
- couch_secondary_services,
- couch_server,
- couch_server_sup,
- couch_stats_aggregator,
- couch_stats_collector,
- couch_task_status,
- couch_view
- ]},
- {mod, {couch_app, [
- "%localconfdir%/@defaultini@",
- "%localconfdir%/@localini@"
- ]}},
- {applications, [kernel, stdlib]},
- {included_applications, [crypto, sasl, inets, oauth, ibrowse, mochiweb]}
-]}.
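
Once those placeholders are filled in, couch.app is an ordinary OTP application resource file. A hedged shell sketch of how the application controller consumes it (paths and values illustrative, following the template above; output abbreviated):

    1> application:load(couch).
    ok
    2> application:get_key(couch, mod).
    {ok,{couch_app,["../etc/couchdb/default.ini",
                    "../etc/couchdb/local.ini"]}}
    3> application:get_key(couch, registered).
    {ok,[couch_config,couch_db_update,...]}   % remaining names elided
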
diff --git a/1.1.x/src/couchdb/couch.erl b/1.1.x/src/couchdb/couch.erl
deleted file mode 100644
index 956e9489..00000000
--- a/1.1.x/src/couchdb/couch.erl
+++ /dev/null
@@ -1,39 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch).
-
--compile(export_all).
-
-start() ->
- application:start(couch).
-
-stop() ->
- application:stop(couch).
-
-restart() ->
- case stop() of
- ok ->
- start();
- {error, {not_started,couch}} ->
- start();
- {error, Reason} ->
- {error, Reason}
- end.
-
-reload() ->
- case supervisor:terminate_child(couch_server_sup, couch_config) of
- ok ->
- supervisor:restart_child(couch_server_sup, couch_config);
- {error, Reason} ->
- {error, Reason}
- end.
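
A brief usage sketch of these wrappers from an attached shell (pids and return shapes illustrative; reload/0 returns whatever supervisor:restart_child/2 returns):

    1> couch:start().
    ok
    2> couch:reload().    % re-read the ini files by bouncing couch_config
    {ok,<0.85.0>}
    3> couch:restart().   % stop/0 then start/0, tolerating not_started
    ok
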
diff --git a/1.1.x/src/couchdb/couch_app.erl b/1.1.x/src/couchdb/couch_app.erl
deleted file mode 100644
index 232953d9..00000000
--- a/1.1.x/src/couchdb/couch_app.erl
+++ /dev/null
@@ -1,56 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_app).
-
--behaviour(application).
-
--include("couch_db.hrl").
-
--export([start/2, stop/1]).
-
-start(_Type, DefaultIniFiles) ->
- IniFiles = get_ini_files(DefaultIniFiles),
- case start_apps([crypto, public_key, sasl, inets, oauth, ssl, ibrowse, mochiweb]) of
- ok ->
- couch_server_sup:start_link(IniFiles);
- {error, Reason} ->
- {error, Reason}
- end.
-
-stop(_) ->
- ok.
-
-get_ini_files(Default) ->
- case init:get_argument(couch_ini) of
- error ->
- Default;
- {ok, [[]]} ->
- Default;
- {ok, [Values]} ->
- Values
- end.
-
-start_apps([]) ->
- ok;
-start_apps([App|Rest]) ->
- case application:start(App) of
- ok ->
- start_apps(Rest);
- {error, {already_started, App}} ->
- start_apps(Rest);
- {error, _Reason} when App =:= public_key ->
- % ignore on R12B5
- start_apps(Rest);
- {error, _Reason} ->
- {error, {app_would_not_start, App}}
- end.
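
get_ini_files/1 consults the node's -couch_ini flag before falling back to the ini files baked into couch.app's mod entry. A sketch of that interaction (paths hypothetical):

    $ erl -couch_ini /etc/couchdb/default.ini /etc/couchdb/local.ini
    1> init:get_argument(couch_ini).
    {ok,[["/etc/couchdb/default.ini","/etc/couchdb/local.ini"]]}

With no flag given, init:get_argument(couch_ini) returns error and the DefaultIniFiles passed to start/2 are used unchanged.
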
diff --git a/1.1.x/src/couchdb/couch_auth_cache.erl b/1.1.x/src/couchdb/couch_auth_cache.erl
deleted file mode 100644
index e0715b88..00000000
--- a/1.1.x/src/couchdb/couch_auth_cache.erl
+++ /dev/null
@@ -1,419 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_auth_cache).
--behaviour(gen_server).
-
-% public API
--export([get_user_creds/1]).
-
-% gen_server API
--export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
--export([code_change/3, terminate/2]).
-
--include("couch_db.hrl").
--include("couch_js_functions.hrl").
-
--define(STATE, auth_state_ets).
--define(BY_USER, auth_by_user_ets).
--define(BY_ATIME, auth_by_atime_ets).
-
--record(state, {
- max_cache_size = 0,
- cache_size = 0,
- db_notifier = nil
-}).
-
-
--spec get_user_creds(UserName::string() | binary()) ->
- Credentials::list() | nil.
-
-get_user_creds(UserName) when is_list(UserName) ->
- get_user_creds(?l2b(UserName));
-
-get_user_creds(UserName) ->
- UserCreds = case couch_config:get("admins", ?b2l(UserName)) of
- "-hashed-" ++ HashedPwdAndSalt ->
- % the name is an admin; now check to see if there is a user doc
- % which has a matching name, salt, and password_sha
- [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
- case get_from_cache(UserName) of
- nil ->
- [{<<"roles">>, [<<"_admin">>]},
- {<<"salt">>, ?l2b(Salt)},
- {<<"password_sha">>, ?l2b(HashedPwd)}];
- UserProps when is_list(UserProps) ->
- DocRoles = couch_util:get_value(<<"roles">>, UserProps),
- [{<<"roles">>, [<<"_admin">> | DocRoles]},
- {<<"salt">>, ?l2b(Salt)},
- {<<"password_sha">>, ?l2b(HashedPwd)}]
- end;
- _Else ->
- get_from_cache(UserName)
- end,
- validate_user_creds(UserCreds).
-
-
-get_from_cache(UserName) ->
- exec_if_auth_db(
- fun(_AuthDb) ->
- maybe_refresh_cache(),
- case ets:lookup(?BY_USER, UserName) of
- [] ->
- gen_server:call(?MODULE, {fetch, UserName}, infinity);
- [{UserName, {Credentials, _ATime}}] ->
- couch_stats_collector:increment({couchdb, auth_cache_hits}),
- gen_server:cast(?MODULE, {cache_hit, UserName}),
- Credentials
- end
- end,
- nil
- ).
-
-
-validate_user_creds(nil) ->
- nil;
-validate_user_creds(UserCreds) ->
- case couch_util:get_value(<<"_conflicts">>, UserCreds) of
- undefined ->
- ok;
- _ConflictList ->
- throw({unauthorized,
- <<"User document conflicts must be resolved before the document",
- " is used for authentication purposes.">>
- })
- end,
- UserCreds.
-
-
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-
-init(_) ->
- ?STATE = ets:new(?STATE, [set, protected, named_table]),
- ?BY_USER = ets:new(?BY_USER, [set, protected, named_table]),
- ?BY_ATIME = ets:new(?BY_ATIME, [ordered_set, private, named_table]),
- AuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
- true = ets:insert(?STATE, {auth_db_name, ?l2b(AuthDbName)}),
- true = ets:insert(?STATE, {auth_db, open_auth_db()}),
- process_flag(trap_exit, true),
- ok = couch_config:register(
- fun("couch_httpd_auth", "auth_cache_size", SizeList) ->
- Size = list_to_integer(SizeList),
- ok = gen_server:call(?MODULE, {new_max_cache_size, Size}, infinity)
- end
- ),
- ok = couch_config:register(
- fun("couch_httpd_auth", "authentication_db", DbName) ->
- ok = gen_server:call(?MODULE, {new_auth_db, ?l2b(DbName)}, infinity)
- end
- ),
- {ok, Notifier} = couch_db_update_notifier:start_link(fun handle_db_event/1),
- State = #state{
- db_notifier = Notifier,
- max_cache_size = list_to_integer(
- couch_config:get("couch_httpd_auth", "auth_cache_size", "50")
- )
- },
- {ok, State}.
-
-
-handle_db_event({Event, DbName}) ->
- [{auth_db_name, AuthDbName}] = ets:lookup(?STATE, auth_db_name),
- case DbName =:= AuthDbName of
- true ->
- case Event of
- deleted -> gen_server:call(?MODULE, auth_db_deleted, infinity);
- created -> gen_server:call(?MODULE, auth_db_created, infinity);
- compacted -> gen_server:call(?MODULE, auth_db_compacted, infinity);
- _Else -> ok
- end;
- false ->
- ok
- end.
-
-
-handle_call({new_auth_db, AuthDbName}, _From, State) ->
- NewState = clear_cache(State),
- true = ets:insert(?STATE, {auth_db_name, AuthDbName}),
- true = ets:insert(?STATE, {auth_db, open_auth_db()}),
- {reply, ok, NewState};
-
-handle_call(auth_db_deleted, _From, State) ->
- NewState = clear_cache(State),
- true = ets:insert(?STATE, {auth_db, nil}),
- {reply, ok, NewState};
-
-handle_call(auth_db_created, _From, State) ->
- NewState = clear_cache(State),
- true = ets:insert(?STATE, {auth_db, open_auth_db()}),
- {reply, ok, NewState};
-
-handle_call(auth_db_compacted, _From, State) ->
- exec_if_auth_db(
- fun(AuthDb) ->
- true = ets:insert(?STATE, {auth_db, reopen_auth_db(AuthDb)})
- end
- ),
- {reply, ok, State};
-
-handle_call({new_max_cache_size, NewSize}, _From, State) ->
- case NewSize >= State#state.cache_size of
- true ->
- ok;
- false ->
- lists:foreach(
- fun(_) ->
- LruTime = ets:last(?BY_ATIME),
- [{LruTime, UserName}] = ets:lookup(?BY_ATIME, LruTime),
- true = ets:delete(?BY_ATIME, LruTime),
- true = ets:delete(?BY_USER, UserName)
- end,
- lists:seq(1, State#state.cache_size - NewSize)
- )
- end,
- NewState = State#state{
- max_cache_size = NewSize,
- cache_size = lists:min([NewSize, State#state.cache_size])
- },
- {reply, ok, NewState};
-
-handle_call({fetch, UserName}, _From, State) ->
- {Credentials, NewState} = case ets:lookup(?BY_USER, UserName) of
- [{UserName, {Creds, ATime}}] ->
- couch_stats_collector:increment({couchdb, auth_cache_hits}),
- cache_hit(UserName, Creds, ATime),
- {Creds, State};
- [] ->
- couch_stats_collector:increment({couchdb, auth_cache_misses}),
- Creds = get_user_props_from_db(UserName),
- State1 = add_cache_entry(UserName, Creds, erlang:now(), State),
- {Creds, State1}
- end,
- {reply, Credentials, NewState};
-
-handle_call(refresh, _From, State) ->
- exec_if_auth_db(fun refresh_entries/1),
- {reply, ok, State}.
-
-
-handle_cast({cache_hit, UserName}, State) ->
- case ets:lookup(?BY_USER, UserName) of
- [{UserName, {Credentials, ATime}}] ->
- cache_hit(UserName, Credentials, ATime);
- _ ->
- ok
- end,
- {noreply, State}.
-
-
-handle_info(_Msg, State) ->
- {noreply, State}.
-
-
-terminate(_Reason, #state{db_notifier = Notifier}) ->
- couch_db_update_notifier:stop(Notifier),
- exec_if_auth_db(fun(AuthDb) -> catch couch_db:close(AuthDb) end),
- true = ets:delete(?BY_USER),
- true = ets:delete(?BY_ATIME),
- true = ets:delete(?STATE).
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-clear_cache(State) ->
- exec_if_auth_db(fun(AuthDb) -> catch couch_db:close(AuthDb) end),
- true = ets:delete_all_objects(?BY_USER),
- true = ets:delete_all_objects(?BY_ATIME),
- State#state{cache_size = 0}.
-
-
-add_cache_entry(UserName, Credentials, ATime, State) ->
- case State#state.cache_size >= State#state.max_cache_size of
- true ->
- free_mru_cache_entry();
- false ->
- ok
- end,
- true = ets:insert(?BY_ATIME, {ATime, UserName}),
- true = ets:insert(?BY_USER, {UserName, {Credentials, ATime}}),
- State#state{cache_size = couch_util:get_value(size, ets:info(?BY_USER))}.
-
-
-free_mru_cache_entry() ->
- case ets:last(?BY_ATIME) of
- '$end_of_table' ->
- ok; % empty cache
- LruTime ->
- [{LruTime, UserName}] = ets:lookup(?BY_ATIME, LruTime),
- true = ets:delete(?BY_ATIME, LruTime),
- true = ets:delete(?BY_USER, UserName)
- end.
-
-
-cache_hit(UserName, Credentials, ATime) ->
- NewATime = erlang:now(),
- true = ets:delete(?BY_ATIME, ATime),
- true = ets:insert(?BY_ATIME, {NewATime, UserName}),
- true = ets:insert(?BY_USER, {UserName, {Credentials, NewATime}}).
-
-
-refresh_entries(AuthDb) ->
- case reopen_auth_db(AuthDb) of
- nil ->
- ok;
- AuthDb2 ->
- case AuthDb2#db.update_seq > AuthDb#db.update_seq of
- true ->
- {ok, _, _} = couch_db:enum_docs_since(
- AuthDb2,
- AuthDb#db.update_seq,
- fun(DocInfo, _, _) -> refresh_entry(AuthDb2, DocInfo) end,
- AuthDb#db.update_seq,
- []
- ),
- true = ets:insert(?STATE, {auth_db, AuthDb2});
- false ->
- ok
- end
- end.
-
-
-refresh_entry(Db, #doc_info{high_seq = DocSeq} = DocInfo) ->
- case is_user_doc(DocInfo) of
- {true, UserName} ->
- case ets:lookup(?BY_USER, UserName) of
- [] ->
- ok;
- [{UserName, {_OldCreds, ATime}}] ->
- {ok, Doc} = couch_db:open_doc(Db, DocInfo, [conflicts, deleted]),
- NewCreds = user_creds(Doc),
- true = ets:insert(?BY_USER, {UserName, {NewCreds, ATime}})
- end;
- false ->
- ok
- end,
- {ok, DocSeq}.
-
-
-user_creds(#doc{deleted = true}) ->
- nil;
-user_creds(#doc{} = Doc) ->
- {Creds} = couch_query_servers:json_doc(Doc),
- Creds.
-
-
-is_user_doc(#doc_info{id = <<"org.couchdb.user:", UserName/binary>>}) ->
- {true, UserName};
-is_user_doc(_) ->
- false.
-
-
-maybe_refresh_cache() ->
- case cache_needs_refresh() of
- true ->
- ok = gen_server:call(?MODULE, refresh, infinity);
- false ->
- ok
- end.
-
-
-cache_needs_refresh() ->
- exec_if_auth_db(
- fun(AuthDb) ->
- case reopen_auth_db(AuthDb) of
- nil ->
- false;
- AuthDb2 ->
- AuthDb2#db.update_seq > AuthDb#db.update_seq
- end
- end,
- false
- ).
-
-
-reopen_auth_db(AuthDb) ->
- case (catch couch_db:reopen(AuthDb)) of
- {ok, AuthDb2} ->
- AuthDb2;
- _ ->
- nil
- end.
-
-
-exec_if_auth_db(Fun) ->
- exec_if_auth_db(Fun, ok).
-
-exec_if_auth_db(Fun, DefRes) ->
- case ets:lookup(?STATE, auth_db) of
- [{auth_db, #db{} = AuthDb}] ->
- Fun(AuthDb);
- _ ->
- DefRes
- end.
-
-
-open_auth_db() ->
- [{auth_db_name, DbName}] = ets:lookup(?STATE, auth_db_name),
- {ok, AuthDb} = ensure_users_db_exists(DbName, [sys_db]),
- AuthDb.
-
-
-get_user_props_from_db(UserName) ->
- exec_if_auth_db(
- fun(AuthDb) ->
- Db = reopen_auth_db(AuthDb),
- DocId = <<"org.couchdb.user:", UserName/binary>>,
- try
- {ok, Doc} = couch_db:open_doc(Db, DocId, [conflicts]),
- {DocProps} = couch_query_servers:json_doc(Doc),
- DocProps
- catch
- _:_Error ->
- nil
- end
- end,
- nil
- ).
-
-ensure_users_db_exists(DbName, Options) ->
- Options1 = [{user_ctx, #user_ctx{roles=[<<"_admin">>]}} | Options],
- case couch_db:open(DbName, Options1) of
- {ok, Db} ->
- ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
- {ok, Db};
- _Error ->
- {ok, Db} = couch_db:create(DbName, Options1),
- ok = ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
- {ok, Db}
- end.
-
-ensure_auth_ddoc_exists(Db, DDocId) ->
- case couch_db:open_doc(Db, DDocId) of
- {not_found, _Reason} ->
- {ok, AuthDesign} = auth_design_doc(DDocId),
- {ok, _Rev} = couch_db:update_doc(Db, AuthDesign, []);
- _ ->
- ok
- end,
- ok.
-
-auth_design_doc(DocId) ->
- DocProps = [
- {<<"_id">>, DocId},
- {<<"language">>,<<"javascript">>},
- {<<"validate_doc_update">>, ?AUTH_DB_DOC_VALIDATE_FUNCTION}
- ],
- {ok, couch_doc:from_json_obj({DocProps})}.
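
The cache above pairs a set table keyed by user name (?BY_USER) with an ordered_set keyed by access time (?BY_ATIME), so eviction is a constant-cost probe at one end of the time index rather than a scan. A minimal, self-contained sketch of that two-table pattern (table names hypothetical; erlang:now/0 kept to match this 1.1.x code, which relies on it returning strictly increasing values). The sketch evicts via ets:first/1, the oldest entry, whereas free_mru_cache_entry/0 above probes ets:last/1, the newest:

    lru_sketch() ->
        ByUser  = ets:new(by_user,  [set]),
        ByATime = ets:new(by_atime, [ordered_set]),
        Put = fun(User, Creds) ->
            ATime = erlang:now(),
            true = ets:insert(ByATime, {ATime, User}),
            true = ets:insert(ByUser,  {User, {Creds, ATime}})
        end,
        Evict = fun() ->
            case ets:first(ByATime) of            % oldest access time
                '$end_of_table' -> ok;            % empty cache
                ATime ->
                    [{ATime, User}] = ets:lookup(ByATime, ATime),
                    true = ets:delete(ByATime, ATime),
                    true = ets:delete(ByUser, User)
            end
        end,
        Put(<<"alice">>, [{<<"roles">>, []}]),
        Put(<<"bob">>,   [{<<"roles">>, []}]),
        Evict(),                                  % drops alice, the oldest
        ets:lookup(ByUser, <<"bob">>).            % bob survives
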
diff --git a/1.1.x/src/couchdb/couch_btree.erl b/1.1.x/src/couchdb/couch_btree.erl
deleted file mode 100644
index f8c126f3..00000000
--- a/1.1.x/src/couchdb/couch_btree.erl
+++ /dev/null
@@ -1,679 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_btree).
-
--export([open/2, open/3, query_modify/4, add/2, add_remove/3]).
--export([fold/4, full_reduce/1, final_reduce/2, foldl/3, foldl/4]).
--export([fold_reduce/4, lookup/2, get_state/1, set_options/2]).
-
--define(CHUNK_THRESHOLD, 16#4ff).
-
--record(btree,
- {fd,
- root,
- extract_kv = fun({Key, Value}) -> {Key, Value} end,
- assemble_kv = fun(Key, Value) -> {Key, Value} end,
- less = fun(A, B) -> A < B end,
- reduce = nil
- }).
-
-extract(#btree{extract_kv=Extract}, Value) ->
- Extract(Value).
-
-assemble(#btree{assemble_kv=Assemble}, Key, Value) ->
- Assemble(Key, Value).
-
-less(#btree{less=Less}, A, B) ->
- Less(A, B).
-
-% pass in 'nil' for State when opening a new Btree.
-open(State, Fd) ->
- {ok, #btree{root=State, fd=Fd}}.
-
-set_options(Bt, []) ->
- Bt;
-set_options(Bt, [{split, Extract}|Rest]) ->
- set_options(Bt#btree{extract_kv=Extract}, Rest);
-set_options(Bt, [{join, Assemble}|Rest]) ->
- set_options(Bt#btree{assemble_kv=Assemble}, Rest);
-set_options(Bt, [{less, Less}|Rest]) ->
- set_options(Bt#btree{less=Less}, Rest);
-set_options(Bt, [{reduce, Reduce}|Rest]) ->
- set_options(Bt#btree{reduce=Reduce}, Rest).
-
-open(State, Fd, Options) ->
- {ok, set_options(#btree{root=State, fd=Fd}, Options)}.
-
-get_state(#btree{root=Root}) ->
- Root.
-
-final_reduce(#btree{reduce=Reduce}, Val) ->
- final_reduce(Reduce, Val);
-final_reduce(Reduce, {[], []}) ->
- Reduce(reduce, []);
-final_reduce(_Bt, {[], [Red]}) ->
- Red;
-final_reduce(Reduce, {[], Reductions}) ->
- Reduce(rereduce, Reductions);
-final_reduce(Reduce, {KVs, Reductions}) ->
- Red = Reduce(reduce, KVs),
- final_reduce(Reduce, {[], [Red | Reductions]}).
-
-fold_reduce(#btree{root=Root}=Bt, Fun, Acc, Options) ->
- Dir = couch_util:get_value(dir, Options, fwd),
- StartKey = couch_util:get_value(start_key, Options),
- EndKey = case couch_util:get_value(end_key_gt, Options) of
- undefined -> couch_util:get_value(end_key, Options);
- LastKey -> LastKey
- end,
- KeyGroupFun = couch_util:get_value(key_group_fun, Options, fun(_,_) -> true end),
- {StartKey2, EndKey2} =
- case Dir of
- rev -> {EndKey, StartKey};
- fwd -> {StartKey, EndKey}
- end,
- try
- {ok, Acc2, GroupedRedsAcc2, GroupedKVsAcc2, GroupedKey2} =
- reduce_stream_node(Bt, Dir, Root, StartKey2, EndKey2, undefined, [], [],
- KeyGroupFun, Fun, Acc),
- if GroupedKey2 == undefined ->
- {ok, Acc2};
- true ->
- case Fun(GroupedKey2, {GroupedKVsAcc2, GroupedRedsAcc2}, Acc2) of
- {ok, Acc3} -> {ok, Acc3};
- {stop, Acc3} -> {ok, Acc3}
- end
- end
- catch
- throw:{stop, AccDone} -> {ok, AccDone}
- end.
-
-full_reduce(#btree{root=nil,reduce=Reduce}) ->
- {ok, Reduce(reduce, [])};
-full_reduce(#btree{root={_P, Red}}) ->
- {ok, Red}.
-
-% wraps a 2-arity function with the proper 3-arity function
-convert_fun_arity(Fun) when is_function(Fun, 2) ->
- fun(KV, _Reds, AccIn) -> Fun(KV, AccIn) end;
-convert_fun_arity(Fun) when is_function(Fun, 3) ->
- Fun. % Already arity 3
-
-make_key_in_end_range_function(#btree{less=Less}, fwd, Options) ->
- case couch_util:get_value(end_key_gt, Options) of
- undefined ->
- case couch_util:get_value(end_key, Options) of
- undefined ->
- fun(_Key) -> true end;
- LastKey ->
- fun(Key) -> not Less(LastKey, Key) end
- end;
- EndKey ->
- fun(Key) -> Less(Key, EndKey) end
- end;
-make_key_in_end_range_function(#btree{less=Less}, rev, Options) ->
- case couch_util:get_value(end_key_gt, Options) of
- undefined ->
- case couch_util:get_value(end_key, Options) of
- undefined ->
- fun(_Key) -> true end;
- LastKey ->
- fun(Key) -> not Less(Key, LastKey) end
- end;
- EndKey ->
- fun(Key) -> Less(EndKey, Key) end
- end.
-
-
-foldl(Bt, Fun, Acc) ->
- fold(Bt, Fun, Acc, []).
-
-foldl(Bt, Fun, Acc, Options) ->
- fold(Bt, Fun, Acc, Options).
-
-
-fold(#btree{root=nil}, _Fun, Acc, _Options) ->
- {ok, {[], []}, Acc};
-fold(#btree{root=Root}=Bt, Fun, Acc, Options) ->
- Dir = couch_util:get_value(dir, Options, fwd),
- InRange = make_key_in_end_range_function(Bt, Dir, Options),
- Result =
- case couch_util:get_value(start_key, Options) of
- undefined ->
- stream_node(Bt, [], Bt#btree.root, InRange, Dir,
- convert_fun_arity(Fun), Acc);
- StartKey ->
- stream_node(Bt, [], Bt#btree.root, StartKey, InRange, Dir,
- convert_fun_arity(Fun), Acc)
- end,
- case Result of
- {ok, Acc2}->
- {_P, FullReduction} = Root,
- {ok, {[], [FullReduction]}, Acc2};
- {stop, LastReduction, Acc2} ->
- {ok, LastReduction, Acc2}
- end.
-
-add(Bt, InsertKeyValues) ->
- add_remove(Bt, InsertKeyValues, []).
-
-add_remove(Bt, InsertKeyValues, RemoveKeys) ->
- {ok, [], Bt2} = query_modify(Bt, [], InsertKeyValues, RemoveKeys),
- {ok, Bt2}.
-
-query_modify(Bt, LookupKeys, InsertValues, RemoveKeys) ->
- #btree{root=Root} = Bt,
- InsertActions = lists:map(
- fun(KeyValue) ->
- {Key, Value} = extract(Bt, KeyValue),
- {insert, Key, Value}
- end, InsertValues),
- RemoveActions = [{remove, Key, nil} || Key <- RemoveKeys],
- FetchActions = [{fetch, Key, nil} || Key <- LookupKeys],
- SortFun =
- fun({OpA, A, _}, {OpB, B, _}) ->
- case A == B of
- % A and B are equal, sort by op.
- true -> op_order(OpA) < op_order(OpB);
- false ->
- less(Bt, A, B)
- end
- end,
- Actions = lists:sort(SortFun, lists:append([InsertActions, RemoveActions, FetchActions])),
- {ok, KeyPointers, QueryResults, Bt2} = modify_node(Bt, Root, Actions, []),
- {ok, NewRoot, Bt3} = complete_root(Bt2, KeyPointers),
- {ok, QueryResults, Bt3#btree{root=NewRoot}}.
-
-% for ordering different operations with the same key.
-% fetch < remove < insert
-op_order(fetch) -> 1;
-op_order(remove) -> 2;
-op_order(insert) -> 3.
-
-lookup(#btree{root=Root, less=Less}=Bt, Keys) ->
- SortedKeys = lists:sort(Less, Keys),
- {ok, SortedResults} = lookup(Bt, Root, SortedKeys),
- % We want to return the results in the same order as the keys were input
- % but we may have changed the order when we sorted. So we need to put the
- % order back into the results.
- couch_util:reorder_results(Keys, SortedResults).
-
-lookup(_Bt, nil, Keys) ->
- {ok, [{Key, not_found} || Key <- Keys]};
-lookup(Bt, {Pointer, _Reds}, Keys) ->
- {NodeType, NodeList} = get_node(Bt, Pointer),
- case NodeType of
- kp_node ->
- lookup_kpnode(Bt, list_to_tuple(NodeList), 1, Keys, []);
- kv_node ->
- lookup_kvnode(Bt, list_to_tuple(NodeList), 1, Keys, [])
- end.
-
-lookup_kpnode(_Bt, _NodeTuple, _LowerBound, [], Output) ->
- {ok, lists:reverse(Output)};
-lookup_kpnode(_Bt, NodeTuple, LowerBound, Keys, Output) when tuple_size(NodeTuple) < LowerBound ->
- {ok, lists:reverse(Output, [{Key, not_found} || Key <- Keys])};
-lookup_kpnode(Bt, NodeTuple, LowerBound, [FirstLookupKey | _] = LookupKeys, Output) ->
- N = find_first_gteq(Bt, NodeTuple, LowerBound, tuple_size(NodeTuple), FirstLookupKey),
- {Key, PointerInfo} = element(N, NodeTuple),
- SplitFun = fun(LookupKey) -> not less(Bt, Key, LookupKey) end,
- case lists:splitwith(SplitFun, LookupKeys) of
- {[], GreaterQueries} ->
- lookup_kpnode(Bt, NodeTuple, N + 1, GreaterQueries, Output);
- {LessEqQueries, GreaterQueries} ->
- {ok, Results} = lookup(Bt, PointerInfo, LessEqQueries),
- lookup_kpnode(Bt, NodeTuple, N + 1, GreaterQueries, lists:reverse(Results, Output))
- end.
-
-
-lookup_kvnode(_Bt, _NodeTuple, _LowerBound, [], Output) ->
- {ok, lists:reverse(Output)};
-lookup_kvnode(_Bt, NodeTuple, LowerBound, Keys, Output) when tuple_size(NodeTuple) < LowerBound ->
- % keys not found
- {ok, lists:reverse(Output, [{Key, not_found} || Key <- Keys])};
-lookup_kvnode(Bt, NodeTuple, LowerBound, [LookupKey | RestLookupKeys], Output) ->
- N = find_first_gteq(Bt, NodeTuple, LowerBound, tuple_size(NodeTuple), LookupKey),
- {Key, Value} = element(N, NodeTuple),
- case less(Bt, LookupKey, Key) of
- true ->
- % LookupKey is less than Key
- lookup_kvnode(Bt, NodeTuple, N, RestLookupKeys, [{LookupKey, not_found} | Output]);
- false ->
- case less(Bt, Key, LookupKey) of
- true ->
- % LookupKey is greater than Key
- lookup_kvnode(Bt, NodeTuple, N+1, RestLookupKeys, [{LookupKey, not_found} | Output]);
- false ->
- % LookupKey is equal to Key
- lookup_kvnode(Bt, NodeTuple, N, RestLookupKeys, [{LookupKey, {ok, assemble(Bt, LookupKey, Value)}} | Output])
- end
- end.
-
-
-complete_root(Bt, []) ->
- {ok, nil, Bt};
-complete_root(Bt, [{_Key, PointerInfo}])->
- {ok, PointerInfo, Bt};
-complete_root(Bt, KPs) ->
- {ok, ResultKeyPointers, Bt2} = write_node(Bt, kp_node, KPs),
- complete_root(Bt2, ResultKeyPointers).
-
-%%%%%%%%%%%%% The chunkify function sucks! %%%%%%%%%%%%%
-% It is inaccurate as it does not account for compression when blocks are
-% written. Plus with the "case byte_size(term_to_binary(InList)) of" code
-% it's probably really inefficient.
-
-chunkify(InList) ->
- case byte_size(term_to_binary(InList)) of
- Size when Size > ?CHUNK_THRESHOLD ->
- NumberOfChunksLikely = ((Size div ?CHUNK_THRESHOLD) + 1),
- ChunkThreshold = Size div NumberOfChunksLikely,
- chunkify(InList, ChunkThreshold, [], 0, []);
- _Else ->
- [InList]
- end.
-
-chunkify([], _ChunkThreshold, [], 0, OutputChunks) ->
- lists:reverse(OutputChunks);
-chunkify([], _ChunkThreshold, OutList, _OutListSize, OutputChunks) ->
- lists:reverse([lists:reverse(OutList) | OutputChunks]);
-chunkify([InElement | RestInList], ChunkThreshold, OutList, OutListSize, OutputChunks) ->
- case byte_size(term_to_binary(InElement)) of
- Size when (Size + OutListSize) > ChunkThreshold andalso OutList /= [] ->
- chunkify(RestInList, ChunkThreshold, [], 0, [lists:reverse([InElement | OutList]) | OutputChunks]);
- Size ->
- chunkify(RestInList, ChunkThreshold, [InElement | OutList], OutListSize + Size, OutputChunks)
- end.
-
-modify_node(Bt, RootPointerInfo, Actions, QueryOutput) ->
- case RootPointerInfo of
- nil ->
- NodeType = kv_node,
- NodeList = [];
- {Pointer, _Reds} ->
- {NodeType, NodeList} = get_node(Bt, Pointer)
- end,
- NodeTuple = list_to_tuple(NodeList),
-
- {ok, NewNodeList, QueryOutput2, Bt2} =
- case NodeType of
- kp_node -> modify_kpnode(Bt, NodeTuple, 1, Actions, [], QueryOutput);
- kv_node -> modify_kvnode(Bt, NodeTuple, 1, Actions, [], QueryOutput)
- end,
- case NewNodeList of
- [] -> % no nodes remain
- {ok, [], QueryOutput2, Bt2};
- NodeList -> % nothing changed
- {LastKey, _LastValue} = element(tuple_size(NodeTuple), NodeTuple),
- {ok, [{LastKey, RootPointerInfo}], QueryOutput2, Bt2};
- _Else2 ->
- {ok, ResultList, Bt3} = write_node(Bt2, NodeType, NewNodeList),
- {ok, ResultList, QueryOutput2, Bt3}
- end.
-
-reduce_node(#btree{reduce=nil}, _NodeType, _NodeList) ->
- [];
-reduce_node(#btree{reduce=R}, kp_node, NodeList) ->
- R(rereduce, [Red || {_K, {_P, Red}} <- NodeList]);
-reduce_node(#btree{reduce=R}=Bt, kv_node, NodeList) ->
- R(reduce, [assemble(Bt, K, V) || {K, V} <- NodeList]).
-
-
-get_node(#btree{fd = Fd}, NodePos) ->
- {ok, {NodeType, NodeList}} = couch_file:pread_term(Fd, NodePos),
- {NodeType, NodeList}.
-
-write_node(Bt, NodeType, NodeList) ->
- % split up nodes into smaller sizes
- NodeListList = chunkify(NodeList),
- % now write out each chunk and return the KeyPointer pairs for those nodes
- ResultList = [
- begin
- {ok, Pointer} = couch_file:append_term(Bt#btree.fd, {NodeType, ANodeList}),
- {LastKey, _} = lists:last(ANodeList),
- {LastKey, {Pointer, reduce_node(Bt, NodeType, ANodeList)}}
- end
- ||
- ANodeList <- NodeListList
- ],
- {ok, ResultList, Bt}.
-
-modify_kpnode(Bt, {}, _LowerBound, Actions, [], QueryOutput) ->
- modify_node(Bt, nil, Actions, QueryOutput);
-modify_kpnode(Bt, NodeTuple, LowerBound, [], ResultNode, QueryOutput) ->
- {ok, lists:reverse(ResultNode, bounded_tuple_to_list(NodeTuple, LowerBound,
- tuple_size(NodeTuple), [])), QueryOutput, Bt};
-modify_kpnode(Bt, NodeTuple, LowerBound,
- [{_, FirstActionKey, _}|_]=Actions, ResultNode, QueryOutput) ->
- Sz = tuple_size(NodeTuple),
- N = find_first_gteq(Bt, NodeTuple, LowerBound, Sz, FirstActionKey),
- case N =:= Sz of
- true ->
- % perform remaining actions on last node
- {_, PointerInfo} = element(Sz, NodeTuple),
- {ok, ChildKPs, QueryOutput2, Bt2} =
- modify_node(Bt, PointerInfo, Actions, QueryOutput),
- NodeList = lists:reverse(ResultNode, bounded_tuple_to_list(NodeTuple, LowerBound,
- Sz - 1, ChildKPs)),
- {ok, NodeList, QueryOutput2, Bt2};
- false ->
- {NodeKey, PointerInfo} = element(N, NodeTuple),
- SplitFun = fun({_ActionType, ActionKey, _ActionValue}) ->
- not less(Bt, NodeKey, ActionKey)
- end,
- {LessEqQueries, GreaterQueries} = lists:splitwith(SplitFun, Actions),
- {ok, ChildKPs, QueryOutput2, Bt2} =
- modify_node(Bt, PointerInfo, LessEqQueries, QueryOutput),
- ResultNode2 = lists:reverse(ChildKPs, bounded_tuple_to_revlist(NodeTuple,
- LowerBound, N - 1, ResultNode)),
- modify_kpnode(Bt2, NodeTuple, N+1, GreaterQueries, ResultNode2, QueryOutput2)
- end.
-
-bounded_tuple_to_revlist(_Tuple, Start, End, Tail) when Start > End ->
- Tail;
-bounded_tuple_to_revlist(Tuple, Start, End, Tail) ->
- bounded_tuple_to_revlist(Tuple, Start+1, End, [element(Start, Tuple)|Tail]).
-
-bounded_tuple_to_list(Tuple, Start, End, Tail) ->
- bounded_tuple_to_list2(Tuple, Start, End, [], Tail).
-
-bounded_tuple_to_list2(_Tuple, Start, End, Acc, Tail) when Start > End ->
- lists:reverse(Acc, Tail);
-bounded_tuple_to_list2(Tuple, Start, End, Acc, Tail) ->
- bounded_tuple_to_list2(Tuple, Start + 1, End, [element(Start, Tuple) | Acc], Tail).
-
-find_first_gteq(_Bt, _Tuple, Start, End, _Key) when Start == End ->
- End;
-find_first_gteq(Bt, Tuple, Start, End, Key) ->
- Mid = Start + ((End - Start) div 2),
- {TupleKey, _} = element(Mid, Tuple),
- case less(Bt, TupleKey, Key) of
- true ->
- find_first_gteq(Bt, Tuple, Mid+1, End, Key);
- false ->
- find_first_gteq(Bt, Tuple, Start, Mid, Key)
- end.
-
-modify_kvnode(Bt, NodeTuple, LowerBound, [], ResultNode, QueryOutput) ->
- {ok, lists:reverse(ResultNode, bounded_tuple_to_list(NodeTuple, LowerBound, tuple_size(NodeTuple), [])), QueryOutput, Bt};
-modify_kvnode(Bt, NodeTuple, LowerBound, [{ActionType, ActionKey, ActionValue} | RestActions], ResultNode, QueryOutput) when LowerBound > tuple_size(NodeTuple) ->
- case ActionType of
- insert ->
- modify_kvnode(Bt, NodeTuple, LowerBound, RestActions, [{ActionKey, ActionValue} | ResultNode], QueryOutput);
- remove ->
- % just drop the action
- modify_kvnode(Bt, NodeTuple, LowerBound, RestActions, ResultNode, QueryOutput);
- fetch ->
- % the key/value must not exist in the tree
- modify_kvnode(Bt, NodeTuple, LowerBound, RestActions, ResultNode, [{not_found, {ActionKey, nil}} | QueryOutput])
- end;
-modify_kvnode(Bt, NodeTuple, LowerBound, [{ActionType, ActionKey, ActionValue} | RestActions], AccNode, QueryOutput) ->
- N = find_first_gteq(Bt, NodeTuple, LowerBound, tuple_size(NodeTuple), ActionKey),
- {Key, Value} = element(N, NodeTuple),
- ResultNode = bounded_tuple_to_revlist(NodeTuple, LowerBound, N - 1, AccNode),
- case less(Bt, ActionKey, Key) of
- true ->
- case ActionType of
- insert ->
- % ActionKey is less than the Key, so insert
- modify_kvnode(Bt, NodeTuple, N, RestActions, [{ActionKey, ActionValue} | ResultNode], QueryOutput);
- remove ->
- % ActionKey is less than the Key, just drop the action
- modify_kvnode(Bt, NodeTuple, N, RestActions, ResultNode, QueryOutput);
- fetch ->
- % ActionKey is less than the Key, the key/value must not exist in the tree
- modify_kvnode(Bt, NodeTuple, N, RestActions, ResultNode, [{not_found, {ActionKey, nil}} | QueryOutput])
- end;
- false ->
- % ActionKey and Key may be equal.
- case less(Bt, Key, ActionKey) of
- false ->
- case ActionType of
- insert ->
- modify_kvnode(Bt, NodeTuple, N+1, RestActions, [{ActionKey, ActionValue} | ResultNode], QueryOutput);
- remove ->
- modify_kvnode(Bt, NodeTuple, N+1, RestActions, ResultNode, QueryOutput);
- fetch ->
- % ActionKey is equal to the Key, insert into the QueryOutput, but re-process the node
- % since an identical action key can follow it.
- modify_kvnode(Bt, NodeTuple, N, RestActions, ResultNode, [{ok, assemble(Bt, Key, Value)} | QueryOutput])
- end;
- true ->
- modify_kvnode(Bt, NodeTuple, N + 1, [{ActionType, ActionKey, ActionValue} | RestActions], [{Key, Value} | ResultNode], QueryOutput)
- end
- end.
-
-
-reduce_stream_node(_Bt, _Dir, nil, _KeyStart, _KeyEnd, GroupedKey, GroupedKVsAcc,
- GroupedRedsAcc, _KeyGroupFun, _Fun, Acc) ->
- {ok, Acc, GroupedRedsAcc, GroupedKVsAcc, GroupedKey};
-reduce_stream_node(Bt, Dir, {P, _R}, KeyStart, KeyEnd, GroupedKey, GroupedKVsAcc,
- GroupedRedsAcc, KeyGroupFun, Fun, Acc) ->
- case get_node(Bt, P) of
- {kp_node, NodeList} ->
- reduce_stream_kp_node(Bt, Dir, NodeList, KeyStart, KeyEnd, GroupedKey,
- GroupedKVsAcc, GroupedRedsAcc, KeyGroupFun, Fun, Acc);
- {kv_node, KVs} ->
- reduce_stream_kv_node(Bt, Dir, KVs, KeyStart, KeyEnd, GroupedKey,
- GroupedKVsAcc, GroupedRedsAcc, KeyGroupFun, Fun, Acc)
- end.
-
-reduce_stream_kv_node(Bt, Dir, KVs, KeyStart, KeyEnd,
- GroupedKey, GroupedKVsAcc, GroupedRedsAcc,
- KeyGroupFun, Fun, Acc) ->
-
- GTEKeyStartKVs =
- case KeyStart of
- undefined ->
- KVs;
- _ ->
- lists:dropwhile(fun({Key,_}) -> less(Bt, Key, KeyStart) end, KVs)
- end,
- KVs2 =
- case KeyEnd of
- undefined ->
- GTEKeyStartKVs;
- _ ->
- lists:takewhile(
- fun({Key,_}) ->
- not less(Bt, KeyEnd, Key)
- end, GTEKeyStartKVs)
- end,
- reduce_stream_kv_node2(Bt, adjust_dir(Dir, KVs2), GroupedKey, GroupedKVsAcc, GroupedRedsAcc,
- KeyGroupFun, Fun, Acc).
-
-
-reduce_stream_kv_node2(_Bt, [], GroupedKey, GroupedKVsAcc, GroupedRedsAcc,
- _KeyGroupFun, _Fun, Acc) ->
- {ok, Acc, GroupedRedsAcc, GroupedKVsAcc, GroupedKey};
-reduce_stream_kv_node2(Bt, [{Key, Value}| RestKVs], GroupedKey, GroupedKVsAcc,
- GroupedRedsAcc, KeyGroupFun, Fun, Acc) ->
- case GroupedKey of
- undefined ->
- reduce_stream_kv_node2(Bt, RestKVs, Key,
- [assemble(Bt,Key,Value)], [], KeyGroupFun, Fun, Acc);
- _ ->
-
- case KeyGroupFun(GroupedKey, Key) of
- true ->
- reduce_stream_kv_node2(Bt, RestKVs, GroupedKey,
- [assemble(Bt,Key,Value)|GroupedKVsAcc], GroupedRedsAcc, KeyGroupFun,
- Fun, Acc);
- false ->
- case Fun(GroupedKey, {GroupedKVsAcc, GroupedRedsAcc}, Acc) of
- {ok, Acc2} ->
- reduce_stream_kv_node2(Bt, RestKVs, Key, [assemble(Bt,Key,Value)],
- [], KeyGroupFun, Fun, Acc2);
- {stop, Acc2} ->
- throw({stop, Acc2})
- end
- end
- end.
-
-reduce_stream_kp_node(Bt, Dir, NodeList, KeyStart, KeyEnd,
- GroupedKey, GroupedKVsAcc, GroupedRedsAcc,
- KeyGroupFun, Fun, Acc) ->
- Nodes =
- case KeyStart of
- undefined ->
- NodeList;
- _ ->
- lists:dropwhile(
- fun({Key,_}) ->
- less(Bt, Key, KeyStart)
- end, NodeList)
- end,
- NodesInRange =
- case KeyEnd of
- undefined ->
- Nodes;
- _ ->
- {InRange, MaybeInRange} = lists:splitwith(
- fun({Key,_}) ->
- less(Bt, Key, KeyEnd)
- end, Nodes),
- InRange ++ case MaybeInRange of [] -> []; [FirstMaybe|_] -> [FirstMaybe] end
- end,
- reduce_stream_kp_node2(Bt, Dir, adjust_dir(Dir, NodesInRange), KeyStart, KeyEnd,
- GroupedKey, GroupedKVsAcc, GroupedRedsAcc, KeyGroupFun, Fun, Acc).
-
-
-reduce_stream_kp_node2(Bt, Dir, [{_Key, NodeInfo} | RestNodeList], KeyStart, KeyEnd,
- undefined, [], [], KeyGroupFun, Fun, Acc) ->
- {ok, Acc2, GroupedRedsAcc2, GroupedKVsAcc2, GroupedKey2} =
- reduce_stream_node(Bt, Dir, NodeInfo, KeyStart, KeyEnd, undefined,
- [], [], KeyGroupFun, Fun, Acc),
- reduce_stream_kp_node2(Bt, Dir, RestNodeList, KeyStart, KeyEnd, GroupedKey2,
- GroupedKVsAcc2, GroupedRedsAcc2, KeyGroupFun, Fun, Acc2);
-reduce_stream_kp_node2(Bt, Dir, NodeList, KeyStart, KeyEnd,
- GroupedKey, GroupedKVsAcc, GroupedRedsAcc, KeyGroupFun, Fun, Acc) ->
- {Grouped0, Ungrouped0} = lists:splitwith(fun({Key,_}) ->
- KeyGroupFun(GroupedKey, Key) end, NodeList),
- {GroupedNodes, UngroupedNodes} =
- case Grouped0 of
- [] ->
- {Grouped0, Ungrouped0};
- _ ->
- [FirstGrouped | RestGrouped] = lists:reverse(Grouped0),
- {RestGrouped, [FirstGrouped | Ungrouped0]}
- end,
- GroupedReds = [R || {_, {_,R}} <- GroupedNodes],
- case UngroupedNodes of
- [{_Key, NodeInfo}|RestNodes] ->
- {ok, Acc2, GroupedRedsAcc2, GroupedKVsAcc2, GroupedKey2} =
- reduce_stream_node(Bt, Dir, NodeInfo, KeyStart, KeyEnd, GroupedKey,
- GroupedKVsAcc, GroupedReds ++ GroupedRedsAcc, KeyGroupFun, Fun, Acc),
- reduce_stream_kp_node2(Bt, Dir, RestNodes, KeyStart, KeyEnd, GroupedKey2,
- GroupedKVsAcc2, GroupedRedsAcc2, KeyGroupFun, Fun, Acc2);
- [] ->
- {ok, Acc, GroupedReds ++ GroupedRedsAcc, GroupedKVsAcc, GroupedKey}
- end.
-
-adjust_dir(fwd, List) ->
- List;
-adjust_dir(rev, List) ->
- lists:reverse(List).
-
-stream_node(Bt, Reds, {Pointer, _Reds}, StartKey, InRange, Dir, Fun, Acc) ->
- {NodeType, NodeList} = get_node(Bt, Pointer),
- case NodeType of
- kp_node ->
- stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, InRange, Dir, Fun, Acc);
- kv_node ->
- stream_kv_node(Bt, Reds, adjust_dir(Dir, NodeList), StartKey, InRange, Dir, Fun, Acc)
- end.
-
-stream_node(Bt, Reds, {Pointer, _Reds}, InRange, Dir, Fun, Acc) ->
- {NodeType, NodeList} = get_node(Bt, Pointer),
- case NodeType of
- kp_node ->
- stream_kp_node(Bt, Reds, adjust_dir(Dir, NodeList), InRange, Dir, Fun, Acc);
- kv_node ->
- stream_kv_node2(Bt, Reds, [], adjust_dir(Dir, NodeList), InRange, Dir, Fun, Acc)
- end.
-
-stream_kp_node(_Bt, _Reds, [], _InRange, _Dir, _Fun, Acc) ->
- {ok, Acc};
-stream_kp_node(Bt, Reds, [{_Key, {Pointer, Red}} | Rest], InRange, Dir, Fun, Acc) ->
- case stream_node(Bt, Reds, {Pointer, Red}, InRange, Dir, Fun, Acc) of
- {ok, Acc2} ->
- stream_kp_node(Bt, [Red | Reds], Rest, InRange, Dir, Fun, Acc2);
- {stop, LastReds, Acc2} ->
- {stop, LastReds, Acc2}
- end.
-
-drop_nodes(_Bt, Reds, _StartKey, []) ->
- {Reds, []};
-drop_nodes(Bt, Reds, StartKey, [{NodeKey, {Pointer, Red}} | RestKPs]) ->
- case less(Bt, NodeKey, StartKey) of
- true -> drop_nodes(Bt, [Red | Reds], StartKey, RestKPs);
- false -> {Reds, [{NodeKey, {Pointer, Red}} | RestKPs]}
- end.
-
-stream_kp_node(Bt, Reds, KPs, StartKey, InRange, Dir, Fun, Acc) ->
- {NewReds, NodesToStream} =
- case Dir of
- fwd ->
- % drop all nodes sorting before the key
- drop_nodes(Bt, Reds, StartKey, KPs);
- rev ->
- % keep all nodes sorting before the key, AND the first node to sort after
- RevKPs = lists:reverse(KPs),
- case lists:splitwith(fun({Key, _Pointer}) -> less(Bt, Key, StartKey) end, RevKPs) of
- {_RevsBefore, []} ->
- % everything sorts before it
- {Reds, KPs};
- {RevBefore, [FirstAfter | Drop]} ->
- {[Red || {_K,{_P,Red}} <- Drop] ++ Reds,
- [FirstAfter | lists:reverse(RevBefore)]}
- end
- end,
- case NodesToStream of
- [] ->
- {ok, Acc};
- [{_Key, {Pointer, Red}} | Rest] ->
- case stream_node(Bt, NewReds, {Pointer, Red}, StartKey, InRange, Dir, Fun, Acc) of
- {ok, Acc2} ->
- stream_kp_node(Bt, [Red | NewReds], Rest, InRange, Dir, Fun, Acc2);
- {stop, LastReds, Acc2} ->
- {stop, LastReds, Acc2}
- end
- end.
-
-stream_kv_node(Bt, Reds, KVs, StartKey, InRange, Dir, Fun, Acc) ->
- DropFun =
- case Dir of
- fwd ->
- fun({Key, _}) -> less(Bt, Key, StartKey) end;
- rev ->
- fun({Key, _}) -> less(Bt, StartKey, Key) end
- end,
- {LTKVs, GTEKVs} = lists:splitwith(DropFun, KVs),
- AssembleLTKVs = [assemble(Bt,K,V) || {K,V} <- LTKVs],
- stream_kv_node2(Bt, Reds, AssembleLTKVs, GTEKVs, InRange, Dir, Fun, Acc).
-
-stream_kv_node2(_Bt, _Reds, _PrevKVs, [], _InRange, _Dir, _Fun, Acc) ->
- {ok, Acc};
-stream_kv_node2(Bt, Reds, PrevKVs, [{K,V} | RestKVs], InRange, Dir, Fun, Acc) ->
- case InRange(K) of
- false ->
- {stop, {PrevKVs, Reds}, Acc};
- true ->
- AssembledKV = assemble(Bt, K, V),
- case Fun(AssembledKV, {PrevKVs, Reds}, Acc) of
- {ok, Acc2} ->
- stream_kv_node2(Bt, Reds, [AssembledKV | PrevKVs], RestKVs, InRange, Dir, Fun, Acc2);
- {stop, Acc2} ->
- {stop, {PrevKVs, Reds}, Acc2}
- end
- end.
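
An end-to-end sketch of the couch_btree API above, assuming a couch_file-backed append-only file (path hypothetical; the reduce fun counts key/value pairs on reduce and sums the counts on rereduce):

    {ok, Fd} = couch_file:open("/tmp/btree_demo.couch", [create, overwrite]),
    {ok, Bt0} = couch_btree:open(nil, Fd, [{reduce,
        fun(reduce, KVs)    -> length(KVs);
           (rereduce, Reds) -> lists:sum(Reds)
        end}]),
    {ok, Bt1} = couch_btree:add(Bt0, [{K, K * K} || K <- lists:seq(1, 100)]),
    [{ok, {7, 49}}] = couch_btree:lookup(Bt1, [7]),   % results keep input order
    {ok, 100} = couch_btree:full_reduce(Bt1),         % stored reduction: count
    {ok, _Reds, Total} = couch_btree:foldl(Bt1,
        fun({_K, V}, Acc) -> {ok, Acc + V} end, 0),   % Total = sum of squares
    ok = couch_file:close(Fd).
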
diff --git a/1.1.x/src/couchdb/couch_changes.erl b/1.1.x/src/couchdb/couch_changes.erl
deleted file mode 100644
index 6eb6f7e1..00000000
--- a/1.1.x/src/couchdb/couch_changes.erl
+++ /dev/null
@@ -1,339 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_changes).
--include("couch_db.hrl").
-
--export([handle_changes/3]).
-
-%% @type Req -> #httpd{} | {json_req, JsonObj()}
-handle_changes(#changes_args{style=Style}=Args1, Req, Db) ->
- #changes_args{feed = Feed} = Args = Args1#changes_args{
- filter = make_filter_fun(Args1#changes_args.filter, Style, Req, Db)
- },
- StartSeq = case Args#changes_args.dir of
- rev ->
- couch_db:get_update_seq(Db);
- fwd ->
- Args#changes_args.since
- end,
- if Feed == "continuous" orelse Feed == "longpoll" ->
- fun(CallbackAcc) ->
- {Callback, UserAcc} = get_callback_acc(CallbackAcc),
- Self = self(),
- {ok, Notify} = couch_db_update_notifier:start_link(
- fun({_, DbName}) when DbName == Db#db.name ->
- Self ! db_updated;
- (_) ->
- ok
- end
- ),
- UserAcc2 = start_sending_changes(Callback, UserAcc, Feed),
- {Timeout, TimeoutFun} = get_changes_timeout(Args, Callback),
- try
- keep_sending_changes(
- Args,
- Callback,
- UserAcc2,
- Db,
- StartSeq,
- <<"">>,
- Timeout,
- TimeoutFun
- )
- after
- couch_db_update_notifier:stop(Notify),
- get_rest_db_updated(ok) % clean out any remaining update messages
- end
- end;
- true ->
- fun(CallbackAcc) ->
- {Callback, UserAcc} = get_callback_acc(CallbackAcc),
- UserAcc2 = start_sending_changes(Callback, UserAcc, Feed),
- {ok, {_, LastSeq, _Prepend, _, _, UserAcc3, _, _, _, _}} =
- send_changes(
- Args#changes_args{feed="normal"},
- Callback,
- UserAcc2,
- Db,
- StartSeq,
- <<>>
- ),
- end_sending_changes(Callback, UserAcc3, LastSeq, Feed)
- end
- end.
-
-get_callback_acc({Callback, _UserAcc} = Pair) when is_function(Callback, 3) ->
- Pair;
-get_callback_acc(Callback) when is_function(Callback, 2) ->
- {fun(Ev, Data, _) -> Callback(Ev, Data) end, ok}.
-
-%% @type Req -> #httpd{} | {json_req, JsonObj()}
-make_filter_fun([$_ | _] = FilterName, Style, Req, Db) ->
- builtin_filter_fun(FilterName, Style, Req, Db);
-make_filter_fun(FilterName, Style, Req, Db) ->
- os_filter_fun(FilterName, Style, Req, Db).
-
-os_filter_fun(FilterName, Style, Req, Db) ->
- case [list_to_binary(couch_httpd:unquote(Part))
- || Part <- string:tokens(FilterName, "/")] of
- [] ->
- fun(_Db2, #doc_info{revs=Revs}) ->
- builtin_results(Style, Revs)
- end;
- [DName, FName] ->
- DesignId = <<"_design/", DName/binary>>,
- DDoc = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
- % validate that the ddoc has the filter fun
- #doc{body={Props}} = DDoc,
- couch_util:get_nested_json_value({Props}, [<<"filters">>, FName]),
- fun(Db2, DocInfo) ->
- DocInfos =
- case Style of
- main_only ->
- [DocInfo];
- all_docs ->
- [DocInfo#doc_info{revs=[Rev]}|| Rev <- DocInfo#doc_info.revs]
- end,
- Docs = [Doc || {ok, Doc} <- [
- couch_db:open_doc(Db2, DocInfo2, [deleted, conflicts])
- || DocInfo2 <- DocInfos]],
- {ok, Passes} = couch_query_servers:filter_docs(
- Req, Db2, DDoc, FName, Docs
- ),
- [{[{<<"rev">>, couch_doc:rev_to_str({RevPos,RevId})}]}
- || {Pass, #doc{revs={RevPos,[RevId|_]}}}
- <- lists:zip(Passes, Docs), Pass == true]
- end;
- _Else ->
- throw({bad_request,
- "filter parameter must be of the form `designname/filtername`"})
- end.
-
-builtin_filter_fun("_doc_ids", Style, {json_req, {Props}}, _Db) ->
- filter_docids(couch_util:get_value(<<"doc_ids">>, Props), Style);
-builtin_filter_fun("_doc_ids", Style, #httpd{method='POST'}=Req, _Db) ->
- {Props} = couch_httpd:json_body_obj(Req),
- DocIds = couch_util:get_value(<<"doc_ids">>, Props, nil),
- filter_docids(DocIds, Style);
-builtin_filter_fun("_doc_ids", Style, #httpd{method='GET'}=Req, _Db) ->
- DocIds = ?JSON_DECODE(couch_httpd:qs_value(Req, "doc_ids", "null")),
- filter_docids(DocIds, Style);
-builtin_filter_fun("_design", Style, _Req, _Db) ->
- filter_designdoc(Style);
-builtin_filter_fun(_FilterName, _Style, _Req, _Db) ->
- throw({bad_request, "unknown builtin filter name"}).
-
-filter_docids(DocIds, Style) when is_list(DocIds)->
- fun(_Db, #doc_info{id=DocId, revs=Revs}) ->
- case lists:member(DocId, DocIds) of
- true ->
- builtin_results(Style, Revs);
- _ -> []
- end
- end;
-filter_docids(_, _) ->
- throw({bad_request, "`doc_ids` filter parameter is not a list."}).
-
-filter_designdoc(Style) ->
- fun(_Db, #doc_info{id=DocId, revs=Revs}) ->
- case DocId of
- <<"_design", _/binary>> ->
- builtin_results(Style, Revs);
- _ -> []
- end
- end.
-
-builtin_results(Style, [#rev_info{rev=Rev}|_]=Revs) ->
- case Style of
- main_only ->
- [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}];
- all_docs ->
- [{[{<<"rev">>, couch_doc:rev_to_str(R)}]}
- || #rev_info{rev=R} <- Revs]
- end.
-
-get_changes_timeout(Args, Callback) ->
- #changes_args{
- heartbeat = Heartbeat,
- timeout = Timeout,
- feed = ResponseType
- } = Args,
- DefaultTimeout = list_to_integer(
- couch_config:get("httpd", "changes_timeout", "60000")
- ),
- case Heartbeat of
- undefined ->
- case Timeout of
- undefined ->
- {DefaultTimeout, fun(UserAcc) -> {stop, UserAcc} end};
- infinity ->
- {infinity, fun(UserAcc) -> {stop, UserAcc} end};
- _ ->
- {lists:min([DefaultTimeout, Timeout]),
- fun(UserAcc) -> {stop, UserAcc} end}
- end;
- true ->
- {DefaultTimeout,
- fun(UserAcc) -> {ok, Callback(timeout, ResponseType, UserAcc)} end};
- _ ->
- {lists:min([DefaultTimeout, Heartbeat]),
- fun(UserAcc) -> {ok, Callback(timeout, ResponseType, UserAcc)} end}
- end.
-
-start_sending_changes(_Callback, UserAcc, "continuous") ->
- UserAcc;
-start_sending_changes(Callback, UserAcc, ResponseType) ->
- Callback(start, ResponseType, UserAcc).
-
-send_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend) ->
- #changes_args{
- style = Style,
- include_docs = IncludeDocs,
- conflicts = Conflicts,
- limit = Limit,
- feed = ResponseType,
- dir = Dir,
- filter = FilterFun
- } = Args,
- couch_db:changes_since(
- Db,
- Style,
- StartSeq,
- fun changes_enumerator/2,
- [{dir, Dir}],
- {Db, StartSeq, Prepend, FilterFun, Callback, UserAcc, ResponseType,
- Limit, IncludeDocs, Conflicts}
- ).
-
-keep_sending_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend, Timeout,
- TimeoutFun) ->
- #changes_args{
- feed = ResponseType,
- limit = Limit,
- db_open_options = DbOptions
- } = Args,
- % ?LOG_INFO("send_changes start ~p",[StartSeq]),
- {ok, {_, EndSeq, Prepend2, _, _, UserAcc2, _, NewLimit, _, _}} = send_changes(
- Args#changes_args{dir=fwd}, Callback, UserAcc, Db, StartSeq, Prepend
- ),
- % ?LOG_INFO("send_changes last ~p",[EndSeq]),
- couch_db:close(Db),
- if Limit > NewLimit, ResponseType == "longpoll" ->
- end_sending_changes(Callback, UserAcc2, EndSeq, ResponseType);
- true ->
- case wait_db_updated(Timeout, TimeoutFun, UserAcc2) of
- {updated, UserAcc3} ->
- % ?LOG_INFO("wait_db_updated updated ~p",[{Db#db.name, EndSeq}]),
- DbOptions1 = [{user_ctx, Db#db.user_ctx} | DbOptions],
- case couch_db:open(Db#db.name, DbOptions1) of
- {ok, Db2} ->
- keep_sending_changes(
- Args#changes_args{limit=NewLimit},
- Callback,
- UserAcc3,
- Db2,
- EndSeq,
- Prepend2,
- Timeout,
- TimeoutFun
- );
- _Else ->
- end_sending_changes(Callback, UserAcc2, EndSeq, ResponseType)
- end;
- {stop, UserAcc3} ->
- % ?LOG_INFO("wait_db_updated stop ~p",[{Db#db.name, EndSeq}]),
- end_sending_changes(Callback, UserAcc3, EndSeq, ResponseType)
- end
- end.
-
-end_sending_changes(Callback, UserAcc, EndSeq, ResponseType) ->
- Callback({stop, EndSeq}, ResponseType, UserAcc).
-
-changes_enumerator(DocInfo, {Db, _, _, FilterFun, Callback, UserAcc,
- "continuous", Limit, IncludeDocs, Conflicts}) ->
-
- #doc_info{high_seq = Seq} = DocInfo,
- Results0 = FilterFun(Db, DocInfo),
- Results = [Result || Result <- Results0, Result /= null],
- Go = if Limit =< 1 -> stop; true -> ok end,
- case Results of
- [] ->
- {Go, {Db, Seq, nil, FilterFun, Callback, UserAcc, "continuous", Limit,
- IncludeDocs, Conflicts}
- };
- _ ->
- ChangesRow = changes_row(Db, Results, DocInfo, IncludeDocs, Conflicts),
- UserAcc2 = Callback({change, ChangesRow, <<>>}, "continuous", UserAcc),
- {Go, {Db, Seq, nil, FilterFun, Callback, UserAcc2, "continuous",
- Limit - 1, IncludeDocs, Conflicts}
- }
- end;
-changes_enumerator(DocInfo, {Db, _, Prepend, FilterFun, Callback, UserAcc,
- ResponseType, Limit, IncludeDocs, Conflicts}) ->
-
- #doc_info{high_seq = Seq} = DocInfo,
- Results0 = FilterFun(Db, DocInfo),
- Results = [Result || Result <- Results0, Result /= null],
- Go = if (Limit =< 1) andalso Results =/= [] -> stop; true -> ok end,
- case Results of
- [] ->
- {Go, {Db, Seq, Prepend, FilterFun, Callback, UserAcc, ResponseType,
- Limit, IncludeDocs, Conflicts}
- };
- _ ->
- ChangesRow = changes_row(Db, Results, DocInfo, IncludeDocs, Conflicts),
- UserAcc2 = Callback({change, ChangesRow, Prepend}, ResponseType, UserAcc),
- {Go, {Db, Seq, <<",\n">>, FilterFun, Callback, UserAcc2, ResponseType,
- Limit - 1, IncludeDocs, Conflicts}
- }
- end.
-
-
-changes_row(Db, Results, DocInfo, IncludeDoc, Conflicts) ->
- #doc_info{
- id = Id, high_seq = Seq, revs = [#rev_info{deleted = Del} | _]
- } = DocInfo,
- {[{<<"seq">>, Seq}, {<<"id">>, Id}, {<<"changes">>, Results}] ++
- deleted_item(Del) ++ case IncludeDoc of
- true ->
- Options = if Conflicts -> [conflicts]; true -> [] end,
- couch_httpd_view:doc_member(Db, DocInfo, Options);
- false ->
- []
- end}.
-
-deleted_item(true) -> [{<<"deleted">>, true}];
-deleted_item(_) -> [].
-
-% waits for a db_updated msg; if there are multiple msgs, collects them.
-wait_db_updated(Timeout, TimeoutFun, UserAcc) ->
- receive
- db_updated ->
- get_rest_db_updated(UserAcc)
- after Timeout ->
- {Go, UserAcc2} = TimeoutFun(UserAcc),
- case Go of
- ok ->
- wait_db_updated(Timeout, TimeoutFun, UserAcc2);
- stop ->
- {stop, UserAcc2}
- end
- end.
-
-get_rest_db_updated(UserAcc) ->
- receive
- db_updated ->
- get_rest_db_updated(UserAcc)
- after 0 ->
- {updated, UserAcc}
- end.
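For reference, a minimal sketch of driving the feed fun returned by handle_changes/3 above. The #changes_args{} fields shown, the already-open Db handle, and the {json_req, null} request are assumptions; the callback clauses mirror the start / {change, Row, Prepend} / {stop, EndSeq} events the module emits:

    %% Hypothetical caller -- assumes the couch_db.hrl records and an open Db.
    Args = #changes_args{feed = "normal", since = 0},
    FeedFun = couch_changes:handle_changes(Args, {json_req, null}, Db),
    FeedFun({fun
        (start, _Type, Acc) -> Acc;
        ({change, Row, _Prepend}, _Type, Acc) -> [Row | Acc];
        ({stop, _EndSeq}, _Type, Acc) -> lists:reverse(Acc)
    end, []}).

The same fun-returning shape is what lets the "continuous" and "longpoll" feeds above run the loop under their own db update notifier before cleaning up in the after clause.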
diff --git a/1.1.x/src/couchdb/couch_config.erl b/1.1.x/src/couchdb/couch_config.erl
deleted file mode 100644
index 792ff5a0..00000000
--- a/1.1.x/src/couchdb/couch_config.erl
+++ /dev/null
@@ -1,254 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% Reads CouchDB's ini files and can be queried for configuration parameters.
-% This module is initialized with a list of ini files from which it
-% consecutively reads Key/Value pairs and saves them in an ets table. If more
-% than one ini file is specified, the last one is used to write back changes
-% made with set/3.
-
--module(couch_config).
--behaviour(gen_server).
-
--include("couch_db.hrl").
-
-
--export([start_link/1, stop/0]).
--export([all/0, get/1, get/2, get/3, set/3, set/4, delete/2, delete/3]).
--export([register/1, register/2]).
--export([parse_ini_file/1]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--record(config, {
- notify_funs=[],
- write_filename=undefined
-}).
-
-
-start_link(IniFiles) ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, IniFiles, []).
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-
-all() ->
- lists:sort(gen_server:call(?MODULE, all, infinity)).
-
-
-get(Section) when is_binary(Section) ->
- ?MODULE:get(?b2l(Section));
-get(Section) ->
- Matches = ets:match(?MODULE, {{Section, '$1'}, '$2'}),
- [{Key, Value} || [Key, Value] <- Matches].
-
-get(Section, Key) ->
- ?MODULE:get(Section, Key, undefined).
-
-get(Section, Key, Default) when is_binary(Section) and is_binary(Key) ->
- ?MODULE:get(?b2l(Section), ?b2l(Key), Default);
-get(Section, Key, Default) ->
- case ets:lookup(?MODULE, {Section, Key}) of
- [] -> Default;
- [{_, Match}] -> Match
- end.
-
-set(Section, Key, Value) ->
- ?MODULE:set(Section, Key, Value, true).
-
-set(Section, Key, Value, Persist) when is_binary(Section) and is_binary(Key) ->
- ?MODULE:set(?b2l(Section), ?b2l(Key), Value, Persist);
-set(Section, Key, Value, Persist) ->
- gen_server:call(?MODULE, {set, Section, Key, Value, Persist}).
-
-
-delete(Section, Key) when is_binary(Section) and is_binary(Key) ->
- delete(?b2l(Section), ?b2l(Key));
-delete(Section, Key) ->
- delete(Section, Key, true).
-
-delete(Section, Key, Persist) when is_binary(Section) and is_binary(Key) ->
- delete(?b2l(Section), ?b2l(Key), Persist);
-delete(Section, Key, Persist) ->
- gen_server:call(?MODULE, {delete, Section, Key, Persist}).
-
-
-register(Fun) ->
- ?MODULE:register(Fun, self()).
-
-register(Fun, Pid) ->
- gen_server:call(?MODULE, {register, Fun, Pid}).
-
-
-init(IniFiles) ->
- ets:new(?MODULE, [named_table, set, protected]),
- try
- lists:map(fun(IniFile) ->
- {ok, ParsedIniValues} = parse_ini_file(IniFile),
- ets:insert(?MODULE, ParsedIniValues)
- end, IniFiles),
- WriteFile = case IniFiles of
- [_|_] -> lists:last(IniFiles);
- _ -> undefined
- end,
- {ok, #config{write_filename = WriteFile}}
- catch _Tag:Error ->
- {stop, Error}
- end.
-
-
-terminate(_Reason, _State) ->
- ok.
-
-
-handle_call(all, _From, Config) ->
- Resp = lists:sort((ets:tab2list(?MODULE))),
- {reply, Resp, Config};
-handle_call({set, Sec, Key, Val, Persist}, From, Config) ->
- Result = case {Persist, Config#config.write_filename} of
- {true, undefined} ->
- ok;
- {true, FileName} ->
- couch_config_writer:save_to_file({{Sec, Key}, Val}, FileName);
- _ ->
- ok
- end,
- case Result of
- ok ->
- true = ets:insert(?MODULE, {{Sec, Key}, Val}),
- spawn_link(fun() ->
- [catch F(Sec, Key, Val, Persist) || {_Pid, F} <- Config#config.notify_funs],
- gen_server:reply(From, ok)
- end),
- {noreply, Config};
- _Error ->
- {reply, Result, Config}
- end;
-handle_call({delete, Sec, Key, Persist}, From, Config) ->
- true = ets:delete(?MODULE, {Sec,Key}),
- case {Persist, Config#config.write_filename} of
- {true, undefined} ->
- ok;
- {true, FileName} ->
- couch_config_writer:save_to_file({{Sec, Key}, ""}, FileName);
- _ ->
- ok
- end,
- spawn_link(fun() ->
- [catch F(Sec, Key, deleted, Persist) || {_Pid, F} <- Config#config.notify_funs],
- gen_server:reply(From, ok)
- end),
- {noreply, Config};
-handle_call({register, Fun, Pid}, _From, #config{notify_funs=PidFuns}=Config) ->
- erlang:monitor(process, Pid),
-    % convert funs of arity 1, 2 and 3 to arity 4
- Fun2 =
- case Fun of
- _ when is_function(Fun, 1) ->
- fun(Section, _Key, _Value, _Persist) -> Fun(Section) end;
- _ when is_function(Fun, 2) ->
- fun(Section, Key, _Value, _Persist) -> Fun(Section, Key) end;
- _ when is_function(Fun, 3) ->
- fun(Section, Key, Value, _Persist) -> Fun(Section, Key, Value) end;
- _ when is_function(Fun, 4) ->
- Fun
- end,
- {reply, ok, Config#config{notify_funs=[{Pid, Fun2} | PidFuns]}}.
-
-
-handle_cast(stop, State) ->
- {stop, normal, State};
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info({'DOWN', _, _, DownPid, _}, #config{notify_funs=PidFuns}=Config) ->
- % remove any funs registered by the downed process
- FilteredPidFuns = [{Pid,Fun} || {Pid,Fun} <- PidFuns, Pid /= DownPid],
- {noreply, Config#config{notify_funs=FilteredPidFuns}}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-parse_ini_file(IniFile) ->
- IniFilename = couch_util:abs_pathname(IniFile),
- IniBin =
- case file:read_file(IniFilename) of
- {ok, IniBin0} ->
- IniBin0;
- {error, eacces} ->
- throw({file_permission_error, IniFile});
- {error, enoent} ->
- Fmt = "Couldn't find server configuration file ~s.",
- Msg = ?l2b(io_lib:format(Fmt, [IniFilename])),
- ?LOG_ERROR("~s~n", [Msg]),
- throw({startup_error, Msg})
- end,
-
- Lines = re:split(IniBin, "\r\n|\n|\r|\032", [{return, list}]),
- {_, ParsedIniValues} =
- lists:foldl(fun(Line, {AccSectionName, AccValues}) ->
- case string:strip(Line) of
- "[" ++ Rest ->
- case re:split(Rest, "\\]", [{return, list}]) of
- [NewSectionName, ""] ->
- {NewSectionName, AccValues};
- _Else -> % end bracket not at end, ignore this line
- {AccSectionName, AccValues}
- end;
- ";" ++ _Comment ->
- {AccSectionName, AccValues};
- Line2 ->
- case re:split(Line2, "\s?=\s?", [{return, list}]) of
- [Value] ->
- MultiLineValuePart = case re:run(Line, "^ \\S", []) of
- {match, _} ->
- true;
- _ ->
- false
- end,
- case {MultiLineValuePart, AccValues} of
- {true, [{{_, ValueName}, PrevValue} | AccValuesRest]} ->
- % remove comment
- case re:split(Value, " ;|\t;", [{return, list}]) of
- [[]] ->
- % empty line
- {AccSectionName, AccValues};
- [LineValue | _Rest] ->
- E = {{AccSectionName, ValueName},
- PrevValue ++ " " ++ LineValue},
- {AccSectionName, [E | AccValuesRest]}
- end;
- _ ->
- {AccSectionName, AccValues}
- end;
- [""|_LineValues] -> % line begins with "=", ignore
- {AccSectionName, AccValues};
- [ValueName|LineValues] -> % yeehaw, got a line!
- RemainingLine = couch_util:implode(LineValues, "="),
- % removes comments
- case re:split(RemainingLine, " ;|\t;", [{return, list}]) of
- [[]] ->
- % empty line means delete this key
- ets:delete(?MODULE, {AccSectionName, ValueName}),
- {AccSectionName, AccValues};
- [LineValue | _Rest] ->
- {AccSectionName,
- [{{AccSectionName, ValueName}, LineValue} | AccValues]}
- end
- end
- end
- end, {"", []}, Lines),
- {ok, ParsedIniValues}.
-
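For reference, a shell-style sketch of the runtime API removed above; the section, key, and values are illustrative assumptions:

    %% Assumes couch_config was started via start_link/1 with a list of
    %% ini files; set/4 with Persist == false only updates the ets table.
    1> couch_config:get("httpd", "port", "5984").
    "5984"
    2> couch_config:set("httpd", "port", "8000", false).
    ok
    3> couch_config:register(fun(Sec, Key, Val) ->
           io:format("~s/~s is now ~p~n", [Sec, Key, Val])
       end).
    ok

Note that register/1 accepts funs of arity 1 through 4; lower arities are wrapped to arity 4 in the handle_call({register, ...}) clause above.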
diff --git a/1.1.x/src/couchdb/couch_config_writer.erl b/1.1.x/src/couchdb/couch_config_writer.erl
deleted file mode 100644
index decd269a..00000000
--- a/1.1.x/src/couchdb/couch_config_writer.erl
+++ /dev/null
@@ -1,86 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% @doc Saves a Key/Value pair to an ini file. The Key consists of a Section
-%% and Option combination. If that combination is found in the ini file
-%% the new value replaces the old value. If only the Section is found the
-%% Option and value combination is appended to the Section. If the Section
-%% does not yet exist in the ini file, it is added and the Option/Value
-%% pair is appended.
-%% @see couch_config
-
--module(couch_config_writer).
-
--export([save_to_file/2]).
-
-%% @spec save_to_file(
-%% Config::{{Section::string(), Option::string()}, Value::string()},
-%% File::filename()) -> ok
-%% @doc Saves a Section/Key/Value triple to the ini file File::filename()
-save_to_file({{Section, Key}, Value}, File) ->
- {ok, OldFileContents} = file:read_file(File),
- Lines = re:split(OldFileContents, "\r\n|\n|\r|\032", [{return, list}]),
-
- SectionLine = "[" ++ Section ++ "]",
- {ok, Pattern} = re:compile(["^(", Key, "\\s*=)|\\[[a-zA-Z0-9\_-]*\\]"]),
-
- NewLines = process_file_lines(Lines, [], SectionLine, Pattern, Key, Value),
- NewFileContents = reverse_and_add_newline(strip_empty_lines(NewLines), []),
- case file:write_file(File, NewFileContents) of
- ok ->
- ok;
- {error, eacces} ->
- {file_permission_error, File};
- Error ->
- Error
- end.
-
-
-process_file_lines([Section|Rest], SeenLines, Section, Pattern, Key, Value) ->
- process_section_lines(Rest, [Section|SeenLines], Pattern, Key, Value);
-
-process_file_lines([Line|Rest], SeenLines, Section, Pattern, Key, Value) ->
- process_file_lines(Rest, [Line|SeenLines], Section, Pattern, Key, Value);
-
-process_file_lines([], SeenLines, Section, _Pattern, Key, Value) ->
- % Section wasn't found. Append it with the option here.
- [Key ++ " = " ++ Value, Section, "" | strip_empty_lines(SeenLines)].
-
-
-process_section_lines([Line|Rest], SeenLines, Pattern, Key, Value) ->
- case re:run(Line, Pattern, [{capture, all_but_first}]) of
- nomatch -> % Found nothing interesting. Move on.
- process_section_lines(Rest, [Line|SeenLines], Pattern, Key, Value);
- {match, []} -> % Found another section. Append the option here.
- lists:reverse(Rest) ++
- [Line, "", Key ++ " = " ++ Value | strip_empty_lines(SeenLines)];
- {match, _} -> % Found the option itself. Replace it.
- lists:reverse(Rest) ++ [Key ++ " = " ++ Value | SeenLines]
- end;
-
-process_section_lines([], SeenLines, _Pattern, Key, Value) ->
- % Found end of file within the section. Append the option here.
- [Key ++ " = " ++ Value | strip_empty_lines(SeenLines)].
-
-
-reverse_and_add_newline([Line|Rest], Content) ->
- reverse_and_add_newline(Rest, [Line, "\n", Content]);
-
-reverse_and_add_newline([], Content) ->
- Content.
-
-
-strip_empty_lines(["" | Rest]) ->
- strip_empty_lines(Rest);
-
-strip_empty_lines(All) ->
- All.
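For reference, a one-line sketch of how couch_config persisted a change through save_to_file/2 above; the ini path is an assumption:

    %% Replaces `port = 8000` under [httpd]; the key (and the section,
    %% if absent) is appended when no existing line matches the pattern.
    ok = couch_config_writer:save_to_file(
        {{"httpd", "port"}, "8000"}, "/etc/couchdb/local.ini").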
diff --git a/1.1.x/src/couchdb/couch_db.erl b/1.1.x/src/couchdb/couch_db.erl
deleted file mode 100644
index 1e7addaf..00000000
--- a/1.1.x/src/couchdb/couch_db.erl
+++ /dev/null
@@ -1,1210 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_db).
--behaviour(gen_server).
-
--export([open/2,open_int/2,close/1,create/2,start_compact/1,get_db_info/1,get_design_docs/1]).
--export([open_ref_counted/2,is_idle/1,monitor/1,count_changes_since/2]).
--export([update_doc/3,update_doc/4,update_docs/4,update_docs/2,update_docs/3,delete_doc/3]).
--export([get_doc_info/2,open_doc/2,open_doc/3,open_doc_revs/4]).
--export([set_revs_limit/2,get_revs_limit/1]).
--export([get_missing_revs/2,name/1,doc_to_tree/1,get_update_seq/1,get_committed_update_seq/1]).
--export([enum_docs/4,enum_docs_since/5]).
--export([enum_docs_since_reduce_to_count/1,enum_docs_reduce_to_count/1]).
--export([increment_update_seq/1,get_purge_seq/1,purge_docs/2,get_last_purged/1]).
--export([start_link/3,open_doc_int/3,ensure_full_commit/1]).
--export([set_security/2,get_security/1]).
--export([init/1,terminate/2,handle_call/3,handle_cast/2,code_change/3,handle_info/2]).
--export([changes_since/5,changes_since/6,read_doc/2,new_revid/1]).
--export([check_is_admin/1, check_is_reader/1]).
--export([reopen/1]).
-
--include("couch_db.hrl").
-
-
-start_link(DbName, Filepath, Options) ->
- case open_db_file(Filepath, Options) of
- {ok, Fd} ->
- StartResult = gen_server:start_link(couch_db, {DbName, Filepath, Fd, Options}, []),
- unlink(Fd),
- StartResult;
- Else ->
- Else
- end.
-
-open_db_file(Filepath, Options) ->
- case couch_file:open(Filepath, Options) of
- {ok, Fd} ->
- {ok, Fd};
- {error, enoent} ->
-        % Couldn't find the file. Is there a compact version? This can happen
-        % if we crashed during the file switch.
- case couch_file:open(Filepath ++ ".compact") of
- {ok, Fd} ->
- ?LOG_INFO("Found ~s~s compaction file, using as primary storage.", [Filepath, ".compact"]),
- ok = file:rename(Filepath ++ ".compact", Filepath),
- ok = couch_file:sync(Fd),
- {ok, Fd};
- {error, enoent} ->
- {not_found, no_db_file}
- end;
- Error ->
- Error
- end.
-
-
-create(DbName, Options) ->
- couch_server:create(DbName, Options).
-
-% this is for opening a database for internal purposes like the replicator
-% or the view indexer. it never throws a reader error.
-open_int(DbName, Options) ->
- couch_server:open(DbName, Options).
-
-% this should be called anytime an http request opens the database.
-% it ensures that the http userCtx is a valid reader
-open(DbName, Options) ->
- case couch_server:open(DbName, Options) of
- {ok, Db} ->
- try
- check_is_reader(Db),
- {ok, Db}
- catch
- throw:Error ->
- close(Db),
- throw(Error)
- end;
- Else -> Else
- end.
-
-reopen(#db{main_pid = Pid, fd_ref_counter = OldRefCntr, user_ctx = UserCtx}) ->
- {ok, #db{fd_ref_counter = NewRefCntr} = NewDb} =
- gen_server:call(Pid, get_db, infinity),
- case NewRefCntr =:= OldRefCntr of
- true ->
- ok;
- false ->
- couch_ref_counter:add(NewRefCntr),
- catch couch_ref_counter:drop(OldRefCntr)
- end,
- {ok, NewDb#db{user_ctx = UserCtx}}.
-
-ensure_full_commit(#db{update_pid=UpdatePid,instance_start_time=StartTime}) ->
- ok = gen_server:call(UpdatePid, full_commit, infinity),
- {ok, StartTime}.
-
-close(#db{fd_ref_counter=RefCntr}) ->
- couch_ref_counter:drop(RefCntr).
-
-open_ref_counted(MainPid, OpenedPid) ->
- gen_server:call(MainPid, {open_ref_count, OpenedPid}).
-
-is_idle(MainPid) ->
- gen_server:call(MainPid, is_idle).
-
-monitor(#db{main_pid=MainPid}) ->
- erlang:monitor(process, MainPid).
-
-start_compact(#db{update_pid=Pid}) ->
- gen_server:call(Pid, start_compact).
-
-delete_doc(Db, Id, Revisions) ->
- DeletedDocs = [#doc{id=Id, revs=[Rev], deleted=true} || Rev <- Revisions],
- {ok, [Result]} = update_docs(Db, DeletedDocs, []),
- {ok, Result}.
-
-open_doc(Db, IdOrDocInfo) ->
- open_doc(Db, IdOrDocInfo, []).
-
-open_doc(Db, Id, Options) ->
- increment_stat(Db, {couchdb, database_reads}),
- case open_doc_int(Db, Id, Options) of
- {ok, #doc{deleted=true}=Doc} ->
- case lists:member(deleted, Options) of
- true ->
- apply_open_options({ok, Doc},Options);
- false ->
- {not_found, deleted}
- end;
- Else ->
- apply_open_options(Else,Options)
- end.
-
-apply_open_options({ok, Doc},Options) ->
- apply_open_options2(Doc,Options);
-apply_open_options(Else,_Options) ->
- Else.
-
-apply_open_options2(Doc,[]) ->
- {ok, Doc};
-apply_open_options2(#doc{atts=Atts,revs=Revs}=Doc,
- [{atts_since, PossibleAncestors}|Rest]) ->
- RevPos = find_ancestor_rev_pos(Revs, PossibleAncestors),
- apply_open_options2(Doc#doc{atts=[A#att{data=
- if AttPos>RevPos -> Data; true -> stub end}
- || #att{revpos=AttPos,data=Data}=A <- Atts]}, Rest);
-apply_open_options2(Doc,[_|Rest]) ->
- apply_open_options2(Doc,Rest).
-
-
-find_ancestor_rev_pos({_, []}, _AttsSinceRevs) ->
- 0;
-find_ancestor_rev_pos(_DocRevs, []) ->
- 0;
-find_ancestor_rev_pos({RevPos, [RevId|Rest]}, AttsSinceRevs) ->
- case lists:member({RevPos, RevId}, AttsSinceRevs) of
- true ->
- RevPos;
- false ->
- find_ancestor_rev_pos({RevPos - 1, Rest}, AttsSinceRevs)
- end.
-
-open_doc_revs(Db, Id, Revs, Options) ->
- increment_stat(Db, {couchdb, database_reads}),
- [{ok, Results}] = open_doc_revs_int(Db, [{Id, Revs}], Options),
- {ok, [apply_open_options(Result, Options) || Result <- Results]}.
-
-% The result is a list of tuples:
-% {Id, MissingRevs, PossibleAncestors}
-% If no revs are missing for an Id, it is omitted from the results.
-get_missing_revs(Db, IdRevsList) ->
- Results = get_full_doc_infos(Db, [Id1 || {Id1, _Revs} <- IdRevsList]),
- {ok, find_missing(IdRevsList, Results)}.
-
-find_missing([], []) ->
- [];
-find_missing([{Id, Revs}|RestIdRevs], [{ok, FullInfo} | RestLookupInfo]) ->
- case couch_key_tree:find_missing(FullInfo#full_doc_info.rev_tree, Revs) of
- [] ->
- find_missing(RestIdRevs, RestLookupInfo);
- MissingRevs ->
- #doc_info{revs=RevsInfo} = couch_doc:to_doc_info(FullInfo),
- LeafRevs = [Rev || #rev_info{rev=Rev} <- RevsInfo],
- % Find the revs that are possible parents of this rev
- PossibleAncestors =
- lists:foldl(fun({LeafPos, LeafRevId}, Acc) ->
-                % this leaf is a "possible ancestor" of the missing
-                % revs if this LeafPos is less than any of the missing revs
- case lists:any(fun({MissingPos, _}) ->
- LeafPos < MissingPos end, MissingRevs) of
- true ->
- [{LeafPos, LeafRevId} | Acc];
- false ->
- Acc
- end
- end, [], LeafRevs),
- [{Id, MissingRevs, PossibleAncestors} |
- find_missing(RestIdRevs, RestLookupInfo)]
- end;
-find_missing([{Id, Revs}|RestIdRevs], [not_found | RestLookupInfo]) ->
- [{Id, Revs, []} | find_missing(RestIdRevs, RestLookupInfo)].
-
-get_doc_info(Db, Id) ->
- case get_full_doc_info(Db, Id) of
- {ok, DocInfo} ->
- {ok, couch_doc:to_doc_info(DocInfo)};
- Else ->
- Else
- end.
-
-% returns {ok, FullDocInfo} or not_found
-get_full_doc_info(Db, Id) ->
- [Result] = get_full_doc_infos(Db, [Id]),
- Result.
-
-get_full_doc_infos(Db, Ids) ->
- couch_btree:lookup(Db#db.fulldocinfo_by_id_btree, Ids).
-
-increment_update_seq(#db{update_pid=UpdatePid}) ->
- gen_server:call(UpdatePid, increment_update_seq).
-
-purge_docs(#db{update_pid=UpdatePid}, IdsRevs) ->
- gen_server:call(UpdatePid, {purge_docs, IdsRevs}).
-
-get_committed_update_seq(#db{committed_update_seq=Seq}) ->
- Seq.
-
-get_update_seq(#db{update_seq=Seq})->
- Seq.
-
-get_purge_seq(#db{header=#db_header{purge_seq=PurgeSeq}})->
- PurgeSeq.
-
-get_last_purged(#db{header=#db_header{purged_docs=nil}}) ->
- {ok, []};
-get_last_purged(#db{fd=Fd, header=#db_header{purged_docs=PurgedPointer}}) ->
- couch_file:pread_term(Fd, PurgedPointer).
-
-get_db_info(Db) ->
- #db{fd=Fd,
- header=#db_header{disk_version=DiskVersion},
- compactor_pid=Compactor,
- update_seq=SeqNum,
- name=Name,
- fulldocinfo_by_id_btree=FullDocBtree,
- instance_start_time=StartTime,
- committed_update_seq=CommittedUpdateSeq} = Db,
- {ok, Size} = couch_file:bytes(Fd),
- {ok, {Count, DelCount}} = couch_btree:full_reduce(FullDocBtree),
- InfoList = [
- {db_name, Name},
- {doc_count, Count},
- {doc_del_count, DelCount},
- {update_seq, SeqNum},
- {purge_seq, couch_db:get_purge_seq(Db)},
- {compact_running, Compactor/=nil},
- {disk_size, Size},
- {instance_start_time, StartTime},
- {disk_format_version, DiskVersion},
- {committed_update_seq, CommittedUpdateSeq}
- ],
- {ok, InfoList}.
-
-get_design_docs(#db{fulldocinfo_by_id_btree=Btree}=Db) ->
- {ok,_, Docs} = couch_btree:fold(Btree,
- fun(#full_doc_info{id= <<"_design/",_/binary>>}=FullDocInfo, _Reds, AccDocs) ->
- {ok, Doc} = couch_db:open_doc_int(Db, FullDocInfo, []),
- {ok, [Doc | AccDocs]};
- (_, _Reds, AccDocs) ->
- {stop, AccDocs}
- end,
- [], [{start_key, <<"_design/">>}, {end_key_gt, <<"_design0">>}]),
- {ok, Docs}.
-
-check_is_admin(#db{user_ctx=#user_ctx{name=Name,roles=Roles}}=Db) ->
- {Admins} = get_admins(Db),
- AdminRoles = [<<"_admin">> | couch_util:get_value(<<"roles">>, Admins, [])],
- AdminNames = couch_util:get_value(<<"names">>, Admins,[]),
- case AdminRoles -- Roles of
- AdminRoles -> % same list, not an admin role
- case AdminNames -- [Name] of
- AdminNames -> % same names, not an admin
- throw({unauthorized, <<"You are not a db or server admin.">>});
- _ ->
- ok
- end;
- _ ->
- ok
- end.
-
-check_is_reader(#db{user_ctx=#user_ctx{name=Name,roles=Roles}=UserCtx}=Db) ->
- case (catch check_is_admin(Db)) of
- ok -> ok;
- _ ->
- {Readers} = get_readers(Db),
- ReaderRoles = couch_util:get_value(<<"roles">>, Readers,[]),
- WithAdminRoles = [<<"_admin">> | ReaderRoles],
- ReaderNames = couch_util:get_value(<<"names">>, Readers,[]),
- case ReaderRoles ++ ReaderNames of
- [] -> ok; % no readers == public access
- _Else ->
- case WithAdminRoles -- Roles of
-                WithAdminRoles -> % same list, not a reader role
- case ReaderNames -- [Name] of
- ReaderNames -> % same names, not a reader
- ?LOG_DEBUG("Not a reader: UserCtx ~p vs Names ~p Roles ~p",[UserCtx, ReaderNames, WithAdminRoles]),
- throw({unauthorized, <<"You are not authorized to access this db.">>});
- _ ->
- ok
- end;
- _ ->
- ok
- end
- end
- end.
-
-get_admins(#db{security=SecProps}) ->
- couch_util:get_value(<<"admins">>, SecProps, {[]}).
-
-get_readers(#db{security=SecProps}) ->
- couch_util:get_value(<<"readers">>, SecProps, {[]}).
-
-get_security(#db{security=SecProps}) ->
- {SecProps}.
-
-set_security(#db{update_pid=Pid}=Db, {NewSecProps}) when is_list(NewSecProps) ->
- check_is_admin(Db),
- ok = validate_security_object(NewSecProps),
- ok = gen_server:call(Pid, {set_security, NewSecProps}, infinity),
- {ok, _} = ensure_full_commit(Db),
- ok;
-set_security(_, _) ->
- throw(bad_request).
-
-validate_security_object(SecProps) ->
- Admins = couch_util:get_value(<<"admins">>, SecProps, {[]}),
- Readers = couch_util:get_value(<<"readers">>, SecProps, {[]}),
- ok = validate_names_and_roles(Admins),
- ok = validate_names_and_roles(Readers),
- ok.
-
-% validate user input
-validate_names_and_roles({Props}) when is_list(Props) ->
- case couch_util:get_value(<<"names">>,Props,[]) of
- Ns when is_list(Ns) ->
- [throw("names must be a JSON list of strings") ||N <- Ns, not is_binary(N)],
- Ns;
- _ -> throw("names must be a JSON list of strings")
- end,
- case couch_util:get_value(<<"roles">>,Props,[]) of
- Rs when is_list(Rs) ->
- [throw("roles must be a JSON list of strings") ||R <- Rs, not is_binary(R)],
- Rs;
- _ -> throw("roles must be a JSON list of strings")
- end,
- ok.
-
-get_revs_limit(#db{revs_limit=Limit}) ->
- Limit.
-
-set_revs_limit(#db{update_pid=Pid}=Db, Limit) when Limit > 0 ->
- check_is_admin(Db),
- gen_server:call(Pid, {set_revs_limit, Limit}, infinity);
-set_revs_limit(_Db, _Limit) ->
- throw(invalid_revs_limit).
-
-name(#db{name=Name}) ->
- Name.
-
-update_doc(Db, Doc, Options) ->
- update_doc(Db, Doc, Options, interactive_edit).
-
-update_doc(Db, Doc, Options, UpdateType) ->
- case update_docs(Db, [Doc], Options, UpdateType) of
- {ok, [{ok, NewRev}]} ->
- {ok, NewRev};
- {ok, [{{_Id, _Rev}, Error}]} ->
- throw(Error);
- {ok, [Error]} ->
- throw(Error);
- {ok, []} ->
- % replication success
- {Pos, [RevId | _]} = Doc#doc.revs,
- {ok, {Pos, RevId}}
- end.
-
-update_docs(Db, Docs) ->
- update_docs(Db, Docs, []).
-
-% group_alike_docs groups the sorted documents into sublist buckets, by id.
-% ([DocA, DocA, DocB, DocC], []) -> [[DocA, DocA], [DocB], [DocC]]
-group_alike_docs(Docs) ->
- Sorted = lists:sort(fun(#doc{id=A},#doc{id=B})-> A < B end, Docs),
- group_alike_docs(Sorted, []).
-
-group_alike_docs([], Buckets) ->
- lists:reverse(Buckets);
-group_alike_docs([Doc|Rest], []) ->
- group_alike_docs(Rest, [[Doc]]);
-group_alike_docs([Doc|Rest], [Bucket|RestBuckets]) ->
- [#doc{id=BucketId}|_] = Bucket,
- case Doc#doc.id == BucketId of
- true ->
- % add to existing bucket
- group_alike_docs(Rest, [[Doc|Bucket]|RestBuckets]);
- false ->
- % add to new bucket
- group_alike_docs(Rest, [[Doc]|[Bucket|RestBuckets]])
- end.
-
-validate_doc_update(#db{}=Db, #doc{id= <<"_design/",_/binary>>}, _GetDiskDocFun) ->
- catch check_is_admin(Db);
-validate_doc_update(#db{validate_doc_funs=[]}, _Doc, _GetDiskDocFun) ->
- ok;
-validate_doc_update(_Db, #doc{id= <<"_local/",_/binary>>}, _GetDiskDocFun) ->
- ok;
-validate_doc_update(Db, Doc, GetDiskDocFun) ->
- DiskDoc = GetDiskDocFun(),
- JsonCtx = couch_util:json_user_ctx(Db),
- SecObj = get_security(Db),
- try [case Fun(Doc, DiskDoc, JsonCtx, SecObj) of
- ok -> ok;
- Error -> throw(Error)
- end || Fun <- Db#db.validate_doc_funs],
- ok
- catch
- throw:Error ->
- Error
- end.
-
-
-prep_and_validate_update(Db, #doc{id=Id,revs={RevStart, Revs}}=Doc,
- OldFullDocInfo, LeafRevsDict, AllowConflict) ->
- case Revs of
- [PrevRev|_] ->
- case dict:find({RevStart, PrevRev}, LeafRevsDict) of
- {ok, {Deleted, DiskSp, DiskRevs}} ->
- case couch_doc:has_stubs(Doc) of
- true ->
- DiskDoc = make_doc(Db, Id, Deleted, DiskSp, DiskRevs),
- Doc2 = couch_doc:merge_stubs(Doc, DiskDoc),
- {validate_doc_update(Db, Doc2, fun() -> DiskDoc end), Doc2};
- false ->
- LoadDiskDoc = fun() -> make_doc(Db,Id,Deleted,DiskSp,DiskRevs) end,
- {validate_doc_update(Db, Doc, LoadDiskDoc), Doc}
- end;
- error when AllowConflict ->
- couch_doc:merge_stubs(Doc, #doc{}), % will generate error if
- % there are stubs
- {validate_doc_update(Db, Doc, fun() -> nil end), Doc};
- error ->
- {conflict, Doc}
- end;
- [] ->
- % new doc, and we have existing revs.
- % reuse existing deleted doc
- if OldFullDocInfo#full_doc_info.deleted orelse AllowConflict ->
- {validate_doc_update(Db, Doc, fun() -> nil end), Doc};
- true ->
- {conflict, Doc}
- end
- end.
-
-
-
-prep_and_validate_updates(_Db, [], [], _AllowConflict, AccPrepped,
- AccFatalErrors) ->
- {AccPrepped, AccFatalErrors};
-prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups],
- AllowConflict, AccPrepped, AccErrors) ->
- [#doc{id=Id}|_]=DocBucket,
- % no existing revs are known,
- {PreppedBucket, AccErrors3} = lists:foldl(
- fun(#doc{revs=Revs}=Doc, {AccBucket, AccErrors2}) ->
- case couch_doc:has_stubs(Doc) of
- true ->
- couch_doc:merge_stubs(Doc, #doc{}); % will throw exception
- false -> ok
- end,
- case Revs of
- {0, []} ->
- case validate_doc_update(Db, Doc, fun() -> nil end) of
- ok ->
- {[Doc | AccBucket], AccErrors2};
- Error ->
- {AccBucket, [{{Id, {0, []}}, Error} | AccErrors2]}
- end;
- _ ->
- % old revs specified but none exist, a conflict
- {AccBucket, [{{Id, Revs}, conflict} | AccErrors2]}
- end
- end,
- {[], AccErrors}, DocBucket),
-
- prep_and_validate_updates(Db, RestBuckets, RestLookups, AllowConflict,
- [PreppedBucket | AccPrepped], AccErrors3);
-prep_and_validate_updates(Db, [DocBucket|RestBuckets],
- [{ok, #full_doc_info{rev_tree=OldRevTree}=OldFullDocInfo}|RestLookups],
- AllowConflict, AccPrepped, AccErrors) ->
- Leafs = couch_key_tree:get_all_leafs(OldRevTree),
- LeafRevsDict = dict:from_list([{{Start, RevId}, {Deleted, Sp, Revs}} ||
- {{Deleted, Sp, _Seq}, {Start, [RevId|_]}=Revs} <- Leafs]),
- {PreppedBucket, AccErrors3} = lists:foldl(
- fun(Doc, {Docs2Acc, AccErrors2}) ->
- case prep_and_validate_update(Db, Doc, OldFullDocInfo,
- LeafRevsDict, AllowConflict) of
- {ok, Doc2} ->
- {[Doc2 | Docs2Acc], AccErrors2};
- {Error, #doc{id=Id,revs=Revs}} ->
- % Record the error
- {Docs2Acc, [{{Id, Revs}, Error} |AccErrors2]}
- end
- end,
- {[], AccErrors}, DocBucket),
- prep_and_validate_updates(Db, RestBuckets, RestLookups, AllowConflict,
- [PreppedBucket | AccPrepped], AccErrors3).
-
-
-update_docs(Db, Docs, Options) ->
- update_docs(Db, Docs, Options, interactive_edit).
-
-
-prep_and_validate_replicated_updates(_Db, [], [], AccPrepped, AccErrors) ->
- Errors2 = [{{Id, {Pos, Rev}}, Error} ||
- {#doc{id=Id,revs={Pos,[Rev|_]}}, Error} <- AccErrors],
- {lists:reverse(AccPrepped), lists:reverse(Errors2)};
-prep_and_validate_replicated_updates(Db, [Bucket|RestBuckets], [OldInfo|RestOldInfo], AccPrepped, AccErrors) ->
- case OldInfo of
- not_found ->
- {ValidatedBucket, AccErrors3} = lists:foldl(
- fun(Doc, {AccPrepped2, AccErrors2}) ->
- case couch_doc:has_stubs(Doc) of
- true ->
- couch_doc:merge_stubs(Doc, #doc{}); % will throw exception
- false -> ok
- end,
- case validate_doc_update(Db, Doc, fun() -> nil end) of
- ok ->
- {[Doc | AccPrepped2], AccErrors2};
- Error ->
- {AccPrepped2, [{Doc, Error} | AccErrors2]}
- end
- end,
- {[], AccErrors}, Bucket),
- prep_and_validate_replicated_updates(Db, RestBuckets, RestOldInfo, [ValidatedBucket | AccPrepped], AccErrors3);
- {ok, #full_doc_info{rev_tree=OldTree}} ->
- NewRevTree = lists:foldl(
- fun(NewDoc, AccTree) ->
- {NewTree, _} = couch_key_tree:merge(AccTree,
- couch_db:doc_to_tree(NewDoc), Db#db.revs_limit),
- NewTree
- end,
- OldTree, Bucket),
- Leafs = couch_key_tree:get_all_leafs_full(NewRevTree),
- LeafRevsFullDict = dict:from_list( [{{Start, RevId}, FullPath} || {Start, [{RevId, _}|_]}=FullPath <- Leafs]),
- {ValidatedBucket, AccErrors3} =
- lists:foldl(
- fun(#doc{id=Id,revs={Pos, [RevId|_]}}=Doc, {AccValidated, AccErrors2}) ->
- case dict:find({Pos, RevId}, LeafRevsFullDict) of
- {ok, {Start, Path}} ->
- % our unflushed doc is a leaf node. Go back on the path
- % to find the previous rev that's on disk.
-
- LoadPrevRevFun = fun() ->
- make_first_doc_on_disk(Db,Id,Start-1, tl(Path))
- end,
-
- case couch_doc:has_stubs(Doc) of
- true ->
- DiskDoc = LoadPrevRevFun(),
- Doc2 = couch_doc:merge_stubs(Doc, DiskDoc),
- GetDiskDocFun = fun() -> DiskDoc end;
- false ->
- Doc2 = Doc,
- GetDiskDocFun = LoadPrevRevFun
- end,
-
- case validate_doc_update(Db, Doc2, GetDiskDocFun) of
- ok ->
- {[Doc2 | AccValidated], AccErrors2};
- Error ->
- {AccValidated, [{Doc, Error} | AccErrors2]}
- end;
- _ ->
- % this doc isn't a leaf or already exists in the tree.
- % ignore but consider it a success.
- {AccValidated, AccErrors2}
- end
- end,
- {[], AccErrors}, Bucket),
- prep_and_validate_replicated_updates(Db, RestBuckets, RestOldInfo,
- [ValidatedBucket | AccPrepped], AccErrors3)
- end.
-
-
-
-new_revid(#doc{body=Body,revs={OldStart,OldRevs},
- atts=Atts,deleted=Deleted}) ->
- case [{N, T, M} || #att{name=N,type=T,md5=M} <- Atts, M =/= <<>>] of
- Atts2 when length(Atts) =/= length(Atts2) ->
- % We must have old style non-md5 attachments
- ?l2b(integer_to_list(couch_util:rand32()));
- Atts2 ->
- OldRev = case OldRevs of [] -> 0; [OldRev0|_] -> OldRev0 end,
- couch_util:md5(term_to_binary([Deleted, OldStart, OldRev, Body, Atts2]))
- end.
-
-new_revs([], OutBuckets, IdRevsAcc) ->
- {lists:reverse(OutBuckets), IdRevsAcc};
-new_revs([Bucket|RestBuckets], OutBuckets, IdRevsAcc) ->
- {NewBucket, IdRevsAcc3} = lists:mapfoldl(
- fun(#doc{id=Id,revs={Start, RevIds}}=Doc, IdRevsAcc2)->
- NewRevId = new_revid(Doc),
- {Doc#doc{revs={Start+1, [NewRevId | RevIds]}},
- [{{Id, {Start, RevIds}}, {ok, {Start+1, NewRevId}}} | IdRevsAcc2]}
- end, IdRevsAcc, Bucket),
- new_revs(RestBuckets, [NewBucket|OutBuckets], IdRevsAcc3).
-
-check_dup_atts(#doc{atts=Atts}=Doc) ->
- Atts2 = lists:sort(fun(#att{name=N1}, #att{name=N2}) -> N1 < N2 end, Atts),
- check_dup_atts2(Atts2),
- Doc.
-
-check_dup_atts2([#att{name=N}, #att{name=N} | _]) ->
- throw({bad_request, <<"Duplicate attachments">>});
-check_dup_atts2([_ | Rest]) ->
- check_dup_atts2(Rest);
-check_dup_atts2(_) ->
- ok.
-
-
-update_docs(Db, Docs, Options, replicated_changes) ->
- increment_stat(Db, {couchdb, database_writes}),
- DocBuckets = group_alike_docs(Docs),
-
- case (Db#db.validate_doc_funs /= []) orelse
- lists:any(
- fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) -> true;
- (#doc{atts=Atts}) ->
- Atts /= []
- end, Docs) of
- true ->
- Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
- ExistingDocs = get_full_doc_infos(Db, Ids),
-
- {DocBuckets2, DocErrors} =
- prep_and_validate_replicated_updates(Db, DocBuckets, ExistingDocs, [], []),
- DocBuckets3 = [Bucket || [_|_]=Bucket <- DocBuckets2]; % remove empty buckets
- false ->
- DocErrors = [],
- DocBuckets3 = DocBuckets
- end,
- DocBuckets4 = [[doc_flush_atts(check_dup_atts(Doc), Db#db.fd)
- || Doc <- Bucket] || Bucket <- DocBuckets3],
- {ok, []} = write_and_commit(Db, DocBuckets4, [], [merge_conflicts | Options]),
- {ok, DocErrors};
-
-update_docs(Db, Docs, Options, interactive_edit) ->
- increment_stat(Db, {couchdb, database_writes}),
- AllOrNothing = lists:member(all_or_nothing, Options),
- % go ahead and generate the new revision ids for the documents.
- % separate out the NonRep documents from the rest of the documents
- {Docs2, NonRepDocs} = lists:foldl(
- fun(#doc{id=Id}=Doc, {DocsAcc, NonRepDocsAcc}) ->
- case Id of
- <<?LOCAL_DOC_PREFIX, _/binary>> ->
- {DocsAcc, [Doc | NonRepDocsAcc]};
- Id->
- {[Doc | DocsAcc], NonRepDocsAcc}
- end
- end, {[], []}, Docs),
-
- DocBuckets = group_alike_docs(Docs2),
-
- case (Db#db.validate_doc_funs /= []) orelse
- lists:any(
- fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) ->
- true;
- (#doc{atts=Atts}) ->
- Atts /= []
- end, Docs2) of
- true ->
-        % look up the docs by id and get the most recent full doc infos
- Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
- ExistingDocInfos = get_full_doc_infos(Db, Ids),
-
- {DocBucketsPrepped, PreCommitFailures} = prep_and_validate_updates(Db,
- DocBuckets, ExistingDocInfos, AllOrNothing, [], []),
-
- % strip out any empty buckets
- DocBuckets2 = [Bucket || [_|_] = Bucket <- DocBucketsPrepped];
- false ->
- PreCommitFailures = [],
- DocBuckets2 = DocBuckets
- end,
-
- if (AllOrNothing) and (PreCommitFailures /= []) ->
- {aborted, lists:map(
- fun({{Id,{Pos, [RevId|_]}}, Error}) ->
- {{Id, {Pos, RevId}}, Error};
- ({{Id,{0, []}}, Error}) ->
- {{Id, {0, <<>>}}, Error}
- end, PreCommitFailures)};
- true ->
- Options2 = if AllOrNothing -> [merge_conflicts];
- true -> [] end ++ Options,
- DocBuckets3 = [[
- doc_flush_atts(set_new_att_revpos(
- check_dup_atts(Doc)), Db#db.fd)
- || Doc <- B] || B <- DocBuckets2],
- {DocBuckets4, IdRevs} = new_revs(DocBuckets3, [], []),
-
- {ok, CommitResults} = write_and_commit(Db, DocBuckets4, NonRepDocs, Options2),
-
- ResultsDict = dict:from_list(IdRevs ++ CommitResults ++ PreCommitFailures),
- {ok, lists:map(
- fun(#doc{id=Id,revs={Pos, RevIds}}) ->
- {ok, Result} = dict:find({Id, {Pos, RevIds}}, ResultsDict),
- Result
- end, Docs)}
- end.
-
-% Returns the first available document on disk. The input list is a full rev
-% path for the doc.
-make_first_doc_on_disk(_Db, _Id, _Pos, []) ->
- nil;
-make_first_doc_on_disk(Db, Id, Pos, [{_Rev, ?REV_MISSING}|RestPath]) ->
- make_first_doc_on_disk(Db, Id, Pos - 1, RestPath);
-make_first_doc_on_disk(Db, Id, Pos, [{_Rev, {IsDel, Sp, _Seq}} |_]=DocPath) ->
- Revs = [Rev || {Rev, _} <- DocPath],
- make_doc(Db, Id, IsDel, Sp, {Pos, Revs}).
-
-set_commit_option(Options) ->
- CommitSettings = {
- [true || O <- Options, O==full_commit orelse O==delay_commit],
- couch_config:get("couchdb", "delayed_commits", "false")
- },
- case CommitSettings of
- {[true], _} ->
- Options; % user requested explicit commit setting, do not change it
- {_, "true"} ->
- Options; % delayed commits are enabled, do nothing
- {_, "false"} ->
- [full_commit|Options];
- {_, Else} ->
- ?LOG_ERROR("[couchdb] delayed_commits setting must be true/false, not ~p",
- [Else]),
- [full_commit|Options]
- end.
-
-collect_results(UpdatePid, MRef, ResultsAcc) ->
- receive
- {result, UpdatePid, Result} ->
- collect_results(UpdatePid, MRef, [Result | ResultsAcc]);
- {done, UpdatePid} ->
- {ok, ResultsAcc};
- {retry, UpdatePid} ->
- retry;
- {'DOWN', MRef, _, _, Reason} ->
- exit(Reason)
- end.
-
-write_and_commit(#db{update_pid=UpdatePid}=Db, DocBuckets,
- NonRepDocs, Options0) ->
- Options = set_commit_option(Options0),
- MergeConflicts = lists:member(merge_conflicts, Options),
- FullCommit = lists:member(full_commit, Options),
- MRef = erlang:monitor(process, UpdatePid),
- try
- UpdatePid ! {update_docs, self(), DocBuckets, NonRepDocs, MergeConflicts, FullCommit},
- case collect_results(UpdatePid, MRef, []) of
- {ok, Results} -> {ok, Results};
- retry ->
- % This can happen if the db file we wrote to was swapped out by
- % compaction. Retry by reopening the db and writing to the current file
- {ok, Db2} = open_ref_counted(Db#db.main_pid, self()),
- DocBuckets2 = [[doc_flush_atts(Doc, Db2#db.fd) || Doc <- Bucket] || Bucket <- DocBuckets],
- % We only retry once
- close(Db2),
- UpdatePid ! {update_docs, self(), DocBuckets2, NonRepDocs, MergeConflicts, FullCommit},
- case collect_results(UpdatePid, MRef, []) of
- {ok, Results} -> {ok, Results};
- retry -> throw({update_error, compaction_retry})
- end
- end
- after
- erlang:demonitor(MRef, [flush])
- end.
-
-
-set_new_att_revpos(#doc{revs={RevPos,_Revs},atts=Atts}=Doc) ->
- Doc#doc{atts= lists:map(fun(#att{data={_Fd,_Sp}}=Att) ->
-        % already committed to disk, do not set a new rev
- Att;
- (Att) ->
- Att#att{revpos=RevPos+1}
- end, Atts)}.
-
-
-doc_flush_atts(Doc, Fd) ->
- Doc#doc{atts=[flush_att(Fd, Att) || Att <- Doc#doc.atts]}.
-
-check_md5(_NewSig, <<>>) -> ok;
-check_md5(Sig1, Sig2) when Sig1 == Sig2 -> ok;
-check_md5(_, _) -> throw(md5_mismatch).
-
-flush_att(Fd, #att{data={Fd0, _}}=Att) when Fd0 == Fd ->
- % already written to our file, nothing to write
- Att;
-
-flush_att(Fd, #att{data={OtherFd,StreamPointer}, md5=InMd5,
- disk_len=InDiskLen} = Att) ->
- {NewStreamData, Len, _IdentityLen, Md5, IdentityMd5} =
- couch_stream:copy_to_new_stream(OtherFd, StreamPointer, Fd),
- check_md5(IdentityMd5, InMd5),
- Att#att{data={Fd, NewStreamData}, md5=Md5, att_len=Len, disk_len=InDiskLen};
-
-flush_att(Fd, #att{data=Data}=Att) when is_binary(Data) ->
- with_stream(Fd, Att, fun(OutputStream) ->
- couch_stream:write(OutputStream, Data)
- end);
-
-flush_att(Fd, #att{data=Fun,att_len=undefined}=Att) when is_function(Fun) ->
- with_stream(Fd, Att, fun(OutputStream) ->
- % Fun(MaxChunkSize, WriterFun) must call WriterFun
-        % once for each chunk of the attachment.
- Fun(4096,
- % WriterFun({Length, Binary}, State)
- % WriterFun({0, _Footers}, State)
- % Called with Length == 0 on the last time.
- % WriterFun returns NewState.
- fun({0, Footers}, _) ->
- F = mochiweb_headers:from_binary(Footers),
- case mochiweb_headers:get_value("Content-MD5", F) of
- undefined ->
- ok;
- Md5 ->
- {md5, base64:decode(Md5)}
- end;
- ({_Length, Chunk}, _) ->
- couch_stream:write(OutputStream, Chunk)
- end, ok)
- end);
-
-flush_att(Fd, #att{data=Fun,att_len=AttLen}=Att) when is_function(Fun) ->
- with_stream(Fd, Att, fun(OutputStream) ->
- write_streamed_attachment(OutputStream, Fun, AttLen)
- end).
-
-% From RFC 2616 3.6.1 - Chunked Transfer Coding
-%
-% In other words, the origin server is willing to accept
-% the possibility that the trailer fields might be silently
-% discarded along the path to the client.
-%
-% I take this to mean that if "Trailers: Content-MD5\r\n"
-% is present in the request, but there is no Content-MD5
-% trailer, we're free to ignore this inconsistency and
-% pretend that no Content-MD5 exists.
-with_stream(Fd, #att{md5=InMd5,type=Type,encoding=Enc}=Att, Fun) ->
- {ok, OutputStream} = case (Enc =:= identity) andalso
- couch_util:compressible_att_type(Type) of
- true ->
- CompLevel = list_to_integer(
- couch_config:get("attachments", "compression_level", "0")
- ),
- couch_stream:open(Fd, gzip, [{compression_level, CompLevel}]);
- _ ->
- couch_stream:open(Fd)
- end,
- ReqMd5 = case Fun(OutputStream) of
- {md5, FooterMd5} ->
- case InMd5 of
- md5_in_footer -> FooterMd5;
- _ -> InMd5
- end;
- _ ->
- InMd5
- end,
- {StreamInfo, Len, IdentityLen, Md5, IdentityMd5} =
- couch_stream:close(OutputStream),
- check_md5(IdentityMd5, ReqMd5),
- {AttLen, DiskLen, NewEnc} = case Enc of
- identity ->
- case {Md5, IdentityMd5} of
- {Same, Same} ->
- {Len, IdentityLen, identity};
- _ ->
- {Len, IdentityLen, gzip}
- end;
- gzip ->
- case {Att#att.att_len, Att#att.disk_len} of
- {AL, DL} when AL =:= undefined orelse DL =:= undefined ->
- % Compressed attachment uploaded through the standalone API.
- {Len, Len, gzip};
- {AL, DL} ->
- % This case is used for efficient push-replication, where a
-            % compressed attachment is located in the body of a multipart
-            % content-type request.
- {AL, DL, gzip}
- end
- end,
- Att#att{
- data={Fd,StreamInfo},
- att_len=AttLen,
- disk_len=DiskLen,
- md5=Md5,
- encoding=NewEnc
- }.
-
-
-write_streamed_attachment(_Stream, _F, 0) ->
- ok;
-write_streamed_attachment(Stream, F, LenLeft) when LenLeft > 0 ->
- Bin = F(),
- ok = couch_stream:write(Stream, Bin),
- write_streamed_attachment(Stream, F, LenLeft - size(Bin)).
-
-enum_docs_since_reduce_to_count(Reds) ->
- couch_btree:final_reduce(
- fun couch_db_updater:btree_by_seq_reduce/2, Reds).
-
-enum_docs_reduce_to_count(Reds) ->
- {Count, _DelCount} = couch_btree:final_reduce(
- fun couch_db_updater:btree_by_id_reduce/2, Reds),
- Count.
-
-changes_since(Db, Style, StartSeq, Fun, Acc) ->
- changes_since(Db, Style, StartSeq, Fun, [], Acc).
-
-changes_since(Db, Style, StartSeq, Fun, Options, Acc) ->
- Wrapper = fun(DocInfo, _Offset, Acc2) ->
- #doc_info{revs=Revs} = DocInfo,
- DocInfo2 =
- case Style of
- main_only ->
- DocInfo;
- all_docs ->
- % remove revs before the seq
- DocInfo#doc_info{revs=[RevInfo ||
- #rev_info{seq=RevSeq}=RevInfo <- Revs, StartSeq < RevSeq]}
- end,
- Fun(DocInfo2, Acc2)
- end,
- {ok, _LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree,
- Wrapper, Acc, [{start_key, StartSeq + 1}] ++ Options),
- {ok, AccOut}.
-
-count_changes_since(Db, SinceSeq) ->
- {ok, Changes} =
- couch_btree:fold_reduce(Db#db.docinfo_by_seq_btree,
- fun(_SeqStart, PartialReds, 0) ->
- {ok, couch_btree:final_reduce(Db#db.docinfo_by_seq_btree, PartialReds)}
- end,
- 0, [{start_key, SinceSeq + 1}]),
- Changes.
-
-enum_docs_since(Db, SinceSeq, InFun, Acc, Options) ->
- {ok, LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree, InFun, Acc, [{start_key, SinceSeq + 1} | Options]),
- {ok, enum_docs_since_reduce_to_count(LastReduction), AccOut}.
-
-enum_docs(Db, InFun, InAcc, Options) ->
- {ok, LastReduce, OutAcc} = couch_btree:fold(Db#db.fulldocinfo_by_id_btree, InFun, InAcc, Options),
- {ok, enum_docs_reduce_to_count(LastReduce), OutAcc}.
-
-% server functions
-
-init({DbName, Filepath, Fd, Options}) ->
- {ok, UpdaterPid} = gen_server:start_link(couch_db_updater, {self(), DbName, Filepath, Fd, Options}, []),
- {ok, #db{fd_ref_counter=RefCntr}=Db} = gen_server:call(UpdaterPid, get_db),
- couch_ref_counter:add(RefCntr),
- case lists:member(sys_db, Options) of
- true ->
- ok;
- false ->
- couch_stats_collector:track_process_count({couchdb, open_databases})
- end,
- process_flag(trap_exit, true),
- {ok, Db}.
-
-terminate(_Reason, Db) ->
- couch_util:shutdown_sync(Db#db.update_pid),
- ok.
-
-handle_call({open_ref_count, OpenerPid}, _, #db{fd_ref_counter=RefCntr}=Db) ->
- ok = couch_ref_counter:add(RefCntr, OpenerPid),
- {reply, {ok, Db}, Db};
-handle_call(is_idle, _From, #db{fd_ref_counter=RefCntr, compactor_pid=Compact,
- waiting_delayed_commit=Delay}=Db) ->
- % Idle means no referrers. Unless in the middle of a compaction file switch,
- % there are always at least 2 referrers, couch_db_updater and us.
- {reply, (Delay == nil) andalso (Compact == nil) andalso (couch_ref_counter:count(RefCntr) == 2), Db};
-handle_call({db_updated, NewDb}, _From, #db{fd_ref_counter=OldRefCntr}) ->
- #db{fd_ref_counter=NewRefCntr}=NewDb,
- case NewRefCntr =:= OldRefCntr of
- true -> ok;
- false ->
- couch_ref_counter:add(NewRefCntr),
- couch_ref_counter:drop(OldRefCntr)
- end,
- {reply, ok, NewDb};
-handle_call(get_db, _From, Db) ->
- {reply, {ok, Db}, Db}.
-
-
-handle_cast(Msg, Db) ->
- ?LOG_ERROR("Bad cast message received for db ~s: ~p", [Db#db.name, Msg]),
- exit({error, Msg}).
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info({'EXIT', _Pid, normal}, Db) ->
- {noreply, Db};
-handle_info({'EXIT', _Pid, Reason}, Server) ->
- {stop, Reason, Server};
-handle_info(Msg, Db) ->
- ?LOG_ERROR("Bad message received for db ~s: ~p", [Db#db.name, Msg]),
- exit({error, Msg}).
-
-
-%%% Internal functions %%%
-open_doc_revs_int(Db, IdRevs, Options) ->
- Ids = [Id || {Id, _Revs} <- IdRevs],
- LookupResults = get_full_doc_infos(Db, Ids),
- lists:zipwith(
- fun({Id, Revs}, Lookup) ->
- case Lookup of
- {ok, #full_doc_info{rev_tree=RevTree}} ->
- {FoundRevs, MissingRevs} =
- case Revs of
- all ->
- {couch_key_tree:get_all_leafs(RevTree), []};
- _ ->
- case lists:member(latest, Options) of
- true ->
- couch_key_tree:get_key_leafs(RevTree, Revs);
- false ->
- couch_key_tree:get(RevTree, Revs)
- end
- end,
- FoundResults =
- lists:map(fun({Value, {Pos, [Rev|_]}=FoundRevPath}) ->
- case Value of
- ?REV_MISSING ->
- % we have the rev in our list but know nothing about it
- {{not_found, missing}, {Pos, Rev}};
- {IsDeleted, SummaryPtr, _UpdateSeq} ->
- {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath)}
- end
- end, FoundRevs),
- Results = FoundResults ++ [{{not_found, missing}, MissingRev} || MissingRev <- MissingRevs],
- {ok, Results};
- not_found when Revs == all ->
- {ok, []};
- not_found ->
- {ok, [{{not_found, missing}, Rev} || Rev <- Revs]}
- end
- end,
- IdRevs, LookupResults).
-
-open_doc_int(Db, <<?LOCAL_DOC_PREFIX, _/binary>> = Id, _Options) ->
- case couch_btree:lookup(Db#db.local_docs_btree, [Id]) of
- [{ok, {_, {Rev, BodyData}}}] ->
- {ok, #doc{id=Id, revs={0, [list_to_binary(integer_to_list(Rev))]}, body=BodyData}};
- [not_found] ->
- {not_found, missing}
- end;
-open_doc_int(Db, #doc_info{id=Id,revs=[RevInfo|_]}=DocInfo, Options) ->
- #rev_info{deleted=IsDeleted,rev={Pos,RevId},body_sp=Bp} = RevInfo,
- Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos,[RevId]}),
- {ok, Doc#doc{meta=doc_meta_info(DocInfo, [], Options)}};
-open_doc_int(Db, #full_doc_info{id=Id,rev_tree=RevTree}=FullDocInfo, Options) ->
- #doc_info{revs=[#rev_info{deleted=IsDeleted,rev=Rev,body_sp=Bp}|_]} =
- DocInfo = couch_doc:to_doc_info(FullDocInfo),
- {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]),
- Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath),
- {ok, Doc#doc{meta=doc_meta_info(DocInfo, RevTree, Options)}};
-open_doc_int(Db, Id, Options) ->
- case get_full_doc_info(Db, Id) of
- {ok, FullDocInfo} ->
- open_doc_int(Db, FullDocInfo, Options);
- not_found ->
- {not_found, missing}
- end.
-
-doc_meta_info(#doc_info{high_seq=Seq,revs=[#rev_info{rev=Rev}|RestInfo]}, RevTree, Options) ->
- case lists:member(revs_info, Options) of
- false -> [];
- true ->
- {[{Pos, RevPath}],[]} =
- couch_key_tree:get_full_key_paths(RevTree, [Rev]),
-
- [{revs_info, Pos, lists:map(
- fun({Rev1, {true, _Sp, _UpdateSeq}}) ->
- {Rev1, deleted};
- ({Rev1, {false, _Sp, _UpdateSeq}}) ->
- {Rev1, available};
- ({Rev1, ?REV_MISSING}) ->
- {Rev1, missing}
- end, RevPath)}]
- end ++
- case lists:member(conflicts, Options) of
- false -> [];
- true ->
- case [Rev1 || #rev_info{rev=Rev1,deleted=false} <- RestInfo] of
- [] -> [];
- ConflictRevs -> [{conflicts, ConflictRevs}]
- end
- end ++
- case lists:member(deleted_conflicts, Options) of
- false -> [];
- true ->
- case [Rev1 || #rev_info{rev=Rev1,deleted=true} <- RestInfo] of
- [] -> [];
- DelConflictRevs -> [{deleted_conflicts, DelConflictRevs}]
- end
- end ++
- case lists:member(local_seq, Options) of
- false -> [];
- true -> [{local_seq, Seq}]
- end.
-
-read_doc(#db{fd=Fd}, OldStreamPointer) when is_tuple(OldStreamPointer) ->
- % 09 UPGRADE CODE
- couch_stream:old_read_term(Fd, OldStreamPointer);
-read_doc(#db{fd=Fd}, Pos) ->
- couch_file:pread_term(Fd, Pos).
-
-
-doc_to_tree(#doc{revs={Start, RevIds}}=Doc) ->
- [Tree] = doc_to_tree_simple(Doc, lists:reverse(RevIds)),
- {Start - length(RevIds) + 1, Tree}.
-
-
-doc_to_tree_simple(Doc, [RevId]) ->
- [{RevId, Doc, []}];
-doc_to_tree_simple(Doc, [RevId | Rest]) ->
- [{RevId, ?REV_MISSING, doc_to_tree_simple(Doc, Rest)}].
-
-
-make_doc(#db{fd=Fd}=Db, Id, Deleted, Bp, RevisionPath) ->
- {BodyData, Atts} =
- case Bp of
- nil ->
- {[], []};
- _ ->
- {ok, {BodyData0, Atts0}} = read_doc(Db, Bp),
- {BodyData0,
- lists:map(
- fun({Name,Type,Sp,AttLen,DiskLen,RevPos,Md5,Enc}) ->
- #att{name=Name,
- type=Type,
- att_len=AttLen,
- disk_len=DiskLen,
- md5=Md5,
- revpos=RevPos,
- data={Fd,Sp},
- encoding=
- case Enc of
- true ->
- % 0110 UPGRADE CODE
- gzip;
- false ->
- % 0110 UPGRADE CODE
- identity;
- _ ->
- Enc
- end
- };
- ({Name,Type,Sp,AttLen,RevPos,Md5}) ->
- #att{name=Name,
- type=Type,
- att_len=AttLen,
- disk_len=AttLen,
- md5=Md5,
- revpos=RevPos,
- data={Fd,Sp}};
- ({Name,{Type,Sp,AttLen}}) ->
- #att{name=Name,
- type=Type,
- att_len=AttLen,
- disk_len=AttLen,
- md5= <<>>,
- revpos=0,
- data={Fd,Sp}}
- end, Atts0)}
- end,
- #doc{
- id = Id,
- revs = RevisionPath,
- body = BodyData,
- atts = Atts,
- deleted = Deleted
- }.
-
-
-increment_stat(#db{is_sys_db = true}, _Stat) ->
- ok;
-increment_stat(#db{}, Stat) ->
- couch_stats_collector:increment(Stat).
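For reference, a minimal sketch of a read/write round trip over the public API removed above; the database name, the doc body, and a security object that permits the default user_ctx are assumptions:

    %% Assumes couch_server is running and the couch_db.hrl records are in scope.
    {ok, Db} = couch_db:open(<<"example_db">>, []),
    Doc = #doc{id = <<"doc1">>, body = {[{<<"value">>, 1}]}},
    {ok, {_Pos, _RevId}} = couch_db:update_doc(Db, Doc, []),
    {ok, #doc{body = Body}} = couch_db:open_doc(Db, <<"doc1">>, []),
    couch_db:close(Db).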
diff --git a/1.1.x/src/couchdb/couch_db.hrl b/1.1.x/src/couchdb/couch_db.hrl
deleted file mode 100644
index 003cb688..00000000
--- a/1.1.x/src/couchdb/couch_db.hrl
+++ /dev/null
@@ -1,278 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(LOCAL_DOC_PREFIX, "_local/").
--define(DESIGN_DOC_PREFIX0, "_design").
--define(DESIGN_DOC_PREFIX, "_design/").
-
--define(MIN_STR, <<"">>).
--define(MAX_STR, <<255>>). % an illegal UTF-8 string
-
--define(JSON_ENCODE(V), couch_util:json_encode(V)).
--define(JSON_DECODE(V), couch_util:json_decode(V)).
-
--define(b2l(V), binary_to_list(V)).
--define(l2b(V), list_to_binary(V)).
-
--define(DEFAULT_ATTACHMENT_CONTENT_TYPE, <<"application/octet-stream">>).
-
--define(LOG_DEBUG(Format, Args), couch_log:debug(Format, Args)).
--define(LOG_INFO(Format, Args), couch_log:info(Format, Args)).
--define(LOG_ERROR(Format, Args), couch_log:error(Format, Args)).
-
--record(rev_info,
- {
- rev,
- seq = 0,
- deleted = false,
- body_sp = nil % stream pointer
- }).
-
--record(doc_info,
- {
- id = <<"">>,
- high_seq = 0,
- revs = [] % rev_info
- }).
-
--record(full_doc_info,
- {id = <<"">>,
- update_seq = 0,
- deleted = false,
- rev_tree = []
- }).
-
--record(httpd,
- {mochi_req,
- peer,
- method,
- requested_path_parts,
- path_parts,
- db_url_handlers,
- user_ctx,
- req_body = undefined,
- design_url_handlers,
- auth,
- default_fun,
- url_handlers
- }).
-
-
--record(doc,
- {
- id = <<"">>,
- revs = {0, []},
-
- % the json body object.
- body = {[]},
-
- atts = [], % attachments
-
- deleted = false,
-
- % key/value tuple of meta information, provided when using special options:
- % couch_db:open_doc(Db, Id, Options).
- meta = []
- }).
-
-
--record(att,
- {
- name,
- type,
- att_len,
- disk_len, % length of the attachment in its identity form
- % (that is, without a content encoding applied to it)
- % differs from att_len when encoding /= identity
- md5= <<>>,
- revpos=0,
- data,
- encoding=identity % currently supported values are:
- % identity, gzip
- % additional values to support in the future:
- % deflate, compress
- }).
-
-
--record(user_ctx,
- {
- name=null,
- roles=[],
- handler
- }).
-
-% This should be updated any time a header change happens that requires more
-% than filling in new defaults.
-%
-% As long as the changes are limited to new header fields (with inline
-% defaults) added to the end of the record, there is no need to increment
-% the disk revision number.
-%
-% If the disk revision is incremented, new upgrade logic will need to be
-% added to couch_db_updater:init_db.
-
--define(LATEST_DISK_VERSION, 5).
-
--record(db_header,
- {disk_version = ?LATEST_DISK_VERSION,
- update_seq = 0,
- unused = 0,
- fulldocinfo_by_id_btree_state = nil,
- docinfo_by_seq_btree_state = nil,
- local_docs_btree_state = nil,
- purge_seq = 0,
- purged_docs = nil,
- security_ptr = nil,
- revs_limit = 1000
- }).
-
--record(db,
- {main_pid = nil,
- update_pid = nil,
- compactor_pid = nil,
-    instance_start_time, % number of microseconds since Jan 1, 1970, as a binary string
- fd,
- fd_ref_counter,
- header = #db_header{},
- committed_update_seq,
- fulldocinfo_by_id_btree,
- docinfo_by_seq_btree,
- local_docs_btree,
- update_seq,
- name,
- filepath,
- validate_doc_funs = [],
- security = [],
- security_ptr = nil,
- user_ctx = #user_ctx{},
- waiting_delayed_commit = nil,
- revs_limit = 1000,
- fsync_options = [],
- is_sys_db = false
- }).
-
-
--record(view_query_args, {
- start_key,
- end_key,
- start_docid = ?MIN_STR,
- end_docid = ?MAX_STR,
-
- direction = fwd,
-    inclusive_end=true, % i.e. a closed interval
-
- limit = 10000000000, % Huge number to simplify logic
- skip = 0,
-
- group_level = 0,
-
- view_type = nil,
- include_docs = false,
- conflicts = false,
- stale = false,
- multi_get = false,
- callback = nil,
- list = nil
-}).
-
--record(view_fold_helper_funs, {
- reduce_count,
- passed_end,
- start_response,
- send_row
-}).
-
--record(reduce_fold_helper_funs, {
- start_response,
- send_row
-}).
-
--record(extern_resp_args, {
- code = 200,
- stop = false,
- data = <<>>,
- ctype = "application/json",
- headers = []
-}).
-
--record(group, {
- sig=nil,
- db=nil,
- fd=nil,
- name,
- def_lang,
- design_options=[],
- views,
- lib,
- id_btree=nil,
- current_seq=0,
- purge_seq=0,
- query_server=nil,
- waiting_delayed_commit=nil
- }).
-
--record(view,
- {id_num,
- update_seq=0,
- purge_seq=0,
- map_names=[],
- def,
- btree=nil,
- reduce_funs=[],
- options=[]
- }).
-
--record(index_header,
- {seq=0,
- purge_seq=0,
- id_btree_state=nil,
- view_states=nil
- }).
-
--record(http_db, {
- url,
- auth = [],
- resource = "",
- headers = [
- {"User-Agent", "CouchDB/"++couch_server:get_version()},
- {"Accept", "application/json"},
- {"Accept-Encoding", "gzip"}
- ],
- qs = [],
- method = get,
- body = nil,
- options = [
- {response_format,binary},
- {inactivity_timeout, 30000}
- ],
- retries = 10,
- pause = 500,
- conn = nil
-}).
-
-% small value used in revision trees to indicate the revision isn't stored
--define(REV_MISSING, []).
-
--record(changes_args, {
- feed = "normal",
- dir = fwd,
- since = 0,
- limit = 1000000000000000,
- style = main_only,
- heartbeat,
- timeout,
- filter = "",
- include_docs = false,
- conflicts = false,
- db_open_options = []
-}).
-
diff --git a/1.1.x/src/couchdb/couch_db_update_notifier.erl b/1.1.x/src/couchdb/couch_db_update_notifier.erl
deleted file mode 100644
index 150eb31b..00000000
--- a/1.1.x/src/couchdb/couch_db_update_notifier.erl
+++ /dev/null
@@ -1,73 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%
-% This causes an OS process to be spawned, and it is notified every time a
-% database is updated.
-%
-% The notifications are in the form of the database name sent as a line of
-% text to the OS process's stdout.
-%
-
--module(couch_db_update_notifier).
-
--behaviour(gen_event).
-
--export([start_link/1, notify/1]).
--export([init/1, terminate/2, handle_event/2, handle_call/2, handle_info/2, code_change/3,stop/1]).
-
--include("couch_db.hrl").
-
-start_link(Exec) ->
- couch_event_sup:start_link(couch_db_update, {couch_db_update_notifier, make_ref()}, Exec).
-
-notify(Event) ->
- gen_event:notify(couch_db_update, Event).
-
-stop(Pid) ->
- couch_event_sup:stop(Pid).
-
-init(Exec) when is_list(Exec) -> % an executable
- couch_os_process:start_link(Exec, []);
-init(Else) ->
- {ok, Else}.
-
-terminate(_Reason, Pid) when is_pid(Pid) ->
- couch_os_process:stop(Pid),
- ok;
-terminate(_Reason, _State) ->
- ok.
-
-handle_event(Event, Fun) when is_function(Fun, 1) ->
- Fun(Event),
- {ok, Fun};
-handle_event(Event, {Fun, FunAcc}) ->
- FunAcc2 = Fun(Event, FunAcc),
- {ok, {Fun, FunAcc2}};
-handle_event({EventAtom, DbName}, Pid) ->
- Obj = {[{type, list_to_binary(atom_to_list(EventAtom))}, {db, DbName}]},
- ok = couch_os_process:send(Pid, Obj),
- {ok, Pid}.
-
-handle_call(_Request, State) ->
- {reply, ok, State}.
-
-handle_info({'EXIT', Pid, Reason}, Pid) ->
- ?LOG_ERROR("Update notification process ~p died: ~p", [Pid, Reason]),
- remove_handler;
-handle_info({'EXIT', _, _}, Pid) ->
- %% the db_update event manager traps exits and forwards this message to all
-    %% its handlers. Just ignore it, as it wasn't our os_process that exited.
- {ok, Pid}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
diff --git a/1.1.x/src/couchdb/couch_db_update_notifier_sup.erl b/1.1.x/src/couchdb/couch_db_update_notifier_sup.erl
deleted file mode 100644
index 4d730fc7..00000000
--- a/1.1.x/src/couchdb/couch_db_update_notifier_sup.erl
+++ /dev/null
@@ -1,63 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%
-% This causes an OS process to be spawned, and it is notified every time a
-% database is updated.
-%
-% The notifications are in the form of the database name sent as a line of
-% text to the OS process's stdout.
-%
-
--module(couch_db_update_notifier_sup).
-
--behaviour(supervisor).
-
--export([start_link/0,init/1]).
-
-start_link() ->
- supervisor:start_link({local, couch_db_update_notifier_sup},
- couch_db_update_notifier_sup, []).
-
-init([]) ->
- ok = couch_config:register(
- fun("update_notification", Key, Value) -> reload_config(Key, Value) end
- ),
-
- UpdateNotifierExes = couch_config:get("update_notification"),
-
- {ok,
- {{one_for_one, 10, 3600},
- lists:map(fun({Name, UpdateNotifierExe}) ->
- {Name,
- {couch_db_update_notifier, start_link, [UpdateNotifierExe]},
- permanent,
- 1000,
- supervisor,
- [couch_db_update_notifier]}
- end, UpdateNotifierExes)}}.
-
-%% @doc when update_notification configuration changes, terminate the process
-%% for that notifier and start a new one with the updated config
-reload_config(Id, Exe) ->
- ChildSpec = {
- Id,
- {couch_db_update_notifier, start_link, [Exe]},
- permanent,
- 1000,
- supervisor,
- [couch_db_update_notifier]
- },
- supervisor:terminate_child(couch_db_update_notifier_sup, Id),
- supervisor:delete_child(couch_db_update_notifier_sup, Id),
- supervisor:start_child(couch_db_update_notifier_sup, ChildSpec).
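-
-% For illustration, a hypothetical [update_notification] ini section maps a
-% notifier name to an executable command line:
-%
-%   [update_notification]
-%   index_updater = /usr/local/bin/couch_index_notifier.sh
-%
-% Editing such an entry at runtime fires the couch_config callback above,
-% and reload_config/2 restarts that child with the new command line.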
-
diff --git a/1.1.x/src/couchdb/couch_db_updater.erl b/1.1.x/src/couchdb/couch_db_updater.erl
deleted file mode 100644
index 2b317d95..00000000
--- a/1.1.x/src/couchdb/couch_db_updater.erl
+++ /dev/null
@@ -1,896 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_db_updater).
--behaviour(gen_server).
-
--export([btree_by_id_reduce/2,btree_by_seq_reduce/2]).
--export([init/1,terminate/2,handle_call/3,handle_cast/2,code_change/3,handle_info/2]).
-
--include("couch_db.hrl").
-
-
-init({MainPid, DbName, Filepath, Fd, Options}) ->
- process_flag(trap_exit, true),
- case lists:member(create, Options) of
- true ->
-        % create a new header and write it to the file
- Header = #db_header{},
- ok = couch_file:write_header(Fd, Header),
- % delete any old compaction files that might be hanging around
- RootDir = couch_config:get("couchdb", "database_dir", "."),
- couch_file:delete(RootDir, Filepath ++ ".compact");
- false ->
- ok = couch_file:upgrade_old_header(Fd, <<$g, $m, $k, 0>>), % 09 UPGRADE CODE
- case couch_file:read_header(Fd) of
- {ok, Header} ->
- ok;
- no_valid_header ->
-            % create a new header and write it to the file
- Header = #db_header{},
- ok = couch_file:write_header(Fd, Header),
- % delete any old compaction files that might be hanging around
- file:delete(Filepath ++ ".compact")
- end
- end,
-
- Db = init_db(DbName, Filepath, Fd, Header),
- Db2 = refresh_validate_doc_funs(Db),
- {ok, Db2#db{main_pid = MainPid, is_sys_db = lists:member(sys_db, Options)}}.
-
-
-terminate(_Reason, Db) ->
- couch_file:close(Db#db.fd),
- couch_util:shutdown_sync(Db#db.compactor_pid),
- couch_util:shutdown_sync(Db#db.fd_ref_counter),
- ok.
-
-handle_call(get_db, _From, Db) ->
- {reply, {ok, Db}, Db};
-handle_call(full_commit, _From, #db{waiting_delayed_commit=nil}=Db) ->
- {reply, ok, Db}; % no data waiting, return ok immediately
-handle_call(full_commit, _From, Db) ->
- {reply, ok, commit_data(Db)}; % commit the data and return ok
-handle_call(increment_update_seq, _From, Db) ->
- Db2 = commit_data(Db#db{update_seq=Db#db.update_seq+1}),
- ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
- couch_db_update_notifier:notify({updated, Db#db.name}),
- {reply, {ok, Db2#db.update_seq}, Db2};
-
-handle_call({set_security, NewSec}, _From, Db) ->
- {ok, Ptr} = couch_file:append_term(Db#db.fd, NewSec),
- Db2 = commit_data(Db#db{security=NewSec, security_ptr=Ptr,
- update_seq=Db#db.update_seq+1}),
- ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
- {reply, ok, Db2};
-
-handle_call({set_revs_limit, Limit}, _From, Db) ->
- Db2 = commit_data(Db#db{revs_limit=Limit,
- update_seq=Db#db.update_seq+1}),
- ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
- {reply, ok, Db2};
-
-handle_call({purge_docs, _IdRevs}, _From,
- #db{compactor_pid=Pid}=Db) when Pid /= nil ->
- {reply, {error, purge_during_compaction}, Db};
-handle_call({purge_docs, IdRevs}, _From, Db) ->
- #db{
- fd=Fd,
- fulldocinfo_by_id_btree = DocInfoByIdBTree,
- docinfo_by_seq_btree = DocInfoBySeqBTree,
- update_seq = LastSeq,
- header = Header = #db_header{purge_seq=PurgeSeq}
- } = Db,
- DocLookups = couch_btree:lookup(DocInfoByIdBTree,
- [Id || {Id, _Revs} <- IdRevs]),
-
- NewDocInfos = lists:zipwith(
- fun({_Id, Revs}, {ok, #full_doc_info{rev_tree=Tree}=FullDocInfo}) ->
- case couch_key_tree:remove_leafs(Tree, Revs) of
- {_, []=_RemovedRevs} -> % no change
- nil;
- {NewTree, RemovedRevs} ->
- {FullDocInfo#full_doc_info{rev_tree=NewTree},RemovedRevs}
- end;
- (_, not_found) ->
- nil
- end,
- IdRevs, DocLookups),
-
- SeqsToRemove = [Seq
- || {#full_doc_info{update_seq=Seq},_} <- NewDocInfos],
-
- FullDocInfoToUpdate = [FullInfo
- || {#full_doc_info{rev_tree=Tree}=FullInfo,_}
- <- NewDocInfos, Tree /= []],
-
- IdRevsPurged = [{Id, Revs}
- || {#full_doc_info{id=Id}, Revs} <- NewDocInfos],
-
- {DocInfoToUpdate, NewSeq} = lists:mapfoldl(
- fun(#full_doc_info{rev_tree=Tree}=FullInfo, SeqAcc) ->
- Tree2 = couch_key_tree:map_leafs(
- fun(_RevId, {IsDeleted, BodyPointer, _UpdateSeq}) ->
- {IsDeleted, BodyPointer, SeqAcc + 1}
- end, Tree),
- {couch_doc:to_doc_info(FullInfo#full_doc_info{rev_tree=Tree2}),
- SeqAcc + 1}
- end, LastSeq, FullDocInfoToUpdate),
-
- IdsToRemove = [Id || {#full_doc_info{id=Id,rev_tree=[]},_}
- <- NewDocInfos],
-
- {ok, DocInfoBySeqBTree2} = couch_btree:add_remove(DocInfoBySeqBTree,
- DocInfoToUpdate, SeqsToRemove),
- {ok, DocInfoByIdBTree2} = couch_btree:add_remove(DocInfoByIdBTree,
- FullDocInfoToUpdate, IdsToRemove),
- {ok, Pointer} = couch_file:append_term(Fd, IdRevsPurged),
-
- Db2 = commit_data(
- Db#db{
- fulldocinfo_by_id_btree = DocInfoByIdBTree2,
- docinfo_by_seq_btree = DocInfoBySeqBTree2,
- update_seq = NewSeq + 1,
- header=Header#db_header{purge_seq=PurgeSeq+1, purged_docs=Pointer}}),
-
- ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
- couch_db_update_notifier:notify({updated, Db#db.name}),
- {reply, {ok, (Db2#db.header)#db_header.purge_seq, IdRevsPurged}, Db2};
-handle_call(start_compact, _From, Db) ->
- case Db#db.compactor_pid of
- nil ->
- ?LOG_INFO("Starting compaction for db \"~s\"", [Db#db.name]),
- Pid = spawn_link(fun() -> start_copy_compact(Db) end),
- Db2 = Db#db{compactor_pid=Pid},
- ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
- {reply, ok, Db2};
- _ ->
-        % a compaction is already running; this is a no-op
- {reply, ok, Db}
- end.
-
-
-
-handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) ->
- {ok, NewFd} = couch_file:open(CompactFilepath),
- {ok, NewHeader} = couch_file:read_header(NewFd),
- #db{update_seq=NewSeq} = NewDb =
- init_db(Db#db.name, Filepath, NewFd, NewHeader),
- unlink(NewFd),
- case Db#db.update_seq == NewSeq of
- true ->
- % suck up all the local docs into memory and write them to the new db
- {ok, _, LocalDocs} = couch_btree:foldl(Db#db.local_docs_btree,
- fun(Value, _Offset, Acc) -> {ok, [Value | Acc]} end, []),
- {ok, NewLocalBtree} = couch_btree:add(NewDb#db.local_docs_btree, LocalDocs),
-
- NewDb2 = commit_data(NewDb#db{
- local_docs_btree = NewLocalBtree,
- main_pid = Db#db.main_pid,
- filepath = Filepath,
- instance_start_time = Db#db.instance_start_time,
- revs_limit = Db#db.revs_limit
- }),
-
- ?LOG_DEBUG("CouchDB swapping files ~s and ~s.",
- [Filepath, CompactFilepath]),
- RootDir = couch_config:get("couchdb", "database_dir", "."),
- couch_file:delete(RootDir, Filepath),
- ok = file:rename(CompactFilepath, Filepath),
- close_db(Db),
- NewDb3 = refresh_validate_doc_funs(NewDb2),
- ok = gen_server:call(Db#db.main_pid, {db_updated, NewDb3}, infinity),
- couch_db_update_notifier:notify({compacted, NewDb3#db.name}),
- ?LOG_INFO("Compaction for db \"~s\" completed.", [Db#db.name]),
- {noreply, NewDb3#db{compactor_pid=nil}};
- false ->
- ?LOG_INFO("Compaction file still behind main file "
- "(update seq=~p. compact update seq=~p). Retrying.",
- [Db#db.update_seq, NewSeq]),
- close_db(NewDb),
- Pid = spawn_link(fun() -> start_copy_compact(Db) end),
- Db2 = Db#db{compactor_pid=Pid},
- {noreply, Db2}
- end.
-
-
-handle_info({update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts,
- FullCommit}, Db) ->
- GroupedDocs2 = [[{Client, D} || D <- DocGroup] || DocGroup <- GroupedDocs],
- if NonRepDocs == [] ->
- {GroupedDocs3, Clients, FullCommit2} = collect_updates(GroupedDocs2,
- [Client], MergeConflicts, FullCommit);
- true ->
- GroupedDocs3 = GroupedDocs2,
- FullCommit2 = FullCommit,
- Clients = [Client]
- end,
- NonRepDocs2 = [{Client, NRDoc} || NRDoc <- NonRepDocs],
- try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts,
- FullCommit2) of
- {ok, Db2} ->
- ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
- if Db2#db.update_seq /= Db#db.update_seq ->
- couch_db_update_notifier:notify({updated, Db2#db.name});
- true -> ok
- end,
- [catch(ClientPid ! {done, self()}) || ClientPid <- Clients],
- {noreply, Db2}
- catch
- throw: retry ->
- [catch(ClientPid ! {retry, self()}) || ClientPid <- Clients],
- {noreply, Db}
- end;
-handle_info(delayed_commit, #db{waiting_delayed_commit=nil}=Db) ->
-    % no outstanding delayed commits; ignore
- {noreply, Db};
-handle_info(delayed_commit, Db) ->
- case commit_data(Db) of
- Db ->
- {noreply, Db};
- Db2 ->
- ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
- {noreply, Db2}
- end;
-handle_info({'EXIT', _Pid, normal}, Db) ->
- {noreply, Db};
-handle_info({'EXIT', _Pid, Reason}, Db) ->
- {stop, Reason, Db}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-merge_updates([], RestB, AccOutGroups) ->
- lists:reverse(AccOutGroups, RestB);
-merge_updates(RestA, [], AccOutGroups) ->
- lists:reverse(AccOutGroups, RestA);
-merge_updates([[{_, #doc{id=IdA}}|_]=GroupA | RestA],
- [[{_, #doc{id=IdB}}|_]=GroupB | RestB], AccOutGroups) ->
- if IdA == IdB ->
- merge_updates(RestA, RestB, [GroupA ++ GroupB | AccOutGroups]);
- IdA < IdB ->
- merge_updates(RestA, [GroupB | RestB], [GroupA | AccOutGroups]);
- true ->
- merge_updates([GroupA | RestA], RestB, [GroupB | AccOutGroups])
- end.
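-
-% For illustration: both inputs are lists of update groups sorted by doc id,
-% each group a list of {Client, #doc{}} pairs for one id. Groups that share
-% an id are concatenated, so (writing groups by id only) merging
-% [[a], [c]] with [[a'], [b]] yields [[a, a'], [b], [c]].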
-
-collect_updates(GroupedDocsAcc, ClientsAcc, MergeConflicts, FullCommit) ->
- receive
- % Only collect updates with the same MergeConflicts flag and without
- % local docs. It's easier to just avoid multiple _local doc
-    % updaters than to deal with their possible conflicts, and local doc
-    % writes are relatively rare. Can be optimized later if really needed.
- {update_docs, Client, GroupedDocs, [], MergeConflicts, FullCommit2} ->
- GroupedDocs2 = [[{Client, Doc} || Doc <- DocGroup]
- || DocGroup <- GroupedDocs],
- GroupedDocsAcc2 =
- merge_updates(GroupedDocsAcc, GroupedDocs2, []),
- collect_updates(GroupedDocsAcc2, [Client | ClientsAcc],
- MergeConflicts, (FullCommit or FullCommit2))
- after 0 ->
- {GroupedDocsAcc, ClientsAcc, FullCommit}
- end.
-
-
-btree_by_seq_split(#doc_info{id=Id, high_seq=KeySeq, revs=Revs}) ->
- {RevInfos, DeletedRevInfos} = lists:foldl(
- fun(#rev_info{deleted = false, seq = Seq} = Ri, {Acc, AccDel}) ->
- {[{Ri#rev_info.rev, Seq, Ri#rev_info.body_sp} | Acc], AccDel};
- (#rev_info{deleted = true, seq = Seq} = Ri, {Acc, AccDel}) ->
- {Acc, [{Ri#rev_info.rev, Seq, Ri#rev_info.body_sp} | AccDel]}
- end,
- {[], []}, Revs),
- {KeySeq, {Id, lists:reverse(RevInfos), lists:reverse(DeletedRevInfos)}}.
-
-btree_by_seq_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) ->
- #doc_info{
- id = Id,
- high_seq=KeySeq,
- revs =
- [#rev_info{rev=Rev,seq=Seq,deleted=false,body_sp = Bp} ||
- {Rev, Seq, Bp} <- RevInfos] ++
- [#rev_info{rev=Rev,seq=Seq,deleted=true,body_sp = Bp} ||
- {Rev, Seq, Bp} <- DeletedRevInfos]};
-btree_by_seq_join(KeySeq,{Id, Rev, Bp, Conflicts, DelConflicts, Deleted}) ->
- % 09 UPGRADE CODE
- % this is the 0.9.0 and earlier by_seq record. It's missing the body pointers
- % and individual seq nums for conflicts that are currently in the index,
-    % meaning the filtered _changes API will not work except on main docs.
- % Simply compact a 0.9.0 database to upgrade the index.
- #doc_info{
- id=Id,
- high_seq=KeySeq,
- revs = [#rev_info{rev=Rev,seq=KeySeq,deleted=Deleted,body_sp=Bp}] ++
- [#rev_info{rev=Rev1,seq=KeySeq,deleted=false} || Rev1 <- Conflicts] ++
- [#rev_info{rev=Rev2,seq=KeySeq,deleted=true} || Rev2 <- DelConflicts]}.
-
-btree_by_id_split(#full_doc_info{id=Id, update_seq=Seq,
- deleted=Deleted, rev_tree=Tree}) ->
- DiskTree =
- couch_key_tree:map(
- fun(_RevId, {IsDeleted, BodyPointer, UpdateSeq}) ->
- {if IsDeleted -> 1; true -> 0 end, BodyPointer, UpdateSeq};
- (_RevId, ?REV_MISSING) ->
- ?REV_MISSING
- end, Tree),
- {Id, {Seq, if Deleted -> 1; true -> 0 end, DiskTree}}.
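-
-% For illustration (hypothetical values): on disk the deleted flag is stored
-% as 0/1 and each rev tree leaf becomes {DelFlag, BodyPointer, UpdateSeq},
-% so a live doc with one rev might split to
-%   {<<"docid">>, {12, 0, [{1, {RevId, {0, BodyPtr, 12}, []}}]}}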
-
-btree_by_id_join(Id, {HighSeq, Deleted, DiskTree}) ->
- Tree =
- couch_key_tree:map(
- fun(_RevId, {IsDeleted, BodyPointer, UpdateSeq}) ->
- {IsDeleted == 1, BodyPointer, UpdateSeq};
- (_RevId, ?REV_MISSING) ->
- ?REV_MISSING;
- (_RevId, {IsDeleted, BodyPointer}) ->
- % 09 UPGRADE CODE
- % this is the 0.9.0 and earlier rev info record. It's missing the seq
-    % nums, which means CouchDB will sometimes re-examine unchanged
- % documents with the _changes API.
- % This is fixed by compacting the database.
- {IsDeleted == 1, BodyPointer, HighSeq}
- end, DiskTree),
-
- #full_doc_info{id=Id, update_seq=HighSeq, deleted=Deleted==1, rev_tree=Tree}.
-
-btree_by_id_reduce(reduce, FullDocInfos) ->
- lists:foldl(
- fun(#full_doc_info{deleted = false}, {NotDeleted, Deleted}) ->
- {NotDeleted + 1, Deleted};
- (#full_doc_info{deleted = true}, {NotDeleted, Deleted}) ->
- {NotDeleted, Deleted + 1}
- end,
- {0, 0}, FullDocInfos);
-btree_by_id_reduce(rereduce, [FirstRed | RestReds]) ->
- lists:foldl(
- fun({NotDeleted, Deleted}, {AccNotDeleted, AccDeleted}) ->
- {AccNotDeleted + NotDeleted, AccDeleted + Deleted}
- end,
- FirstRed, RestReds).
-
-btree_by_seq_reduce(reduce, DocInfos) ->
- % count the number of documents
- length(DocInfos);
-btree_by_seq_reduce(rereduce, Reds) ->
- lists:sum(Reds).
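-
-% For illustration: the by_id reduce value is a {NotDeleted, Deleted} pair
-% and rereduce sums the pairs element-wise, e.g.
-%   1> btree_by_id_reduce(rereduce, [{3, 1}, {2, 2}]).
-%   {5, 3}
-% The by_seq reduce simply counts the doc_infos in each subtree.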
-
-simple_upgrade_record(Old, New) when tuple_size(Old) =:= tuple_size(New) ->
- Old;
-simple_upgrade_record(Old, New) when tuple_size(Old) < tuple_size(New) ->
- OldSz = tuple_size(Old),
- NewValuesTail =
- lists:sublist(tuple_to_list(New), OldSz + 1, tuple_size(New) - OldSz),
- list_to_tuple(tuple_to_list(Old) ++ NewValuesTail).
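-
-% For illustration: upgrading an old record value to a wider default appends
-% the missing trailing defaults, e.g.
-%   1> simple_upgrade_record({rec, 1}, {rec, 1, nil}).
-%   {rec, 1, nil}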
-
-
-init_db(DbName, Filepath, Fd, Header0) ->
- Header1 = simple_upgrade_record(Header0, #db_header{}),
- Header =
- case element(2, Header1) of
- 1 -> Header1#db_header{unused = 0, security_ptr = nil}; % 0.9
- 2 -> Header1#db_header{unused = 0, security_ptr = nil}; % post 0.9 and pre 0.10
- 3 -> Header1#db_header{security_ptr = nil}; % post 0.9 and pre 0.10
- 4 -> Header1#db_header{security_ptr = nil}; % 0.10 and pre 0.11
- ?LATEST_DISK_VERSION -> Header1;
- _ -> throw({database_disk_version_error, "Incorrect disk header version"})
- end,
-
- {ok, FsyncOptions} = couch_util:parse_term(
- couch_config:get("couchdb", "fsync_options",
- "[before_header, after_header, on_file_open]")),
-
- case lists:member(on_file_open, FsyncOptions) of
- true -> ok = couch_file:sync(Fd);
- _ -> ok
- end,
-
- {ok, IdBtree} = couch_btree:open(Header#db_header.fulldocinfo_by_id_btree_state, Fd,
- [{split, fun(X) -> btree_by_id_split(X) end},
- {join, fun(X,Y) -> btree_by_id_join(X,Y) end},
- {reduce, fun(X,Y) -> btree_by_id_reduce(X,Y) end}]),
- {ok, SeqBtree} = couch_btree:open(Header#db_header.docinfo_by_seq_btree_state, Fd,
- [{split, fun(X) -> btree_by_seq_split(X) end},
- {join, fun(X,Y) -> btree_by_seq_join(X,Y) end},
- {reduce, fun(X,Y) -> btree_by_seq_reduce(X,Y) end}]),
- {ok, LocalDocsBtree} = couch_btree:open(Header#db_header.local_docs_btree_state, Fd),
- case Header#db_header.security_ptr of
- nil ->
- Security = [],
- SecurityPtr = nil;
- SecurityPtr ->
- {ok, Security} = couch_file:pread_term(Fd, SecurityPtr)
- end,
-    % convert the start time tuple to microseconds and store it as a binary string
- {MegaSecs, Secs, MicroSecs} = now(),
- StartTime = ?l2b(io_lib:format("~p",
- [(MegaSecs*1000000*1000000) + (Secs*1000000) + MicroSecs])),
- {ok, RefCntr} = couch_ref_counter:start([Fd]),
- #db{
- update_pid=self(),
- fd=Fd,
- fd_ref_counter = RefCntr,
- header=Header,
- fulldocinfo_by_id_btree = IdBtree,
- docinfo_by_seq_btree = SeqBtree,
- local_docs_btree = LocalDocsBtree,
- committed_update_seq = Header#db_header.update_seq,
- update_seq = Header#db_header.update_seq,
- name = DbName,
- filepath = Filepath,
- security = Security,
- security_ptr = SecurityPtr,
- instance_start_time = StartTime,
- revs_limit = Header#db_header.revs_limit,
- fsync_options = FsyncOptions
- }.
-
-
-close_db(#db{fd_ref_counter = RefCntr}) ->
- couch_ref_counter:drop(RefCntr).
-
-
-refresh_validate_doc_funs(Db) ->
- {ok, DesignDocs} = couch_db:get_design_docs(Db),
- ProcessDocFuns = lists:flatmap(
- fun(DesignDoc) ->
- case couch_doc:get_validate_doc_fun(DesignDoc) of
- nil -> [];
- Fun -> [Fun]
- end
- end, DesignDocs),
- Db#db{validate_doc_funs=ProcessDocFuns}.
-
-% rev tree functions
-
-flush_trees(_Db, [], AccFlushedTrees) ->
- {ok, lists:reverse(AccFlushedTrees)};
-flush_trees(#db{fd=Fd,header=Header}=Db,
- [InfoUnflushed | RestUnflushed], AccFlushed) ->
- #full_doc_info{update_seq=UpdateSeq, rev_tree=Unflushed} = InfoUnflushed,
- Flushed = couch_key_tree:map(
- fun(_Rev, Value) ->
- case Value of
- #doc{atts=Atts,deleted=IsDeleted}=Doc ->
-            % this node value is actually an unwritten document summary,
-            % so write it to disk.
-            % Make sure the Fd in the written bins is the same Fd we are
-            % writing to, and convert the bins, removing the Fd.
-            % All bins should have been written to disk already.
- DiskAtts =
- case Atts of
- [] -> [];
- [#att{data={BinFd, _Sp}} | _ ] when BinFd == Fd ->
- [{N,T,P,AL,DL,R,M,E}
- || #att{name=N,type=T,data={_,P},md5=M,revpos=R,
- att_len=AL,disk_len=DL,encoding=E}
- <- Atts];
- _ ->
-                % BinFd does not match our Fd. This can happen when a database
-                % is being switched out during a compaction.
- ?LOG_DEBUG("File where the attachments are written has"
- " changed. Possibly retrying.", []),
- throw(retry)
- end,
- {ok, NewSummaryPointer} =
- case Header#db_header.disk_version < 4 of
- true ->
- couch_file:append_term(Fd, {Doc#doc.body, DiskAtts});
- false ->
- couch_file:append_term_md5(Fd, {Doc#doc.body, DiskAtts})
- end,
- {IsDeleted, NewSummaryPointer, UpdateSeq};
- _ ->
- Value
- end
- end, Unflushed),
- flush_trees(Db, RestUnflushed, [InfoUnflushed#full_doc_info{rev_tree=Flushed} | AccFlushed]).
-
-
-send_result(Client, Id, OriginalRevs, NewResult) ->
- % used to send a result to the client
- catch(Client ! {result, self(), {{Id, OriginalRevs}, NewResult}}).
-
-merge_rev_trees(_Limit, _Merge, [], [], AccNewInfos, AccRemoveSeqs, AccSeq) ->
- {ok, lists:reverse(AccNewInfos), AccRemoveSeqs, AccSeq};
-merge_rev_trees(Limit, MergeConflicts, [NewDocs|RestDocsList],
- [OldDocInfo|RestOldInfo], AccNewInfos, AccRemoveSeqs, AccSeq) ->
- #full_doc_info{id=Id,rev_tree=OldTree,deleted=OldDeleted,update_seq=OldSeq}
- = OldDocInfo,
- NewRevTree = lists:foldl(
- fun({Client, #doc{revs={Pos,[_Rev|PrevRevs]}}=NewDoc}, AccTree) ->
- if not MergeConflicts ->
- case couch_key_tree:merge(AccTree, couch_db:doc_to_tree(NewDoc),
- Limit) of
- {_NewTree, conflicts} when (not OldDeleted) ->
- send_result(Client, Id, {Pos-1,PrevRevs}, conflict),
- AccTree;
- {NewTree, conflicts} when PrevRevs /= [] ->
-            % Check that, if a previous revision was specified, it is
-            % a leaf node in the tree.
- Leafs = couch_key_tree:get_all_leafs(AccTree),
- IsPrevLeaf = lists:any(fun({_, {LeafPos, [LeafRevId|_]}}) ->
- {LeafPos, LeafRevId} == {Pos-1, hd(PrevRevs)}
- end, Leafs),
- if IsPrevLeaf ->
- NewTree;
- true ->
- send_result(Client, Id, {Pos-1,PrevRevs}, conflict),
- AccTree
- end;
- {NewTree, no_conflicts} when AccTree == NewTree ->
-            % the tree didn't change at all,
-            % meaning we are saving a rev that has already
-            % been edited again.
- if (Pos == 1) and OldDeleted ->
- % this means we are recreating a brand new document
- % into a state that already existed before.
- % put the rev into a subsequent edit of the deletion
- #doc_info{revs=[#rev_info{rev={OldPos,OldRev}}|_]} =
- couch_doc:to_doc_info(OldDocInfo),
- NewRevId = couch_db:new_revid(
- NewDoc#doc{revs={OldPos, [OldRev]}}),
- NewDoc2 = NewDoc#doc{revs={OldPos + 1, [NewRevId, OldRev]}},
- {NewTree2, _} = couch_key_tree:merge(AccTree,
- couch_db:doc_to_tree(NewDoc2), Limit),
- % we changed the rev id, this tells the caller we did
- send_result(Client, Id, {Pos-1,PrevRevs},
- {ok, {OldPos + 1, NewRevId}}),
- NewTree2;
- true ->
- send_result(Client, Id, {Pos-1,PrevRevs}, conflict),
- AccTree
- end;
- {NewTree, _} ->
- NewTree
- end;
- true ->
- {NewTree, _} = couch_key_tree:merge(AccTree,
- couch_db:doc_to_tree(NewDoc), Limit),
- NewTree
- end
- end,
- OldTree, NewDocs),
- if NewRevTree == OldTree ->
- % nothing changed
- merge_rev_trees(Limit, MergeConflicts, RestDocsList, RestOldInfo,
- AccNewInfos, AccRemoveSeqs, AccSeq);
- true ->
- % we have updated the document, give it a new seq #
- NewInfo = #full_doc_info{id=Id,update_seq=AccSeq+1,rev_tree=NewRevTree},
- RemoveSeqs = case OldSeq of
- 0 -> AccRemoveSeqs;
- _ -> [OldSeq | AccRemoveSeqs]
- end,
- merge_rev_trees(Limit, MergeConflicts, RestDocsList, RestOldInfo,
- [NewInfo|AccNewInfos], RemoveSeqs, AccSeq+1)
- end.
-
-
-
-new_index_entries([], AccById, AccBySeq) ->
- {AccById, AccBySeq};
-new_index_entries([FullDocInfo|RestInfos], AccById, AccBySeq) ->
- #doc_info{revs=[#rev_info{deleted=Deleted}|_]} = DocInfo =
- couch_doc:to_doc_info(FullDocInfo),
- new_index_entries(RestInfos,
- [FullDocInfo#full_doc_info{deleted=Deleted}|AccById],
- [DocInfo|AccBySeq]).
-
-
-stem_full_doc_infos(#db{revs_limit=Limit}, DocInfos) ->
- [Info#full_doc_info{rev_tree=couch_key_tree:stem(Tree, Limit)} ||
- #full_doc_info{rev_tree=Tree}=Info <- DocInfos].
-
-update_docs_int(Db, DocsList, NonRepDocs, MergeConflicts, FullCommit) ->
- #db{
- fulldocinfo_by_id_btree = DocInfoByIdBTree,
- docinfo_by_seq_btree = DocInfoBySeqBTree,
- update_seq = LastSeq,
- revs_limit = RevsLimit
- } = Db,
- Ids = [Id || [{_Client, #doc{id=Id}}|_] <- DocsList],
-    % look up the old documents, if they exist.
- OldDocLookups = couch_btree:lookup(DocInfoByIdBTree, Ids),
- OldDocInfos = lists:zipwith(
- fun(_Id, {ok, FullDocInfo}) ->
- FullDocInfo;
- (Id, not_found) ->
- #full_doc_info{id=Id}
- end,
- Ids, OldDocLookups),
- % Merge the new docs into the revision trees.
- {ok, NewFullDocInfos, RemoveSeqs, NewSeq} = merge_rev_trees(RevsLimit,
- MergeConflicts, DocsList, OldDocInfos, [], [], LastSeq),
-
- % All documents are now ready to write.
-
- {ok, Db2} = update_local_docs(Db, NonRepDocs),
-
- % Write out the document summaries (the bodies are stored in the nodes of
-    % the trees; the attachments are already written to disk)
- {ok, FlushedFullDocInfos} = flush_trees(Db2, NewFullDocInfos, []),
-
- {IndexFullDocInfos, IndexDocInfos} =
- new_index_entries(FlushedFullDocInfos, [], []),
-
- % and the indexes
- {ok, DocInfoByIdBTree2} = couch_btree:add_remove(DocInfoByIdBTree, IndexFullDocInfos, []),
- {ok, DocInfoBySeqBTree2} = couch_btree:add_remove(DocInfoBySeqBTree, IndexDocInfos, RemoveSeqs),
-
- Db3 = Db2#db{
- fulldocinfo_by_id_btree = DocInfoByIdBTree2,
- docinfo_by_seq_btree = DocInfoBySeqBTree2,
- update_seq = NewSeq},
-
- % Check if we just updated any design documents, and update the validation
- % funs if we did.
- case lists:any(
- fun(<<"_design/", _/binary>>) -> true; (_) -> false end, Ids) of
- false ->
- Db4 = Db3;
- true ->
- Db4 = refresh_validate_doc_funs(Db3)
- end,
-
- {ok, commit_data(Db4, not FullCommit)}.
-
-
-update_local_docs(Db, []) ->
- {ok, Db};
-update_local_docs(#db{local_docs_btree=Btree}=Db, Docs) ->
- Ids = [Id || {_Client, #doc{id=Id}} <- Docs],
- OldDocLookups = couch_btree:lookup(Btree, Ids),
- BtreeEntries = lists:zipwith(
- fun({Client, #doc{id=Id,deleted=Delete,revs={0,PrevRevs},body=Body}}, OldDocLookup) ->
- case PrevRevs of
- [RevStr|_] ->
- PrevRev = list_to_integer(?b2l(RevStr));
- [] ->
- PrevRev = 0
- end,
- OldRev =
- case OldDocLookup of
- {ok, {_, {OldRev0, _}}} -> OldRev0;
- not_found -> 0
- end,
- case OldRev == PrevRev of
- true ->
- case Delete of
- false ->
- send_result(Client, Id, {0, PrevRevs}, {ok,
- {0, ?l2b(integer_to_list(PrevRev + 1))}}),
- {update, {Id, {PrevRev + 1, Body}}};
- true ->
- send_result(Client, Id, {0, PrevRevs},
- {ok, {0, <<"0">>}}),
- {remove, Id}
- end;
- false ->
- send_result(Client, Id, {0, PrevRevs}, conflict),
- ignore
- end
- end, Docs, OldDocLookups),
-
- BtreeIdsRemove = [Id || {remove, Id} <- BtreeEntries],
- BtreeIdsUpdate = [{Key, Val} || {update, {Key, Val}} <- BtreeEntries],
-
- {ok, Btree2} =
- couch_btree:add_remove(Btree, BtreeIdsUpdate, BtreeIdsRemove),
-
- {ok, Db#db{local_docs_btree = Btree2}}.
-
-
-commit_data(Db) ->
- commit_data(Db, false).
-
-db_to_header(Db, Header) ->
- Header#db_header{
- update_seq = Db#db.update_seq,
- docinfo_by_seq_btree_state = couch_btree:get_state(Db#db.docinfo_by_seq_btree),
- fulldocinfo_by_id_btree_state = couch_btree:get_state(Db#db.fulldocinfo_by_id_btree),
- local_docs_btree_state = couch_btree:get_state(Db#db.local_docs_btree),
- security_ptr = Db#db.security_ptr,
- revs_limit = Db#db.revs_limit}.
-
-commit_data(#db{waiting_delayed_commit=nil} = Db, true) ->
- Db#db{waiting_delayed_commit=erlang:send_after(1000,self(),delayed_commit)};
-commit_data(Db, true) ->
- Db;
-commit_data(Db, _) ->
- #db{
- fd = Fd,
- filepath = Filepath,
- header = OldHeader,
- fsync_options = FsyncOptions,
- waiting_delayed_commit = Timer
- } = Db,
- if is_reference(Timer) -> erlang:cancel_timer(Timer); true -> ok end,
- case db_to_header(Db, OldHeader) of
- OldHeader ->
- Db#db{waiting_delayed_commit=nil};
- Header ->
- case lists:member(before_header, FsyncOptions) of
- true -> ok = couch_file:sync(Filepath);
- _ -> ok
- end,
-
- ok = couch_file:write_header(Fd, Header),
-
- case lists:member(after_header, FsyncOptions) of
- true -> ok = couch_file:sync(Filepath);
- _ -> ok
- end,
-
- Db#db{waiting_delayed_commit=nil,
- header=Header,
- committed_update_seq=Db#db.update_seq}
- end.
-
-
-copy_doc_attachments(#db{fd=SrcFd}=SrcDb, {Pos,_RevId}, SrcSp, DestFd) ->
- {ok, {BodyData, BinInfos}} = couch_db:read_doc(SrcDb, SrcSp),
- % copy the bin values
- NewBinInfos = lists:map(
- fun({Name, {Type, BinSp, AttLen}}) when is_tuple(BinSp) orelse BinSp == null ->
- % 09 UPGRADE CODE
- {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
- couch_stream:old_copy_to_new_stream(SrcFd, BinSp, AttLen, DestFd),
- {Name, Type, NewBinSp, AttLen, AttLen, Pos, Md5, identity};
- ({Name, {Type, BinSp, AttLen}}) ->
- % 09 UPGRADE CODE
- {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
- couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
- {Name, Type, NewBinSp, AttLen, AttLen, Pos, Md5, identity};
- ({Name, Type, BinSp, AttLen, _RevPos, <<>>}) when
- is_tuple(BinSp) orelse BinSp == null ->
- % 09 UPGRADE CODE
- {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
- couch_stream:old_copy_to_new_stream(SrcFd, BinSp, AttLen, DestFd),
- {Name, Type, NewBinSp, AttLen, AttLen, AttLen, Md5, identity};
- ({Name, Type, BinSp, AttLen, RevPos, Md5}) ->
- % 010 UPGRADE CODE
- {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
- couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
- {Name, Type, NewBinSp, AttLen, AttLen, RevPos, Md5, identity};
- ({Name, Type, BinSp, AttLen, DiskLen, RevPos, Md5, Enc1}) ->
- {NewBinSp, AttLen, _, Md5, _IdentityMd5} =
- couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
- Enc = case Enc1 of
- true ->
- % 0110 UPGRADE CODE
- gzip;
- false ->
- % 0110 UPGRADE CODE
- identity;
- _ ->
- Enc1
- end,
- {Name, Type, NewBinSp, AttLen, DiskLen, RevPos, Md5, Enc}
- end, BinInfos),
- {BodyData, NewBinInfos}.
-
-copy_docs(Db, #db{fd=DestFd}=NewDb, InfoBySeq0, Retry) ->
- % COUCHDB-968, make sure we prune duplicates during compaction
- InfoBySeq = lists:usort(fun(#doc_info{id=A}, #doc_info{id=B}) -> A =< B end,
- InfoBySeq0),
- Ids = [Id || #doc_info{id=Id} <- InfoBySeq],
- LookupResults = couch_btree:lookup(Db#db.fulldocinfo_by_id_btree, Ids),
-
- NewFullDocInfos1 = lists:map(
- fun({ok, #full_doc_info{rev_tree=RevTree}=Info}) ->
- Info#full_doc_info{rev_tree=couch_key_tree:map(
- fun(Rev, {IsDel, Sp, Seq}, leaf) ->
- DocBody = copy_doc_attachments(Db, Rev, Sp, DestFd),
- {ok, Pos} = couch_file:append_term_md5(DestFd, DocBody),
- {IsDel, Pos, Seq};
- (_, _, branch) ->
- ?REV_MISSING
- end, RevTree)}
- end, LookupResults),
-
- NewFullDocInfos = stem_full_doc_infos(Db, NewFullDocInfos1),
- NewDocInfos = [couch_doc:to_doc_info(Info) || Info <- NewFullDocInfos],
- RemoveSeqs =
- case Retry of
- false ->
- [];
- true ->
- % We are retrying a compaction, meaning the documents we are copying may
- % already exist in our file and must be removed from the by_seq index.
- Existing = couch_btree:lookup(NewDb#db.fulldocinfo_by_id_btree, Ids),
- [Seq || {ok, #full_doc_info{update_seq=Seq}} <- Existing]
- end,
-
- {ok, DocInfoBTree} = couch_btree:add_remove(
- NewDb#db.docinfo_by_seq_btree, NewDocInfos, RemoveSeqs),
- {ok, FullDocInfoBTree} = couch_btree:add_remove(
- NewDb#db.fulldocinfo_by_id_btree, NewFullDocInfos, []),
- NewDb#db{ fulldocinfo_by_id_btree=FullDocInfoBTree,
- docinfo_by_seq_btree=DocInfoBTree}.
-
-
-
-copy_compact(Db, NewDb0, Retry) ->
- FsyncOptions = [Op || Op <- NewDb0#db.fsync_options, Op == before_header],
- NewDb = NewDb0#db{fsync_options=FsyncOptions},
- TotalChanges = couch_db:count_changes_since(Db, NewDb#db.update_seq),
- EnumBySeqFun =
- fun(#doc_info{high_seq=Seq}=DocInfo, _Offset, {AccNewDb, AccUncopied, TotalCopied}) ->
- couch_task_status:update("Copied ~p of ~p changes (~p%)",
- [TotalCopied, TotalChanges, (TotalCopied*100) div TotalChanges]),
- if TotalCopied rem 1000 =:= 0 ->
- NewDb2 = copy_docs(Db, AccNewDb, lists:reverse([DocInfo | AccUncopied]), Retry),
- if TotalCopied rem 10000 =:= 0 ->
- {ok, {commit_data(NewDb2#db{update_seq=Seq}), [], TotalCopied + 1}};
- true ->
- {ok, {NewDb2#db{update_seq=Seq}, [], TotalCopied + 1}}
- end;
- true ->
- {ok, {AccNewDb, [DocInfo | AccUncopied], TotalCopied + 1}}
- end
- end,
-
- couch_task_status:set_update_frequency(500),
-
- {ok, _, {NewDb2, Uncopied, TotalChanges}} =
- couch_btree:foldl(Db#db.docinfo_by_seq_btree, EnumBySeqFun,
- {NewDb, [], 0},
- [{start_key, NewDb#db.update_seq + 1}]),
-
- couch_task_status:update("Flushing"),
-
- NewDb3 = copy_docs(Db, NewDb2, lists:reverse(Uncopied), Retry),
-
- % copy misc header values
- if NewDb3#db.security /= Db#db.security ->
- {ok, Ptr} = couch_file:append_term(NewDb3#db.fd, Db#db.security),
- NewDb4 = NewDb3#db{security=Db#db.security, security_ptr=Ptr};
- true ->
- NewDb4 = NewDb3
- end,
-
- commit_data(NewDb4#db{update_seq=Db#db.update_seq}).
-
-start_copy_compact(#db{name=Name,filepath=Filepath,header=#db_header{purge_seq=PurgeSeq}}=Db) ->
- CompactFile = Filepath ++ ".compact",
- ?LOG_DEBUG("Compaction process spawned for db \"~s\"", [Name]),
- case couch_file:open(CompactFile) of
- {ok, Fd} ->
- couch_task_status:add_task(<<"Database Compaction">>, <<Name/binary, " retry">>, <<"Starting">>),
- Retry = true,
- case couch_file:read_header(Fd) of
- {ok, Header} ->
- ok;
- no_valid_header ->
- ok = couch_file:write_header(Fd, Header=#db_header{})
- end;
- {error, enoent} ->
- couch_task_status:add_task(<<"Database Compaction">>, Name, <<"Starting">>),
- {ok, Fd} = couch_file:open(CompactFile, [create]),
- Retry = false,
- ok = couch_file:write_header(Fd, Header=#db_header{})
- end,
- NewDb = init_db(Name, CompactFile, Fd, Header),
- NewDb2 = if PurgeSeq > 0 ->
- {ok, PurgedIdsRevs} = couch_db:get_last_purged(Db),
- {ok, Pointer} = couch_file:append_term(Fd, PurgedIdsRevs),
- NewDb#db{header=Header#db_header{purge_seq=PurgeSeq, purged_docs=Pointer}};
- true ->
- NewDb
- end,
- unlink(Fd),
-
- NewDb3 = copy_compact(Db, NewDb2, Retry),
- close_db(NewDb3),
- gen_server:cast(Db#db.update_pid, {compact_done, CompactFile}).
-
diff --git a/1.1.x/src/couchdb/couch_doc.erl b/1.1.x/src/couchdb/couch_doc.erl
deleted file mode 100644
index e3d66145..00000000
--- a/1.1.x/src/couchdb/couch_doc.erl
+++ /dev/null
@@ -1,527 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_doc).
-
--export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,rev_to_str/1,revs_to_strs/1]).
--export([att_foldl/3,range_att_foldl/5,att_foldl_decode/3,get_validate_doc_fun/1]).
--export([from_json_obj/1,to_json_obj/2,has_stubs/1, merge_stubs/2]).
--export([validate_docid/1]).
--export([doc_from_multi_part_stream/2]).
--export([doc_to_multi_part_stream/5, len_doc_to_multi_part_stream/4]).
-
--include("couch_db.hrl").
-
-% helpers used by to_json_obj
-to_json_rev(0, []) ->
- [];
-to_json_rev(Start, [FirstRevId|_]) ->
- [{<<"_rev">>, ?l2b([integer_to_list(Start),"-",revid_to_str(FirstRevId)])}].
-
-to_json_body(true, {Body}) ->
- Body ++ [{<<"_deleted">>, true}];
-to_json_body(false, {Body}) ->
- Body.
-
-to_json_revisions(Options, Start, RevIds) ->
- case lists:member(revs, Options) of
- false -> [];
- true ->
- [{<<"_revisions">>, {[{<<"start">>, Start},
- {<<"ids">>, [revid_to_str(R) ||R <- RevIds]}]}}]
- end.
-
-revid_to_str(RevId) when size(RevId) =:= 16 ->
- ?l2b(couch_util:to_hex(RevId));
-revid_to_str(RevId) ->
- RevId.
-
-rev_to_str({Pos, RevId}) ->
- ?l2b([integer_to_list(Pos),"-",revid_to_str(RevId)]).
-
-
-revs_to_strs([]) ->
- [];
-revs_to_strs([{Pos, RevId}| Rest]) ->
- [rev_to_str({Pos, RevId}) | revs_to_strs(Rest)].
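-
-% For illustration: rev_to_str({1, <<"cafe">>}) =:= <<"1-cafe">>; a 16-byte
-% binary rev id is hex-encoded by revid_to_str/1 before being joined.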
-
-to_json_meta(Meta) ->
- lists:map(
- fun({revs_info, Start, RevsInfo}) ->
- {JsonRevsInfo, _Pos} = lists:mapfoldl(
- fun({RevId, Status}, PosAcc) ->
- JsonObj = {[{<<"rev">>, rev_to_str({PosAcc, RevId})},
- {<<"status">>, ?l2b(atom_to_list(Status))}]},
- {JsonObj, PosAcc - 1}
- end, Start, RevsInfo),
- {<<"_revs_info">>, JsonRevsInfo};
- ({local_seq, Seq}) ->
- {<<"_local_seq">>, Seq};
- ({conflicts, Conflicts}) ->
- {<<"_conflicts">>, revs_to_strs(Conflicts)};
- ({deleted_conflicts, DConflicts}) ->
- {<<"_deleted_conflicts">>, revs_to_strs(DConflicts)}
- end, Meta).
-
-to_json_attachments(Attachments, Options) ->
- to_json_attachments(
- Attachments,
- lists:member(attachments, Options),
- lists:member(follows, Options),
- lists:member(att_encoding_info, Options)
- ).
-
-to_json_attachments([], _OutputData, _DataToFollow, _ShowEncInfo) ->
- [];
-to_json_attachments(Atts, OutputData, DataToFollow, ShowEncInfo) ->
- AttProps = lists:map(
- fun(#att{disk_len=DiskLen, att_len=AttLen, encoding=Enc}=Att) ->
- {Att#att.name, {[
- {<<"content_type">>, Att#att.type},
- {<<"revpos">>, Att#att.revpos}
- ] ++
- if not OutputData orelse Att#att.data == stub ->
- [{<<"length">>, DiskLen}, {<<"stub">>, true}];
- true ->
- if DataToFollow ->
- [{<<"length">>, DiskLen}, {<<"follows">>, true}];
- true ->
- AttData = case Enc of
- gzip ->
- zlib:gunzip(att_to_bin(Att));
- identity ->
- att_to_bin(Att)
- end,
- [{<<"data">>, base64:encode(AttData)}]
- end
- end ++
- case {ShowEncInfo, Enc} of
- {false, _} ->
- [];
- {true, identity} ->
- [];
- {true, _} ->
- [
- {<<"encoding">>, couch_util:to_binary(Enc)},
- {<<"encoded_length">>, AttLen}
- ]
- end
- }}
- end, Atts),
- [{<<"_attachments">>, {AttProps}}].
-
-to_json_obj(#doc{id=Id,deleted=Del,body=Body,revs={Start, RevIds},
- meta=Meta}=Doc,Options)->
- {[{<<"_id">>, Id}]
- ++ to_json_rev(Start, RevIds)
- ++ to_json_body(Del, Body)
- ++ to_json_revisions(Options, Start, RevIds)
- ++ to_json_meta(Meta)
- ++ to_json_attachments(Doc#doc.atts, Options)
- }.
-
-from_json_obj({Props}) ->
- transfer_fields(Props, #doc{body=[]});
-
-from_json_obj(_Other) ->
- throw({bad_request, "Document must be a JSON object"}).
-
-parse_revid(RevId) when size(RevId) =:= 32 ->
- RevInt = erlang:list_to_integer(?b2l(RevId), 16),
- <<RevInt:128>>;
-parse_revid(RevId) when length(RevId) =:= 32 ->
- RevInt = erlang:list_to_integer(RevId, 16),
- <<RevInt:128>>;
-parse_revid(RevId) when is_binary(RevId) ->
- RevId;
-parse_revid(RevId) when is_list(RevId) ->
- ?l2b(RevId).
-
-
-parse_rev(Rev) when is_binary(Rev) ->
- parse_rev(?b2l(Rev));
-parse_rev(Rev) when is_list(Rev) ->
- SplitRev = lists:splitwith(fun($-) -> false; (_) -> true end, Rev),
- case SplitRev of
- {Pos, [$- | RevId]} -> {list_to_integer(Pos), parse_revid(RevId)};
- _Else -> throw({bad_request, <<"Invalid rev format">>})
- end;
-parse_rev(_BadRev) ->
- throw({bad_request, <<"Invalid rev format">>}).
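-
-% For illustration: a rev string splits at the first "-" into a position and
-% a rev id; 32-char hex ids are packed into a 16-byte binary by parse_revid/1,
-% anything else is kept as a binary, e.g.
-%   1> parse_rev(<<"2-cafe">>).
-%   {2, <<"cafe">>}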
-
-parse_revs([]) ->
- [];
-parse_revs([Rev | Rest]) ->
- [parse_rev(Rev) | parse_revs(Rest)].
-
-
-validate_docid(Id) when is_binary(Id) ->
- case couch_util:validate_utf8(Id) of
- false -> throw({bad_request, <<"Document id must be valid UTF-8">>});
- true -> ok
- end,
- case Id of
- <<"_design/", _/binary>> -> ok;
- <<"_local/", _/binary>> -> ok;
- <<"_", _/binary>> ->
- throw({bad_request, <<"Only reserved document ids may start with underscore.">>});
- _Else -> ok
- end;
-validate_docid(Id) ->
- ?LOG_DEBUG("Document id is not a string: ~p", [Id]),
- throw({bad_request, <<"Document id must be a string">>}).
-
-transfer_fields([], #doc{body=Fields}=Doc) ->
- % convert fields back to json object
- Doc#doc{body={lists:reverse(Fields)}};
-
-transfer_fields([{<<"_id">>, Id} | Rest], Doc) ->
- validate_docid(Id),
- transfer_fields(Rest, Doc#doc{id=Id});
-
-transfer_fields([{<<"_rev">>, Rev} | Rest], #doc{revs={0, []}}=Doc) ->
- {Pos, RevId} = parse_rev(Rev),
- transfer_fields(Rest,
- Doc#doc{revs={Pos, [RevId]}});
-
-transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc) ->
- % we already got the rev from the _revisions
- transfer_fields(Rest,Doc);
-
-transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
- Atts = lists:map(fun({Name, {BinProps}}) ->
- case couch_util:get_value(<<"stub">>, BinProps) of
- true ->
- Type = couch_util:get_value(<<"content_type">>, BinProps),
- RevPos = couch_util:get_value(<<"revpos">>, BinProps, nil),
- DiskLen = couch_util:get_value(<<"length">>, BinProps),
- {Enc, EncLen} = att_encoding_info(BinProps),
- #att{name=Name, data=stub, type=Type, att_len=EncLen,
- disk_len=DiskLen, encoding=Enc, revpos=RevPos};
- _ ->
- Type = couch_util:get_value(<<"content_type">>, BinProps,
- ?DEFAULT_ATTACHMENT_CONTENT_TYPE),
- RevPos = couch_util:get_value(<<"revpos">>, BinProps, 0),
- case couch_util:get_value(<<"follows">>, BinProps) of
- true ->
- DiskLen = couch_util:get_value(<<"length">>, BinProps),
- {Enc, EncLen} = att_encoding_info(BinProps),
- #att{name=Name, data=follows, type=Type, encoding=Enc,
- att_len=EncLen, disk_len=DiskLen, revpos=RevPos};
- _ ->
- Value = couch_util:get_value(<<"data">>, BinProps),
- Bin = base64:decode(Value),
- LenBin = size(Bin),
- #att{name=Name, data=Bin, type=Type, att_len=LenBin,
- disk_len=LenBin, revpos=RevPos}
- end
- end
- end, JsonBins),
- transfer_fields(Rest, Doc#doc{atts=Atts});
-
-transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc) ->
- RevIds = couch_util:get_value(<<"ids">>, Props),
- Start = couch_util:get_value(<<"start">>, Props),
- if not is_integer(Start) ->
- throw({doc_validation, "_revisions.start isn't an integer."});
- not is_list(RevIds) ->
-        throw({doc_validation, "_revisions.ids isn't an array."});
- true ->
- ok
- end,
- [throw({doc_validation, "RevId isn't a string"}) ||
- RevId <- RevIds, not is_binary(RevId)],
- RevIds2 = [parse_revid(RevId) || RevId <- RevIds],
- transfer_fields(Rest, Doc#doc{revs={Start, RevIds2}});
-
-transfer_fields([{<<"_deleted">>, B} | Rest], Doc) when is_boolean(B) ->
- transfer_fields(Rest, Doc#doc{deleted=B});
-
-% ignored fields
-transfer_fields([{<<"_revs_info">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_local_seq">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_conflicts">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc) ->
- transfer_fields(Rest, Doc);
-
-% special fields for replication documents
-transfer_fields([{<<"_replication_state">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
-transfer_fields([{<<"_replication_state_time">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
-transfer_fields([{<<"_replication_id">>, _} = Field | Rest],
- #doc{body=Fields} = Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
-
-% unknown special field
-transfer_fields([{<<"_",Name/binary>>, _} | _], _) ->
- throw({doc_validation,
- ?l2b(io_lib:format("Bad special document member: _~s", [Name]))});
-
-transfer_fields([Field | Rest], #doc{body=Fields}=Doc) ->
- transfer_fields(Rest, Doc#doc{body=[Field|Fields]}).
-
-att_encoding_info(BinProps) ->
- DiskLen = couch_util:get_value(<<"length">>, BinProps),
- case couch_util:get_value(<<"encoding">>, BinProps) of
- undefined ->
- {identity, DiskLen};
- Enc ->
- EncodedLen = couch_util:get_value(<<"encoded_length">>, BinProps, DiskLen),
- {list_to_existing_atom(?b2l(Enc)), EncodedLen}
- end.
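-
-% For illustration: given attachment props such as
-%   {"length": 20, "encoding": "gzip", "encoded_length": 10}
-% this returns {gzip, 10}; with no "encoding" member it returns
-% {identity, Length}.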
-
-to_doc_info(FullDocInfo) ->
- {DocInfo, _Path} = to_doc_info_path(FullDocInfo),
- DocInfo.
-
-max_seq([], Max) ->
- Max;
-max_seq([#rev_info{seq=Seq}|Rest], Max) ->
- max_seq(Rest, if Max > Seq -> Max; true -> Seq end).
-
-to_doc_info_path(#full_doc_info{id=Id,rev_tree=Tree}) ->
- RevInfosAndPath =
- [{#rev_info{deleted=Del,body_sp=Bp,seq=Seq,rev={Pos,RevId}}, Path} ||
- {{Del, Bp, Seq},{Pos, [RevId|_]}=Path} <-
- couch_key_tree:get_all_leafs(Tree)],
- SortedRevInfosAndPath = lists:sort(
- fun({#rev_info{deleted=DeletedA,rev=RevA}, _PathA},
- {#rev_info{deleted=DeletedB,rev=RevB}, _PathB}) ->
- % sort descending by {not deleted, rev}
- {not DeletedA, RevA} > {not DeletedB, RevB}
- end, RevInfosAndPath),
- [{_RevInfo, WinPath}|_] = SortedRevInfosAndPath,
- RevInfos = [RevInfo || {RevInfo, _Path} <- SortedRevInfosAndPath],
- {#doc_info{id=Id, high_seq=max_seq(RevInfos, 0), revs=RevInfos}, WinPath}.
-
-
-
-
-att_foldl(#att{data=Bin}, Fun, Acc) when is_binary(Bin) ->
- Fun(Bin, Acc);
-att_foldl(#att{data={Fd,Sp},att_len=Len}, Fun, Acc) when is_tuple(Sp) orelse Sp == null ->
- % 09 UPGRADE CODE
- couch_stream:old_foldl(Fd, Sp, Len, Fun, Acc);
-att_foldl(#att{data={Fd,Sp},md5=Md5}, Fun, Acc) ->
- couch_stream:foldl(Fd, Sp, Md5, Fun, Acc);
-att_foldl(#att{data=DataFun,att_len=Len}, Fun, Acc) when is_function(DataFun) ->
- fold_streamed_data(DataFun, Len, Fun, Acc).
-
-range_att_foldl(#att{data={Fd,Sp}}, From, To, Fun, Acc) ->
- couch_stream:range_foldl(Fd, Sp, From, To, Fun, Acc).
-
-att_foldl_decode(#att{data={Fd,Sp},md5=Md5,encoding=Enc}, Fun, Acc) ->
- couch_stream:foldl_decode(Fd, Sp, Md5, Enc, Fun, Acc);
-att_foldl_decode(#att{data=Fun2,att_len=Len, encoding=identity}, Fun, Acc) ->
- fold_streamed_data(Fun2, Len, Fun, Acc).
-
-att_to_bin(#att{data=Bin}) when is_binary(Bin) ->
- Bin;
-att_to_bin(#att{data=Iolist}) when is_list(Iolist) ->
- iolist_to_binary(Iolist);
-att_to_bin(#att{data={_Fd,_Sp}}=Att) ->
- iolist_to_binary(
- lists:reverse(att_foldl(
- Att,
- fun(Bin,Acc) -> [Bin|Acc] end,
- []
- ))
- );
-att_to_bin(#att{data=DataFun, att_len=Len}) when is_function(DataFun) ->
- iolist_to_binary(
- lists:reverse(fold_streamed_data(
- DataFun,
- Len,
- fun(Data, Acc) -> [Data | Acc] end,
- []
- ))
- ).
-
-get_validate_doc_fun(#doc{body={Props}}=DDoc) ->
- case couch_util:get_value(<<"validate_doc_update">>, Props) of
- undefined ->
- nil;
- _Else ->
- fun(EditDoc, DiskDoc, Ctx, SecObj) ->
- couch_query_servers:validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx, SecObj)
- end
- end.
-
-
-has_stubs(#doc{atts=Atts}) ->
- has_stubs(Atts);
-has_stubs([]) ->
- false;
-has_stubs([#att{data=stub}|_]) ->
- true;
-has_stubs([_Att|Rest]) ->
- has_stubs(Rest).
-
-merge_stubs(#doc{id=Id,atts=MemBins}=StubsDoc, #doc{atts=DiskBins}) ->
- BinDict = dict:from_list([{Name, Att} || #att{name=Name}=Att <- DiskBins]),
- MergedBins = lists:map(
- fun(#att{name=Name, data=stub, revpos=StubRevPos}) ->
- case dict:find(Name, BinDict) of
- {ok, #att{revpos=DiskRevPos}=DiskAtt}
- when DiskRevPos == StubRevPos orelse StubRevPos == nil ->
- DiskAtt;
- _ ->
- throw({missing_stub,
- <<"id:", Id/binary, ", name:", Name/binary>>})
- end;
- (Att) ->
- Att
- end, MemBins),
- StubsDoc#doc{atts= MergedBins}.
-
-fold_streamed_data(_RcvFun, 0, _Fun, Acc) ->
- Acc;
-fold_streamed_data(RcvFun, LenLeft, Fun, Acc) when LenLeft > 0 ->
- Bin = RcvFun(),
- ResultAcc = Fun(Bin, Acc),
- fold_streamed_data(RcvFun, LenLeft - size(Bin), Fun, ResultAcc).
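-
-% For illustration: RcvFun is called repeatedly until LenLeft bytes have been
-% consumed, e.g. collecting two 2-byte chunks:
-%   1> fold_streamed_data(fun() -> <<"ab">> end, 4,
-%                         fun(Bin, Acc) -> [Bin | Acc] end, []).
-%   [<<"ab">>,<<"ab">>]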
-
-len_doc_to_multi_part_stream(Boundary, JsonBytes, Atts, SendEncodedAtts) ->
- AttsSize = lists:foldl(fun(#att{data=Data} = Att, AccAttsSize) ->
- case Data of
- stub ->
- AccAttsSize;
- _ ->
- AccAttsSize +
- 4 + % "\r\n\r\n"
- case SendEncodedAtts of
- true ->
- Att#att.att_len;
- _ ->
- Att#att.disk_len
- end +
- 4 + % "\r\n--"
- size(Boundary)
- end
- end, 0, Atts),
- if AttsSize == 0 ->
- {<<"application/json">>, iolist_size(JsonBytes)};
- true ->
- {<<"multipart/related; boundary=\"", Boundary/binary, "\"">>,
- 2 + % "--"
- size(Boundary) +
- 36 + % "\r\ncontent-type: application/json\r\n\r\n"
- iolist_size(JsonBytes) +
- 4 + % "\r\n--"
- size(Boundary) +
-        AttsSize +
- 2 % "--"
- }
- end.
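-
-% For illustration: with no attachments to send this returns
-% {<<"application/json">>, iolist_size(JsonBytes)}; otherwise it returns the
-% multipart/related content type and the exact body length, precomputed so a
-% correct Content-Length can be sent before any attachment data is streamed.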
-
-doc_to_multi_part_stream(Boundary, JsonBytes, Atts, WriteFun,
- SendEncodedAtts) ->
- case lists:any(fun(#att{data=Data})-> Data /= stub end, Atts) of
- true ->
- WriteFun([<<"--", Boundary/binary,
- "\r\ncontent-type: application/json\r\n\r\n">>,
- JsonBytes, <<"\r\n--", Boundary/binary>>]),
- atts_to_mp(Atts, Boundary, WriteFun, SendEncodedAtts);
- false ->
- WriteFun(JsonBytes)
- end.
-
-atts_to_mp([], _Boundary, WriteFun, _SendEncAtts) ->
- WriteFun(<<"--">>);
-atts_to_mp([#att{data=stub} | RestAtts], Boundary, WriteFun,
- SendEncodedAtts) ->
- atts_to_mp(RestAtts, Boundary, WriteFun, SendEncodedAtts);
-atts_to_mp([Att | RestAtts], Boundary, WriteFun,
- SendEncodedAtts) ->
- WriteFun(<<"\r\n\r\n">>),
- AttFun = case SendEncodedAtts of
- false ->
- fun att_foldl_decode/3;
- true ->
- fun att_foldl/3
- end,
- AttFun(Att, fun(Data, _) -> WriteFun(Data) end, ok),
- WriteFun(<<"\r\n--", Boundary/binary>>),
- atts_to_mp(RestAtts, Boundary, WriteFun, SendEncodedAtts).
-
-
-doc_from_multi_part_stream(ContentType, DataFun) ->
- Self = self(),
- Parser = spawn_link(fun() ->
- couch_httpd:parse_multipart_request(ContentType, DataFun,
- fun(Next)-> mp_parse_doc(Next, []) end),
- unlink(Self)
- end),
- Parser ! {get_doc_bytes, self()},
- receive
- {doc_bytes, DocBytes} ->
- erlang:put(mochiweb_request_recv, true),
- Doc = from_json_obj(?JSON_DECODE(DocBytes)),
- % go through the attachments looking for 'follows' in the data,
-        % and replace it with a function that reads the data from the MIME stream.
- ReadAttachmentDataFun = fun() ->
- Parser ! {get_bytes, self()},
- receive {bytes, Bytes} -> Bytes end
- end,
- Atts2 = lists:map(
- fun(#att{data=follows}=A) ->
- A#att{data=ReadAttachmentDataFun};
- (A) ->
- A
- end, Doc#doc.atts),
- {ok, Doc#doc{atts=Atts2}}
- end.
-
-mp_parse_doc({headers, H}, []) ->
- case couch_util:get_value("content-type", H) of
- {"application/json", _} ->
- fun (Next) ->
- mp_parse_doc(Next, [])
- end
- end;
-mp_parse_doc({body, Bytes}, AccBytes) ->
- fun (Next) ->
- mp_parse_doc(Next, [Bytes | AccBytes])
- end;
-mp_parse_doc(body_end, AccBytes) ->
- receive {get_doc_bytes, From} ->
- From ! {doc_bytes, lists:reverse(AccBytes)}
- end,
- fun (Next) ->
- mp_parse_atts(Next)
- end.
-
-mp_parse_atts(eof) ->
- ok;
-mp_parse_atts({headers, _H}) ->
- fun (Next) ->
- mp_parse_atts(Next)
- end;
-mp_parse_atts({body, Bytes}) ->
- receive {get_bytes, From} ->
- From ! {bytes, Bytes}
- end,
- fun (Next) ->
- mp_parse_atts(Next)
- end;
-mp_parse_atts(body_end) ->
- fun (Next) ->
- mp_parse_atts(Next)
- end.
-
-
diff --git a/1.1.x/src/couchdb/couch_event_sup.erl b/1.1.x/src/couchdb/couch_event_sup.erl
deleted file mode 100644
index 07c48790..00000000
--- a/1.1.x/src/couchdb/couch_event_sup.erl
+++ /dev/null
@@ -1,73 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% The purpose of this module is to allow event handlers to participate in
-%% Erlang supervisor trees. It provides a monitorable process that crashes
-%% if the event handler fails. When shut down, the process deregisters the
-%% event handler.
-
--module(couch_event_sup).
--behaviour(gen_server).
-
--include("couch_db.hrl").
-
--export([start_link/3,start_link/4, stop/1]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,code_change/3]).
-
-%
-% Instead of calling
-% ok = gen_event:add_sup_handler(error_logger, my_log, Args)
-%
-% do this:
-% {ok, LinkedPid} = couch_event_sup:start_link(error_logger, my_log, Args)
-%
-% The benefit is that the event handler is now part of the process tree and
-% can be started, restarted and shut down consistently, like the rest of the
-% server components.
-%
-% And if the "event" crashes, the supervisor is notified and can restart the
-% event handler.
-%
-% Use this form for a named process:
-% {ok, LinkedPid} = couch_event_sup:start_link({local, my_log}, error_logger, my_log, Args)
-%
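-% A hypothetical child spec for embedding this in a supervisor (an
-% illustrative sketch only; my_log names an imaginary handler module):
-%
-% {my_log,
-%     {couch_event_sup, start_link, [error_logger, my_log, []]},
-%     permanent, 1000, worker, [my_log]}
-%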
-
-start_link(EventMgr, EventHandler, Args) ->
- gen_server:start_link(couch_event_sup, {EventMgr, EventHandler, Args}, []).
-
-start_link(ServerName, EventMgr, EventHandler, Args) ->
- gen_server:start_link(ServerName, couch_event_sup, {EventMgr, EventHandler, Args}, []).
-
-stop(Pid) ->
- gen_server:cast(Pid, stop).
-
-init({EventMgr, EventHandler, Args}) ->
- case gen_event:add_sup_handler(EventMgr, EventHandler, Args) of
- ok ->
- {ok, {EventMgr, EventHandler}};
- {stop, Error} ->
- {stop, Error}
- end.
-
-terminate(_Reason, _State) ->
- ok.
-
-handle_call(_Whatever, _From, State) ->
- {ok, State}.
-
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-handle_info({gen_event_EXIT, _Handler, Reason}, State) ->
- {stop, Reason, State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
diff --git a/1.1.x/src/couchdb/couch_external_manager.erl b/1.1.x/src/couchdb/couch_external_manager.erl
deleted file mode 100644
index 7e401389..00000000
--- a/1.1.x/src/couchdb/couch_external_manager.erl
+++ /dev/null
@@ -1,101 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_external_manager).
--behaviour(gen_server).
-
--export([start_link/0, execute/2, config_change/2]).
--export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2, handle_info/2]).
-
--include("couch_db.hrl").
-
-start_link() ->
- gen_server:start_link({local, couch_external_manager},
- couch_external_manager, [], []).
-
-execute(UrlName, JsonReq) ->
- Pid = gen_server:call(couch_external_manager, {get, UrlName}),
- case Pid of
- {error, Reason} ->
- Reason;
- _ ->
- couch_external_server:execute(Pid, JsonReq)
- end.
-
-config_change("external", UrlName) ->
- gen_server:call(couch_external_manager, {config, UrlName}).
-
-% gen_server API
-
-init([]) ->
- process_flag(trap_exit, true),
- Handlers = ets:new(couch_external_manager_handlers, [set, private]),
- couch_config:register(fun config_change/2),
- {ok, Handlers}.
-
-terminate(_Reason, Handlers) ->
- ets:foldl(fun({_UrlName, Pid}, nil) ->
- couch_external_server:stop(Pid),
- nil
- end, nil, Handlers),
- ok.
-
-handle_call({get, UrlName}, _From, Handlers) ->
- case ets:lookup(Handlers, UrlName) of
- [] ->
- case couch_config:get("external", UrlName, nil) of
- nil ->
- Msg = lists:flatten(
- io_lib:format("No server configured for ~p.", [UrlName])),
- {reply, {error, {unknown_external_server, ?l2b(Msg)}}, Handlers};
- Command ->
- {ok, NewPid} = couch_external_server:start_link(UrlName, Command),
- true = ets:insert(Handlers, {UrlName, NewPid}),
- {reply, NewPid, Handlers}
- end;
- [{UrlName, Pid}] ->
- {reply, Pid, Handlers}
- end;
-handle_call({config, UrlName}, _From, Handlers) ->
-    % A newly added handler and a handler that had its command
-    % changed are treated exactly the same.
-
-    % Shut down the old handler.
- case ets:lookup(Handlers, UrlName) of
- [{UrlName, Pid}] ->
- couch_external_server:stop(Pid);
- [] ->
- ok
- end,
- % Wait for next request to boot the handler.
- {reply, ok, Handlers}.
-
-handle_cast(_Whatever, State) ->
- {noreply, State}.
-
-handle_info({'EXIT', Pid, normal}, Handlers) ->
- ?LOG_INFO("EXTERNAL: Server ~p terminated normally", [Pid]),
- % The process terminated normally without us asking - Remove Pid from the
- % handlers table so we don't attempt to reuse it
- ets:match_delete(Handlers, {'_', Pid}),
- {noreply, Handlers};
-
-handle_info({'EXIT', Pid, Reason}, Handlers) ->
- ?LOG_INFO("EXTERNAL: Server ~p died. (reason: ~p)", [Pid, Reason]),
- % Remove Pid from the handlers table so we don't try closing
- % it a second time in terminate/2.
- ets:match_delete(Handlers, {'_', Pid}),
- {stop, normal, Handlers}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
diff --git a/1.1.x/src/couchdb/couch_external_server.erl b/1.1.x/src/couchdb/couch_external_server.erl
deleted file mode 100644
index 045fcee9..00000000
--- a/1.1.x/src/couchdb/couch_external_server.erl
+++ /dev/null
@@ -1,69 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_external_server).
--behaviour(gen_server).
-
--export([start_link/2, stop/1, execute/2]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, code_change/3]).
-
--include("couch_db.hrl").
-
-% External API
-
-start_link(Name, Command) ->
- gen_server:start_link(couch_external_server, [Name, Command], []).
-
-stop(Pid) ->
- gen_server:cast(Pid, stop).
-
-execute(Pid, JsonReq) ->
- gen_server:call(Pid, {execute, JsonReq}, infinity).
-
-% Gen Server Handlers
-
-init([Name, Command]) ->
- ?LOG_INFO("EXTERNAL: Starting process for: ~s", [Name]),
- ?LOG_INFO("COMMAND: ~s", [Command]),
- process_flag(trap_exit, true),
- Timeout = list_to_integer(couch_config:get("couchdb", "os_process_timeout",
- "5000")),
- {ok, Pid} = couch_os_process:start_link(Command, [{timeout, Timeout}]),
- couch_config:register(fun("couchdb", "os_process_timeout", NewTimeout) ->
- couch_os_process:set_timeout(Pid, list_to_integer(NewTimeout))
- end),
- {ok, {Name, Command, Pid}}.
-
-terminate(_Reason, {_Name, _Command, Pid}) ->
- couch_os_process:stop(Pid),
- ok.
-
-handle_call({execute, JsonReq}, _From, {Name, Command, Pid}) ->
- {reply, couch_os_process:prompt(Pid, JsonReq), {Name, Command, Pid}}.
-
-handle_info({'EXIT', _Pid, normal}, State) ->
- {noreply, State};
-handle_info({'EXIT', Pid, Reason}, {Name, Command, Pid}) ->
- ?LOG_INFO("EXTERNAL: Process for ~s exiting. (reason: ~w)", [Name, Reason]),
- {stop, Reason, {Name, Command, Pid}}.
-
-handle_cast(stop, {Name, Command, Pid}) ->
- ?LOG_INFO("EXTERNAL: Shutting down ~s", [Name]),
- exit(Pid, normal),
- {stop, normal, {Name, Command, Pid}};
-handle_cast(_Whatever, State) ->
- {noreply, State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
diff --git a/1.1.x/src/couchdb/couch_file.erl b/1.1.x/src/couchdb/couch_file.erl
deleted file mode 100644
index 7b677034..00000000
--- a/1.1.x/src/couchdb/couch_file.erl
+++ /dev/null
@@ -1,614 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_file).
--behaviour(gen_server).
-
--include("couch_db.hrl").
-
--define(SIZE_BLOCK, 4096).
-
--record(file, {
- fd,
- tail_append_begin = 0, % 09 UPGRADE CODE
- eof = 0
- }).
-
--export([open/1, open/2, close/1, bytes/1, sync/1, append_binary/2,old_pread/3]).
--export([append_term/2, pread_term/2, pread_iolist/2, write_header/2]).
--export([pread_binary/2, read_header/1, truncate/2, upgrade_old_header/2]).
--export([append_term_md5/2,append_binary_md5/2]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
--export([delete/2,delete/3,init_delete_dir/1]).
-
-%%----------------------------------------------------------------------
-%% Args: Valid Options are [create] and [create,overwrite].
-%% Files are opened in read/write mode.
-%% Returns: On success, {ok, Fd}
-%% or {error, Reason} if the file could not be opened.
-%%----------------------------------------------------------------------
-
-open(Filepath) ->
- open(Filepath, []).
-
-open(Filepath, Options) ->
- case gen_server:start_link(couch_file,
- {Filepath, Options, self(), Ref = make_ref()}, []) of
- {ok, Fd} ->
- {ok, Fd};
- ignore ->
- % get the error
- receive
- {Ref, Pid, Error} ->
- case process_info(self(), trap_exit) of
- {trap_exit, true} -> receive {'EXIT', Pid, _} -> ok end;
- {trap_exit, false} -> ok
- end,
- case Error of
- {error, eacces} -> {file_permission_error, Filepath};
- _ -> Error
- end
- end;
- Error ->
- Error
- end.
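-
-% Illustrative usage (assuming "foo.couch" is a writable, hypothetical path):
-%   {ok, Fd} = couch_file:open("foo.couch", [create, overwrite]).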
-
-
-%%----------------------------------------------------------------------
-%% Purpose: To append an Erlang term to the end of the file.
-%% Args: Erlang term to serialize and append to the file.
-%% Returns: {ok, Pos} where Pos is the file offset to the beginning of the
-%%  serialized term (use pread_term to read the term back),
-%%  or {error, Reason}.
-%%----------------------------------------------------------------------
-
-append_term(Fd, Term) ->
- append_binary(Fd, term_to_binary(Term)).
-
-append_term_md5(Fd, Term) ->
- append_binary_md5(Fd, term_to_binary(Term)).
-
-
-%%----------------------------------------------------------------------
-%% Purpose: To append an Erlang binary to the end of the file.
-%% Args:    Erlang binary to append to the file.
-%% Returns: {ok, Pos} where Pos is the file offset to the beginning of the
-%%  appended data (use pread_binary to read it back),
-%%  or {error, Reason}.
-%%----------------------------------------------------------------------
-
-append_binary(Fd, Bin) ->
- Size = iolist_size(Bin),
- gen_server:call(Fd, {append_bin,
- [<<0:1/integer,Size:31/integer>>, Bin]}, infinity).
-
-append_binary_md5(Fd, Bin) ->
- Size = iolist_size(Bin),
- gen_server:call(Fd, {append_bin,
- [<<1:1/integer,Size:31/integer>>, couch_util:md5(Bin), Bin]}, infinity).
-
-
-%%----------------------------------------------------------------------
-%% Purpose: Reads a term from a file that was written with append_term
-%% Args: Pos, the offset into the file where the term is serialized.
-%% Returns: {ok, Term}
-%% or {error, Reason}.
-%%----------------------------------------------------------------------
-
-
-pread_term(Fd, Pos) ->
- {ok, Bin} = pread_binary(Fd, Pos),
- {ok, binary_to_term(Bin)}.
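-
-% Illustrative round trip:
-%   {ok, Pos} = couch_file:append_term(Fd, {a, [1, 2, 3]}),
-%   {ok, {a, [1, 2, 3]}} = couch_file:pread_term(Fd, Pos).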
-
-
-%%----------------------------------------------------------------------
-%% Purpose: Reads a binary from a file that was written with append_binary
-%% Args:    Pos, the offset into the file where the data starts.
-%% Returns: {ok, Binary}
-%% or {error, Reason}.
-%%----------------------------------------------------------------------
-
-pread_binary(Fd, Pos) ->
- {ok, L} = pread_iolist(Fd, Pos),
- {ok, iolist_to_binary(L)}.
-
-
-pread_iolist(Fd, Pos) ->
- case gen_server:call(Fd, {pread_iolist, Pos}, infinity) of
- {ok, IoList, <<>>} ->
- {ok, IoList};
- {ok, IoList, Md5} ->
- case couch_util:md5(IoList) of
- Md5 ->
- {ok, IoList};
- _ ->
- exit({file_corruption, <<"file corruption">>})
- end;
- Error ->
- Error
- end.
-
-%%----------------------------------------------------------------------
-%% Purpose: The length of a file, in bytes.
-%% Returns: {ok, Bytes}
-%% or {error, Reason}.
-%%----------------------------------------------------------------------
-
-% length in bytes
-bytes(Fd) ->
- gen_server:call(Fd, bytes, infinity).
-
-%%----------------------------------------------------------------------
-%% Purpose: Truncate a file to the given position, in bytes.
-%% Returns: ok
-%% or {error, Reason}.
-%%----------------------------------------------------------------------
-
-truncate(Fd, Pos) ->
- gen_server:call(Fd, {truncate, Pos}, infinity).
-
-%%----------------------------------------------------------------------
-%% Purpose: Ensure all bytes written to the file are flushed to disk.
-%% Returns: ok
-%% or {error, Reason}.
-%%----------------------------------------------------------------------
-
-sync(Filepath) when is_list(Filepath) ->
- {ok, Fd} = file:open(Filepath, [append, raw]),
- try file:sync(Fd) after file:close(Fd) end;
-sync(Fd) ->
- gen_server:call(Fd, sync, infinity).
-
-%%----------------------------------------------------------------------
-%% Purpose: Close the file.
-%% Returns: ok
-%%----------------------------------------------------------------------
-close(Fd) ->
- couch_util:shutdown_sync(Fd).
-
-
-delete(RootDir, Filepath) ->
- delete(RootDir, Filepath, true).
-
-
-delete(RootDir, Filepath, Async) ->
- DelFile = filename:join([RootDir,".delete", ?b2l(couch_uuids:random())]),
- case file:rename(Filepath, DelFile) of
- ok ->
- if (Async) ->
- spawn(file, delete, [DelFile]),
- ok;
- true ->
- file:delete(DelFile)
- end;
- Error ->
- Error
- end.
-
-
-init_delete_dir(RootDir) ->
- Dir = filename:join(RootDir,".delete"),
-    % note: ensure_dir requires an actual filename component, which is the
- % reason for "foo".
- filelib:ensure_dir(filename:join(Dir,"foo")),
- filelib:fold_files(Dir, ".*", true,
- fun(Filename, _) ->
- ok = file:delete(Filename)
- end, ok).
-
-
-% 09 UPGRADE CODE
-old_pread(Fd, Pos, Len) ->
- {ok, <<RawBin:Len/binary>>, false} = gen_server:call(Fd, {pread, Pos, Len}, infinity),
- {ok, RawBin}.
-
-% 09 UPGRADE CODE
-upgrade_old_header(Fd, Sig) ->
- gen_server:call(Fd, {upgrade_old_header, Sig}, infinity).
-
-
-read_header(Fd) ->
- case gen_server:call(Fd, find_header, infinity) of
- {ok, Bin} ->
- {ok, binary_to_term(Bin)};
- Else ->
- Else
- end.
-
-write_header(Fd, Data) ->
- Bin = term_to_binary(Data),
- Md5 = couch_util:md5(Bin),
- % now we assemble the final header binary and write to disk
- FinalBin = <<Md5/binary, Bin/binary>>,
- gen_server:call(Fd, {write_header, FinalBin}, infinity).
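-
-% Informal sketch of the resulting on-disk header record (see the
-% write_header and find_header clauses of handle_call/3, and load_header/2
-% below): at a ?SIZE_BLOCK boundary the file holds
-%   <<1, Len:32/integer, Md5:16/binary, TermBin/binary>>
-% where Len = 16 + byte_size(TermBin), with a block prefix byte re-inserted
-% every ?SIZE_BLOCK bytes.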
-
-
-
-
-init_status_error(ReturnPid, Ref, Error) ->
- ReturnPid ! {Ref, self(), Error},
- ignore.
-
-% server functions
-
-init({Filepath, Options, ReturnPid, Ref}) ->
- process_flag(trap_exit, true),
- case lists:member(create, Options) of
- true ->
- filelib:ensure_dir(Filepath),
- case file:open(Filepath, [read, append, raw, binary]) of
- {ok, Fd} ->
- {ok, Length} = file:position(Fd, eof),
- case Length > 0 of
- true ->
- % this means the file already exists and has data.
-                % FYI: We don't differentiate between empty files and
-                % non-existent files here.
- case lists:member(overwrite, Options) of
- true ->
- {ok, 0} = file:position(Fd, 0),
- ok = file:truncate(Fd),
- ok = file:sync(Fd),
- maybe_track_open_os_files(Options),
- {ok, #file{fd=Fd}};
- false ->
- ok = file:close(Fd),
- init_status_error(ReturnPid, Ref, file_exists)
- end;
- false ->
- maybe_track_open_os_files(Options),
- {ok, #file{fd=Fd}}
- end;
- Error ->
- init_status_error(ReturnPid, Ref, Error)
- end;
- false ->
- % open in read mode first, so we don't create the file if it doesn't exist.
- case file:open(Filepath, [read, raw]) of
- {ok, Fd_Read} ->
- {ok, Fd} = file:open(Filepath, [read, append, raw, binary]),
- ok = file:close(Fd_Read),
- maybe_track_open_os_files(Options),
- {ok, Length} = file:position(Fd, eof),
- {ok, #file{fd=Fd, eof=Length}};
- Error ->
- init_status_error(ReturnPid, Ref, Error)
- end
- end.
-
-maybe_track_open_os_files(FileOptions) ->
- case lists:member(sys_db, FileOptions) of
- true ->
- ok;
- false ->
- couch_stats_collector:track_process_count({couchdb, open_os_files})
- end.
-
-terminate(_Reason, #file{fd = Fd}) ->
- ok = file:close(Fd).
-
-
-handle_call({pread_iolist, Pos}, _From, File) ->
- {RawData, NextPos} = try
-        % up to 8KB of read-ahead
- read_raw_iolist_int(File, Pos, 2 * ?SIZE_BLOCK - (Pos rem ?SIZE_BLOCK))
- catch
- _:_ ->
- read_raw_iolist_int(File, Pos, 4)
- end,
- <<Prefix:1/integer, Len:31/integer, RestRawData/binary>> =
- iolist_to_binary(RawData),
- case Prefix of
- 1 ->
- {Md5, IoList} = extract_md5(
- maybe_read_more_iolist(RestRawData, 16 + Len, NextPos, File)),
- {reply, {ok, IoList, Md5}, File};
- 0 ->
- IoList = maybe_read_more_iolist(RestRawData, Len, NextPos, File),
- {reply, {ok, IoList, <<>>}, File}
- end;
-handle_call({pread, Pos, Bytes}, _From, #file{fd=Fd,tail_append_begin=TailAppendBegin}=File) ->
- {ok, Bin} = file:pread(Fd, Pos, Bytes),
- {reply, {ok, Bin, Pos >= TailAppendBegin}, File};
-handle_call(bytes, _From, #file{eof=Length}=File) ->
- {reply, {ok, Length}, File};
-handle_call(sync, _From, #file{fd=Fd}=File) ->
- {reply, file:sync(Fd), File};
-handle_call({truncate, Pos}, _From, #file{fd=Fd}=File) ->
- {ok, Pos} = file:position(Fd, Pos),
- case file:truncate(Fd) of
- ok ->
- {reply, ok, File#file{eof=Pos}};
- Error ->
- {reply, Error, File}
- end;
-handle_call({append_bin, Bin}, _From, #file{fd=Fd, eof=Pos}=File) ->
- Blocks = make_blocks(Pos rem ?SIZE_BLOCK, Bin),
- case file:write(Fd, Blocks) of
- ok ->
- {reply, {ok, Pos}, File#file{eof=Pos+iolist_size(Blocks)}};
- Error ->
- {reply, Error, File}
- end;
-handle_call({write_header, Bin}, _From, #file{fd=Fd, eof=Pos}=File) ->
- BinSize = size(Bin),
- case Pos rem ?SIZE_BLOCK of
- 0 ->
- Padding = <<>>;
- BlockOffset ->
- Padding = <<0:(8*(?SIZE_BLOCK-BlockOffset))>>
- end,
- FinalBin = [Padding, <<1, BinSize:32/integer>> | make_blocks(5, [Bin])],
- case file:write(Fd, FinalBin) of
- ok ->
- {reply, ok, File#file{eof=Pos+iolist_size(FinalBin)}};
- Error ->
- {reply, Error, File}
- end;
-
-
-handle_call({upgrade_old_header, Prefix}, _From, #file{fd=Fd}=File) ->
- case (catch read_old_header(Fd, Prefix)) of
- {ok, Header} ->
- TailAppendBegin = File#file.eof,
- Bin = term_to_binary(Header),
- Md5 = couch_util:md5(Bin),
- % now we assemble the final header binary and write to disk
- FinalBin = <<Md5/binary, Bin/binary>>,
- {reply, ok, _} = handle_call({write_header, FinalBin}, ok, File),
- ok = write_old_header(Fd, <<"upgraded">>, TailAppendBegin),
- {reply, ok, File#file{tail_append_begin=TailAppendBegin}};
- _Error ->
- case (catch read_old_header(Fd, <<"upgraded">>)) of
- {ok, TailAppendBegin} ->
- {reply, ok, File#file{tail_append_begin = TailAppendBegin}};
- _Error2 ->
- {reply, ok, File}
- end
- end;
-
-
-handle_call(find_header, _From, #file{fd=Fd, eof=Pos}=File) ->
- {reply, find_header(Fd, Pos div ?SIZE_BLOCK), File}.
-
-% 09 UPGRADE CODE
--define(HEADER_SIZE, 2048). % size of each segment of the doubly written header
-
-% 09 UPGRADE CODE
-read_old_header(Fd, Prefix) ->
- {ok, Bin} = file:pread(Fd, 0, 2*(?HEADER_SIZE)),
- <<Bin1:(?HEADER_SIZE)/binary, Bin2:(?HEADER_SIZE)/binary>> = Bin,
- Result =
- % read the first header
- case extract_header(Prefix, Bin1) of
- {ok, Header1} ->
- case extract_header(Prefix, Bin2) of
- {ok, Header2} ->
- case Header1 == Header2 of
- true ->
- % Everything is completely normal!
- {ok, Header1};
- false ->
- % To get here we must have two different header versions with signatures intact.
- % It's weird but possible (a commit failure right at the 2k boundary). Log it and take the first.
- ?LOG_INFO("Header version differences.~nPrimary Header: ~p~nSecondary Header: ~p", [Header1, Header2]),
- {ok, Header1}
- end;
- Error ->
- % error reading second header. It's ok, but log it.
- ?LOG_INFO("Secondary header corruption (error: ~p). Using primary header.", [Error]),
- {ok, Header1}
- end;
- Error ->
- % error reading primary header
- case extract_header(Prefix, Bin2) of
- {ok, Header2} ->
- % log corrupt primary header. It's ok since the secondary is still good.
- ?LOG_INFO("Primary header corruption (error: ~p). Using secondary header.", [Error]),
- {ok, Header2};
- _ ->
- % error reading secondary header too
- % return the error, no need to log anything as the caller will be responsible for dealing with the error.
- Error
- end
- end,
- case Result of
- {ok, {pointer_to_header_data, Ptr}} ->
- pread_term(Fd, Ptr);
- _ ->
- Result
- end.
-
-% 09 UPGRADE CODE
-extract_header(Prefix, Bin) ->
- SizeOfPrefix = size(Prefix),
- SizeOfTermBin = ?HEADER_SIZE -
- SizeOfPrefix -
- 16, % md5 sig
-
- <<HeaderPrefix:SizeOfPrefix/binary, TermBin:SizeOfTermBin/binary, Sig:16/binary>> = Bin,
-
- % check the header prefix
- case HeaderPrefix of
- Prefix ->
- % check the integrity signature
- case couch_util:md5(TermBin) == Sig of
- true ->
- Header = binary_to_term(TermBin),
- {ok, Header};
- false ->
- header_corrupt
- end;
- _ ->
- unknown_header_type
- end.
-
-
-% 09 UPGRADE CODE
-write_old_header(Fd, Prefix, Data) ->
- TermBin = term_to_binary(Data),
- % the size of all the bytes written to the header, including the md5 signature (16 bytes)
- FilledSize = byte_size(Prefix) + byte_size(TermBin) + 16,
- {TermBin2, FilledSize2} =
- case FilledSize > ?HEADER_SIZE of
- true ->
- % too big!
- {ok, Pos} = append_binary(Fd, TermBin),
- PtrBin = term_to_binary({pointer_to_header_data, Pos}),
- {PtrBin, byte_size(Prefix) + byte_size(PtrBin) + 16};
- false ->
- {TermBin, FilledSize}
- end,
- ok = file:sync(Fd),
- % pad out the header with zeros, then take the md5 hash
- PadZeros = <<0:(8*(?HEADER_SIZE - FilledSize2))>>,
- Sig = couch_util:md5([TermBin2, PadZeros]),
- % now we assemble the final header binary and write to disk
- WriteBin = <<Prefix/binary, TermBin2/binary, PadZeros/binary, Sig/binary>>,
- ?HEADER_SIZE = size(WriteBin), % sanity check
- DblWriteBin = [WriteBin, WriteBin],
- ok = file:pwrite(Fd, 0, DblWriteBin),
- ok = file:sync(Fd).
-
-
-handle_cast(close, Fd) ->
- {stop,normal,Fd}.
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info({'EXIT', _, normal}, Fd) ->
- {noreply, Fd};
-handle_info({'EXIT', _, Reason}, Fd) ->
- {stop, Reason, Fd}.
-
-
-find_header(_Fd, -1) ->
- no_valid_header;
-find_header(Fd, Block) ->
- case (catch load_header(Fd, Block)) of
- {ok, Bin} ->
- {ok, Bin};
- _Error ->
- find_header(Fd, Block -1)
- end.
-
-load_header(Fd, Block) ->
- {ok, <<1, HeaderLen:32/integer, RestBlock/binary>>} =
- file:pread(Fd, Block * ?SIZE_BLOCK, ?SIZE_BLOCK),
- TotalBytes = calculate_total_read_len(1, HeaderLen),
- case TotalBytes > byte_size(RestBlock) of
- false ->
- <<RawBin:TotalBytes/binary, _/binary>> = RestBlock;
- true ->
- {ok, Missing} = file:pread(
- Fd, (Block * ?SIZE_BLOCK) + 5 + byte_size(RestBlock),
- TotalBytes - byte_size(RestBlock)),
- RawBin = <<RestBlock/binary, Missing/binary>>
- end,
- <<Md5Sig:16/binary, HeaderBin/binary>> =
- iolist_to_binary(remove_block_prefixes(1, RawBin)),
- Md5Sig = couch_util:md5(HeaderBin),
- {ok, HeaderBin}.
-
-maybe_read_more_iolist(Buffer, DataSize, _, _)
- when DataSize =< byte_size(Buffer) ->
- <<Data:DataSize/binary, _/binary>> = Buffer,
- [Data];
-maybe_read_more_iolist(Buffer, DataSize, NextPos, File) ->
- {Missing, _} =
- read_raw_iolist_int(File, NextPos, DataSize - byte_size(Buffer)),
- [Buffer, Missing].
-
--spec read_raw_iolist_int(#file{}, Pos::non_neg_integer(), Len::non_neg_integer()) ->
- {Data::iolist(), CurPos::non_neg_integer()}.
-read_raw_iolist_int(Fd, {Pos, _Size}, Len) -> % 0110 UPGRADE CODE
- read_raw_iolist_int(Fd, Pos, Len);
-read_raw_iolist_int(#file{fd=Fd, tail_append_begin=TAB}, Pos, Len) ->
- BlockOffset = Pos rem ?SIZE_BLOCK,
- TotalBytes = calculate_total_read_len(BlockOffset, Len),
- {ok, <<RawBin:TotalBytes/binary>>} = file:pread(Fd, Pos, TotalBytes),
- if Pos >= TAB ->
- {remove_block_prefixes(BlockOffset, RawBin), Pos + TotalBytes};
- true ->
- % 09 UPGRADE CODE
- <<ReturnBin:Len/binary, _/binary>> = RawBin,
- {[ReturnBin], Pos + Len}
- end.
-
--spec extract_md5(iolist()) -> {binary(), iolist()}.
-extract_md5(FullIoList) ->
- {Md5List, IoList} = split_iolist(FullIoList, 16, []),
- {iolist_to_binary(Md5List), IoList}.
-
-calculate_total_read_len(0, FinalLen) ->
- calculate_total_read_len(1, FinalLen) + 1;
-calculate_total_read_len(BlockOffset, FinalLen) ->
- case ?SIZE_BLOCK - BlockOffset of
- BlockLeft when BlockLeft >= FinalLen ->
- FinalLen;
- BlockLeft ->
- FinalLen + ((FinalLen - BlockLeft) div (?SIZE_BLOCK -1)) +
- if ((FinalLen - BlockLeft) rem (?SIZE_BLOCK -1)) =:= 0 -> 0;
- true -> 1 end
- end.
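-
-% Worked example: with ?SIZE_BLOCK = 4096, a 10-byte read starting at block
-% offset 4090 crosses one block boundary and so needs one extra prefix byte:
-%   calculate_total_read_len(4090, 10) =:= 11.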
-
-remove_block_prefixes(_BlockOffset, <<>>) ->
- [];
-remove_block_prefixes(0, <<_BlockPrefix,Rest/binary>>) ->
- remove_block_prefixes(1, Rest);
-remove_block_prefixes(BlockOffset, Bin) ->
- BlockBytesAvailable = ?SIZE_BLOCK - BlockOffset,
- case size(Bin) of
- Size when Size > BlockBytesAvailable ->
- <<DataBlock:BlockBytesAvailable/binary,Rest/binary>> = Bin,
- [DataBlock | remove_block_prefixes(0, Rest)];
- _Size ->
- [Bin]
- end.
-
-make_blocks(_BlockOffset, []) ->
- [];
-make_blocks(0, IoList) ->
- [<<0>> | make_blocks(1, IoList)];
-make_blocks(BlockOffset, IoList) ->
- case split_iolist(IoList, (?SIZE_BLOCK - BlockOffset), []) of
- {Begin, End} ->
- [Begin | make_blocks(0, End)];
- _SplitRemaining ->
- IoList
- end.
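-
-% Informal summary of the block scheme implemented above: the file is carved
-% into ?SIZE_BLOCK-byte blocks, and the first byte of each block is a prefix
-% (<<0>> for appended data, <<1>> where a header record begins) that
-% remove_block_prefixes/2 strips back out on read.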
-
-%% @doc Returns a tuple where the first element contains the leading SplitAt
-%% bytes of the original iolist, and the 2nd element is the tail. If SplitAt
-%% is larger than byte_size(IoList), return the difference.
--spec split_iolist(IoList::iolist(), SplitAt::non_neg_integer(), Acc::list()) ->
- {iolist(), iolist()} | non_neg_integer().
-split_iolist(List, 0, BeginAcc) ->
- {lists:reverse(BeginAcc), List};
-split_iolist([], SplitAt, _BeginAcc) ->
- SplitAt;
-split_iolist([<<Bin/binary>> | Rest], SplitAt, BeginAcc) when SplitAt > byte_size(Bin) ->
- split_iolist(Rest, SplitAt - byte_size(Bin), [Bin | BeginAcc]);
-split_iolist([<<Bin/binary>> | Rest], SplitAt, BeginAcc) ->
- <<Begin:SplitAt/binary,End/binary>> = Bin,
- split_iolist([End | Rest], 0, [Begin | BeginAcc]);
-split_iolist([Sublist| Rest], SplitAt, BeginAcc) when is_list(Sublist) ->
- case split_iolist(Sublist, SplitAt, BeginAcc) of
- {Begin, End} ->
- {Begin, [End | Rest]};
- SplitRemaining ->
- split_iolist(Rest, SplitAt - (SplitAt - SplitRemaining), [Sublist | BeginAcc])
- end;
-split_iolist([Byte | Rest], SplitAt, BeginAcc) when is_integer(Byte) ->
- split_iolist(Rest, SplitAt - 1, [Byte | BeginAcc]).
diff --git a/1.1.x/src/couchdb/couch_httpd.erl b/1.1.x/src/couchdb/couch_httpd.erl
deleted file mode 100644
index 73d214e8..00000000
--- a/1.1.x/src/couchdb/couch_httpd.erl
+++ /dev/null
@@ -1,997 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd).
--include("couch_db.hrl").
-
--export([start_link/0, start_link/1, stop/0, handle_request/5]).
-
--export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,qs_json_value/3]).
--export([path/1,absolute_uri/2,body_length/1]).
--export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]).
--export([make_fun_spec_strs/1]).
--export([make_arity_1_fun/1, make_arity_2_fun/1, make_arity_3_fun/1]).
--export([parse_form/1,json_body/1,json_body_obj/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]).
--export([primary_header_value/2,partition/1,serve_file/3,serve_file/4, server_header/0]).
--export([start_chunked_response/3,send_chunk/2,log_request/2]).
--export([start_response_length/4, start_response/3, send/2]).
--export([start_json_response/2, start_json_response/3, end_json_response/1]).
--export([send_response/4,send_method_not_allowed/2,send_error/4, send_redirect/2,send_chunked_error/2]).
--export([send_json/2,send_json/3,send_json/4,last_chunk/1,parse_multipart_request/3]).
--export([accepted_encodings/1,handle_request_int/5,validate_referer/1,validate_ctype/2]).
-
-start_link() ->
- start_link(http).
-start_link(http) ->
- Port = couch_config:get("httpd", "port", "5984"),
- start_link(?MODULE, [{port, Port}]);
-start_link(https) ->
- Port = couch_config:get("ssl", "port", "6984"),
- CertFile = couch_config:get("ssl", "cert_file", nil),
- KeyFile = couch_config:get("ssl", "key_file", nil),
- Options = case CertFile /= nil andalso KeyFile /= nil of
- true ->
- [{port, Port},
- {ssl, true},
- {ssl_opts, [
- {certfile, CertFile},
- {keyfile, KeyFile}]}];
- false ->
- io:format("SSL enabled but PEM certificates are missing.", []),
- throw({error, missing_certs})
- end,
- start_link(https, Options).
-start_link(Name, Options) ->
- % read config and register for configuration changes
-
-    % just stop if one of the config settings changes. couch_server_sup
- % will restart us and then we will pick up the new settings.
-
- BindAddress = couch_config:get("httpd", "bind_address", any),
- DefaultSpec = "{couch_httpd_db, handle_request}",
- DefaultFun = make_arity_1_fun(
- couch_config:get("httpd", "default_handler", DefaultSpec)
- ),
-
- UrlHandlersList = lists:map(
- fun({UrlKey, SpecStr}) ->
- {?l2b(UrlKey), make_arity_1_fun(SpecStr)}
- end, couch_config:get("httpd_global_handlers")),
-
- DbUrlHandlersList = lists:map(
- fun({UrlKey, SpecStr}) ->
- {?l2b(UrlKey), make_arity_2_fun(SpecStr)}
- end, couch_config:get("httpd_db_handlers")),
-
- DesignUrlHandlersList = lists:map(
- fun({UrlKey, SpecStr}) ->
- {?l2b(UrlKey), make_arity_3_fun(SpecStr)}
- end, couch_config:get("httpd_design_handlers")),
-
- UrlHandlers = dict:from_list(UrlHandlersList),
- DbUrlHandlers = dict:from_list(DbUrlHandlersList),
- DesignUrlHandlers = dict:from_list(DesignUrlHandlersList),
- {ok, ServerOptions} = couch_util:parse_term(
- couch_config:get("httpd", "server_options", "[]")),
- {ok, SocketOptions} = couch_util:parse_term(
- couch_config:get("httpd", "socket_options", "[]")),
- Loop = fun(Req)->
- case SocketOptions of
- [] ->
- ok;
- _ ->
- ok = mochiweb_socket:setopts(Req:get(socket), SocketOptions)
- end,
- apply(?MODULE, handle_request, [
- Req, DefaultFun, UrlHandlers, DbUrlHandlers, DesignUrlHandlers
- ])
- end,
-
- % and off we go
-
- {ok, Pid} = case mochiweb_http:start(Options ++ ServerOptions ++ [
- {loop, Loop},
- {name, Name},
- {ip, BindAddress}
- ]) of
- {ok, MochiPid} -> {ok, MochiPid};
- {error, Reason} ->
- io:format("Failure to start Mochiweb: ~s~n",[Reason]),
- throw({error, Reason})
- end,
-
- ok = couch_config:register(
- fun("httpd", "bind_address") ->
- ?MODULE:stop();
- ("httpd", "port") ->
- ?MODULE:stop();
- ("httpd", "default_handler") ->
- ?MODULE:stop();
- ("httpd", "server_options") ->
- ?MODULE:stop();
- ("httpd", "socket_options") ->
- ?MODULE:stop();
- ("httpd_global_handlers", _) ->
- ?MODULE:stop();
- ("httpd_db_handlers", _) ->
- ?MODULE:stop();
- ("vhosts", _) ->
- ?MODULE:stop();
- ("ssl", _) ->
- ?MODULE:stop()
- end, Pid),
-
- {ok, Pid}.
-
-% SpecStr is a string like "{my_module, my_fun}"
-% or "{my_module, my_fun, <<"my_arg">>}"
-make_arity_1_fun(SpecStr) ->
- case couch_util:parse_term(SpecStr) of
- {ok, {Mod, Fun, SpecArg}} ->
- fun(Arg) -> Mod:Fun(Arg, SpecArg) end;
- {ok, {Mod, Fun}} ->
- fun(Arg) -> Mod:Fun(Arg) end
- end.
-
-make_arity_2_fun(SpecStr) ->
- case couch_util:parse_term(SpecStr) of
- {ok, {Mod, Fun, SpecArg}} ->
- fun(Arg1, Arg2) -> Mod:Fun(Arg1, Arg2, SpecArg) end;
- {ok, {Mod, Fun}} ->
- fun(Arg1, Arg2) -> Mod:Fun(Arg1, Arg2) end
- end.
-
-make_arity_3_fun(SpecStr) ->
- case couch_util:parse_term(SpecStr) of
- {ok, {Mod, Fun, SpecArg}} ->
- fun(Arg1, Arg2, Arg3) -> Mod:Fun(Arg1, Arg2, Arg3, SpecArg) end;
- {ok, {Mod, Fun}} ->
- fun(Arg1, Arg2, Arg3) -> Mod:Fun(Arg1, Arg2, Arg3) end
- end.
-
-% SpecStr is "{my_module, my_fun}, {my_module2, my_fun2}"
-make_fun_spec_strs(SpecStr) ->
- re:split(SpecStr, "(?<=})\\s*,\\s*(?={)", [{return, list}]).
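-
-% e.g. (with hypothetical module names):
-%   make_fun_spec_strs("{mod_a, fun_a}, {mod_b, fun_b}")
-%   -> ["{mod_a, fun_a}", "{mod_b, fun_b}"]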
-
-stop() ->
- mochiweb_http:stop(?MODULE).
-
-
-handle_request(MochiReq, DefaultFun, UrlHandlers, DbUrlHandlers,
- DesignUrlHandlers) ->
-
- MochiReq1 = couch_httpd_vhost:match_vhost(MochiReq),
- handle_request_int(MochiReq1, DefaultFun,
- UrlHandlers, DbUrlHandlers, DesignUrlHandlers).
-
-handle_request_int(MochiReq, DefaultFun,
- UrlHandlers, DbUrlHandlers, DesignUrlHandlers) ->
- Begin = now(),
- AuthenticationSrcs = make_fun_spec_strs(
- couch_config:get("httpd", "authentication_handlers")),
- % for the path, use the raw path with the query string and fragment
- % removed, but URL quoting left intact
- RawUri = MochiReq:get(raw_path),
- {"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri),
-
- Headers = MochiReq:get(headers),
-
- % get requested path
- RequestedPath = case MochiReq:get_header_value("x-couchdb-vhost-path") of
- undefined -> RawUri;
- P -> P
- end,
-
- HandlerKey =
- case mochiweb_util:partition(Path, "/") of
- {"", "", ""} ->
- <<"/">>; % Special case the root url handler
- {FirstPart, _, _} ->
- list_to_binary(FirstPart)
- end,
- ?LOG_DEBUG("~p ~s ~p from ~p~nHeaders: ~p", [
- MochiReq:get(method),
- RawUri,
- MochiReq:get(version),
- MochiReq:get(peer),
- mochiweb_headers:to_list(MochiReq:get(headers))
- ]),
-
- Method1 =
- case MochiReq:get(method) of
- % already an atom
- Meth when is_atom(Meth) -> Meth;
-
-        % Non-standard HTTP verbs aren't atoms (COPY, MOVE, etc.) so convert
-        % when possible (if any module references the atom, then it already
-        % exists).
- Meth -> couch_util:to_existing_atom(Meth)
- end,
- increment_method_stats(Method1),
-
- % allow broken HTTP clients to fake a full method vocabulary with an X-HTTP-METHOD-OVERRIDE header
- MethodOverride = MochiReq:get_primary_header_value("X-HTTP-Method-Override"),
- Method2 = case lists:member(MethodOverride, ["GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "COPY"]) of
- true ->
- ?LOG_INFO("MethodOverride: ~s (real method was ~s)", [MethodOverride, Method1]),
- case Method1 of
- 'POST' -> couch_util:to_existing_atom(MethodOverride);
- _ ->
- % Ignore X-HTTP-Method-Override when the original verb isn't POST.
- % I'd like to send a 406 error to the client, but that'd require a nasty refactor.
- % throw({not_acceptable, <<"X-HTTP-Method-Override may only be used with POST requests.">>})
- Method1
- end;
- _ -> Method1
- end,
-
- % alias HEAD to GET as mochiweb takes care of stripping the body
- Method = case Method2 of
- 'HEAD' -> 'GET';
- Other -> Other
- end,
-
- HttpReq = #httpd{
- mochi_req = MochiReq,
- peer = MochiReq:get(peer),
- method = Method,
- requested_path_parts = [list_to_binary(couch_httpd:unquote(Part))
- || Part <- string:tokens(RequestedPath, "/")],
- path_parts = [list_to_binary(couch_httpd:unquote(Part))
- || Part <- string:tokens(Path, "/")],
- db_url_handlers = DbUrlHandlers,
- design_url_handlers = DesignUrlHandlers,
- default_fun = DefaultFun,
- url_handlers = UrlHandlers
- },
-
- HandlerFun = couch_util:dict_find(HandlerKey, UrlHandlers, DefaultFun),
-
- {ok, Resp} =
- try
- case authenticate_request(HttpReq, AuthenticationSrcs) of
- #httpd{} = Req ->
- HandlerFun(Req);
- Response ->
- Response
- end
- catch
- throw:{http_head_abort, Resp0} ->
- {ok, Resp0};
- throw:{invalid_json, S} ->
- ?LOG_ERROR("attempted upload of invalid JSON (set log_level to debug to log it)", []),
- ?LOG_DEBUG("Invalid JSON: ~p",[S]),
- send_error(HttpReq, {bad_request, io_lib:format("invalid UTF-8 JSON: ~p",[S])});
- throw:unacceptable_encoding ->
- ?LOG_ERROR("unsupported encoding method for the response", []),
- send_error(HttpReq, {not_acceptable, "unsupported encoding"});
- throw:bad_accept_encoding_value ->
- ?LOG_ERROR("received invalid Accept-Encoding header", []),
- send_error(HttpReq, bad_request);
- exit:normal ->
- exit(normal);
- throw:Error ->
- ?LOG_DEBUG("Minor error in HTTP request: ~p",[Error]),
- ?LOG_DEBUG("Stacktrace: ~p",[erlang:get_stacktrace()]),
- send_error(HttpReq, Error);
- error:badarg ->
- ?LOG_ERROR("Badarg error in HTTP request",[]),
- ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
- send_error(HttpReq, badarg);
- error:function_clause ->
- ?LOG_ERROR("function_clause error in HTTP request",[]),
- ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
- send_error(HttpReq, function_clause);
- Tag:Error ->
- ?LOG_ERROR("Uncaught error in HTTP request: ~p",[{Tag, Error}]),
- ?LOG_INFO("Stacktrace: ~p",[erlang:get_stacktrace()]),
- send_error(HttpReq, Error)
- end,
- RequestTime = round(timer:now_diff(now(), Begin)/1000),
- couch_stats_collector:record({couchdb, request_time}, RequestTime),
- couch_stats_collector:increment({httpd, requests}),
- {ok, Resp}.
-
-% Try authentication handlers in order until one sets a user_ctx.
-% The auth funs also have the option of returning a response directly.
-% TODO: move this to couch_httpd_auth?
-authenticate_request(#httpd{user_ctx=#user_ctx{}} = Req, _AuthSrcs) ->
- Req;
-authenticate_request(#httpd{} = Req, []) ->
- case couch_config:get("couch_httpd_auth", "require_valid_user", "false") of
- "true" ->
- throw({unauthorized, <<"Authentication required.">>});
- "false" ->
- Req#httpd{user_ctx=#user_ctx{}}
- end;
-authenticate_request(#httpd{} = Req, [AuthSrc|Rest]) ->
- AuthFun = make_arity_1_fun(AuthSrc),
- R = case AuthFun(Req) of
- #httpd{user_ctx=#user_ctx{}=UserCtx}=Req2 ->
- Req2#httpd{user_ctx=UserCtx#user_ctx{handler=?l2b(AuthSrc)}};
- Else -> Else
- end,
- authenticate_request(R, Rest);
-authenticate_request(Response, _AuthSrcs) ->
- Response.
-
-increment_method_stats(Method) ->
- couch_stats_collector:increment({httpd_request_methods, Method}).
-
-validate_referer(Req) ->
- Host = host_for_request(Req),
- Referer = header_value(Req, "Referer", fail),
- case Referer of
- fail ->
- throw({bad_request, <<"Referer header required.">>});
- Referer ->
- {_,RefererHost,_,_,_} = mochiweb_util:urlsplit(Referer),
- if
- RefererHost =:= Host -> ok;
- true -> throw({bad_request, <<"Referer header must match host.">>})
- end
- end.
-
-validate_ctype(Req, Ctype) ->
- case couch_httpd:header_value(Req, "Content-Type") of
- undefined ->
- throw({bad_ctype, "Content-Type must be "++Ctype});
- ReqCtype ->
- % ?LOG_ERROR("Ctype ~p ReqCtype ~p",[Ctype,ReqCtype]),
- case re:split(ReqCtype, ";", [{return, list}]) of
- [Ctype] -> ok;
- [Ctype, _Rest] -> ok;
- _Else ->
- throw({bad_ctype, "Content-Type must be "++Ctype})
- end
- end.
-
-% Utilities
-
-partition(Path) ->
- mochiweb_util:partition(Path, "/").
-
-header_value(#httpd{mochi_req=MochiReq}, Key) ->
- MochiReq:get_header_value(Key).
-
-header_value(#httpd{mochi_req=MochiReq}, Key, Default) ->
- case MochiReq:get_header_value(Key) of
- undefined -> Default;
- Value -> Value
- end.
-
-primary_header_value(#httpd{mochi_req=MochiReq}, Key) ->
- MochiReq:get_primary_header_value(Key).
-
-accepted_encodings(#httpd{mochi_req=MochiReq}) ->
- case MochiReq:accepted_encodings(["gzip", "identity"]) of
- bad_accept_encoding_value ->
- throw(bad_accept_encoding_value);
- [] ->
- throw(unacceptable_encoding);
- EncList ->
- EncList
- end.
-
-serve_file(Req, RelativePath, DocumentRoot) ->
- serve_file(Req, RelativePath, DocumentRoot, []).
-
-serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot, ExtraHeaders) ->
- {ok, MochiReq:serve_file(RelativePath, DocumentRoot,
- server_header() ++ couch_httpd_auth:cookie_auth_header(Req, []) ++ ExtraHeaders)}.
-
-qs_value(Req, Key) ->
- qs_value(Req, Key, undefined).
-
-qs_value(Req, Key, Default) ->
- couch_util:get_value(Key, qs(Req), Default).
-
-qs_json_value(Req, Key, Default) ->
- case qs_value(Req, Key, Default) of
- Default ->
- Default;
- Result ->
- ?JSON_DECODE(Result)
- end.
-
-qs(#httpd{mochi_req=MochiReq}) ->
- MochiReq:parse_qs().
-
-path(#httpd{mochi_req=MochiReq}) ->
- MochiReq:get(path).
-
-host_for_request(#httpd{mochi_req=MochiReq}) ->
- XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"),
- case MochiReq:get_header_value(XHost) of
- undefined ->
- case MochiReq:get_header_value("Host") of
- undefined ->
- {ok, {Address, Port}} = inet:sockname(MochiReq:get(socket)),
- inet_parse:ntoa(Address) ++ ":" ++ integer_to_list(Port);
- Value1 ->
- Value1
- end;
- Value -> Value
- end.
-
-absolute_uri(#httpd{mochi_req=MochiReq}=Req, Path) ->
- Host = host_for_request(Req),
- XSsl = couch_config:get("httpd", "x_forwarded_ssl", "X-Forwarded-Ssl"),
- Scheme = case MochiReq:get_header_value(XSsl) of
- "on" -> "https";
- _ ->
- XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"),
- case MochiReq:get_header_value(XProto) of
- %% Restrict to "https" and "http" schemes only
- "https" -> "https";
- _ -> case MochiReq:get(scheme) of
- https -> "https";
- http -> "http"
- end
- end
- end,
- Scheme ++ "://" ++ Host ++ Path.
-
-unquote(UrlEncodedString) ->
- mochiweb_util:unquote(UrlEncodedString).
-
-quote(UrlDecodedString) ->
- mochiweb_util:quote_plus(UrlDecodedString).
-
-parse_form(#httpd{mochi_req=MochiReq}) ->
- mochiweb_multipart:parse_form(MochiReq).
-
-recv(#httpd{mochi_req=MochiReq}, Len) ->
- MochiReq:recv(Len).
-
-recv_chunked(#httpd{mochi_req=MochiReq}, MaxChunkSize, ChunkFun, InitState) ->
-    % ChunkFun is called once with each chunk:
-    %     ChunkFun({Length, Binary}, State)
-    % and is called with Length == 0 for the final chunk.
- MochiReq:stream_body(MaxChunkSize, ChunkFun, InitState).
-
-body_length(Req) ->
- case header_value(Req, "Transfer-Encoding") of
- undefined ->
- case header_value(Req, "Content-Length") of
- undefined -> undefined;
- Length -> list_to_integer(Length)
- end;
- "chunked" -> chunked;
- Unknown -> {unknown_transfer_encoding, Unknown}
- end.
-
-body(#httpd{mochi_req=MochiReq, req_body=ReqBody}) ->
- case ReqBody of
- undefined ->
- % Maximum size of document PUT request body (4GB)
- MaxSize = list_to_integer(
- couch_config:get("couchdb", "max_document_size", "4294967296")),
- MochiReq:recv_body(MaxSize);
- _Else ->
- ReqBody
- end.
-
-json_body(Httpd) ->
- ?JSON_DECODE(body(Httpd)).
-
-json_body_obj(Httpd) ->
- case json_body(Httpd) of
- {Props} -> {Props};
- _Else ->
- throw({bad_request, "Request body must be a JSON object"})
- end.
-
-
-
-doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
- "\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"".
-
-make_etag(Term) ->
- <<SigInt:128/integer>> = couch_util:md5(term_to_binary(Term)),
- list_to_binary("\"" ++ lists:flatten(io_lib:format("~.36B",[SigInt])) ++ "\"").
-
-etag_match(Req, CurrentEtag) when is_binary(CurrentEtag) ->
- etag_match(Req, binary_to_list(CurrentEtag));
-
-etag_match(Req, CurrentEtag) ->
- EtagsToMatch = string:tokens(
- couch_httpd:header_value(Req, "If-None-Match", ""), ", "),
- lists:member(CurrentEtag, EtagsToMatch).
-
-etag_respond(Req, CurrentEtag, RespFun) ->
- case etag_match(Req, CurrentEtag) of
- true ->
- % the client has this in their cache.
- couch_httpd:send_response(Req, 304, [{"Etag", CurrentEtag}], <<>>);
- false ->
- % Run the function.
- RespFun()
- end.
-
-verify_is_server_admin(#httpd{user_ctx=UserCtx}) ->
- verify_is_server_admin(UserCtx);
-verify_is_server_admin(#user_ctx{roles=Roles}) ->
- case lists:member(<<"_admin">>, Roles) of
- true -> ok;
- false -> throw({unauthorized, <<"You are not a server admin.">>})
- end.
-
-log_request(#httpd{mochi_req=MochiReq,peer=Peer}, Code) ->
- ?LOG_INFO("~s - - ~p ~s ~B", [
- Peer,
- couch_util:to_existing_atom(MochiReq:get(method)),
- MochiReq:get(raw_path),
- couch_util:to_integer(Code)
- ]).
-
-
-start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) ->
- log_request(Req, Code),
- couch_stats_collector:increment({httpd_status_codes, Code}),
- Resp = MochiReq:start_response_length({Code, Headers ++ server_header() ++ couch_httpd_auth:cookie_auth_header(Req, Headers), Length}),
- case MochiReq:get(method) of
- 'HEAD' -> throw({http_head_abort, Resp});
- _ -> ok
- end,
- {ok, Resp}.
-
-start_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
- log_request(Req, Code),
-    couch_stats_collector:increment({httpd_status_codes, Code}),
- CookieHeader = couch_httpd_auth:cookie_auth_header(Req, Headers),
- Headers2 = Headers ++ server_header() ++ CookieHeader,
- Resp = MochiReq:start_response({Code, Headers2}),
- case MochiReq:get(method) of
- 'HEAD' -> throw({http_head_abort, Resp});
- _ -> ok
- end,
- {ok, Resp}.
-
-send(Resp, Data) ->
- Resp:send(Data),
- {ok, Resp}.
-
-no_resp_conn_header([]) ->
- true;
-no_resp_conn_header([{Hdr, _}|Rest]) ->
- case string:to_lower(Hdr) of
- "connection" -> false;
- _ -> no_resp_conn_header(Rest)
- end.
-
-http_1_0_keep_alive(Req, Headers) ->
- KeepOpen = Req:should_close() == false,
- IsHttp10 = Req:get(version) == {1, 0},
- NoRespHeader = no_resp_conn_header(Headers),
- case KeepOpen andalso IsHttp10 andalso NoRespHeader of
- true -> [{"Connection", "Keep-Alive"} | Headers];
- false -> Headers
- end.
-
-start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
- log_request(Req, Code),
- couch_stats_collector:increment({httpd_status_codes, Code}),
- Headers2 = http_1_0_keep_alive(MochiReq, Headers),
- Resp = MochiReq:respond({Code, Headers2 ++ server_header() ++ couch_httpd_auth:cookie_auth_header(Req, Headers2), chunked}),
- case MochiReq:get(method) of
- 'HEAD' -> throw({http_head_abort, Resp});
- _ -> ok
- end,
- {ok, Resp}.
-
-send_chunk(Resp, Data) ->
- case iolist_size(Data) of
- 0 -> ok; % do nothing
- _ -> Resp:write_chunk(Data)
- end,
- {ok, Resp}.
-
-last_chunk(Resp) ->
- Resp:write_chunk([]),
- {ok, Resp}.
-
-send_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) ->
- log_request(Req, Code),
- couch_stats_collector:increment({httpd_status_codes, Code}),
- Headers2 = http_1_0_keep_alive(MochiReq, Headers),
- if Code >= 400 ->
- ?LOG_DEBUG("httpd ~p error response:~n ~s", [Code, Body]);
- true -> ok
- end,
- {ok, MochiReq:respond({Code, Headers2 ++ server_header() ++ couch_httpd_auth:cookie_auth_header(Req, Headers2), Body})}.
-
-send_method_not_allowed(Req, Methods) ->
- send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, ?l2b("Only " ++ Methods ++ " allowed")).
-
-send_json(Req, Value) ->
- send_json(Req, 200, Value).
-
-send_json(Req, Code, Value) ->
- send_json(Req, Code, [], Value).
-
-send_json(Req, Code, Headers, Value) ->
- DefaultHeaders = [
- {"Content-Type", negotiate_content_type(Req)},
- {"Cache-Control", "must-revalidate"}
- ],
- Body = [start_jsonp(Req), ?JSON_ENCODE(Value), end_jsonp(), $\n],
- send_response(Req, Code, DefaultHeaders ++ Headers, Body).
-
-start_json_response(Req, Code) ->
- start_json_response(Req, Code, []).
-
-start_json_response(Req, Code, Headers) ->
- DefaultHeaders = [
- {"Content-Type", negotiate_content_type(Req)},
- {"Cache-Control", "must-revalidate"}
- ],
- start_jsonp(Req), % Validate before starting chunked.
- %start_chunked_response(Req, Code, DefaultHeaders ++ Headers).
- {ok, Resp} = start_chunked_response(Req, Code, DefaultHeaders ++ Headers),
- case start_jsonp(Req) of
- [] -> ok;
- Start -> send_chunk(Resp, Start)
- end,
- {ok, Resp}.
-
-end_json_response(Resp) ->
- send_chunk(Resp, end_jsonp() ++ [$\n]),
- last_chunk(Resp).
-
-start_jsonp(Req) ->
- case get(jsonp) of
- undefined -> put(jsonp, qs_value(Req, "callback", no_jsonp));
- _ -> ok
- end,
- case get(jsonp) of
- no_jsonp -> [];
- [] -> [];
- CallBack ->
- try
- % make sure jsonp is configured on (default off)
- case couch_config:get("httpd", "allow_jsonp", "false") of
- "true" ->
- validate_callback(CallBack),
- CallBack ++ "(";
- _Else ->
- % this could throw an error message, but instead we just ignore the
- % jsonp parameter
- % throw({bad_request, <<"JSONP must be configured before using.">>})
- put(jsonp, no_jsonp),
- []
- end
- catch
- Error ->
- put(jsonp, no_jsonp),
- throw(Error)
- end
- end.
-
-end_jsonp() ->
- Resp = case get(jsonp) of
- no_jsonp -> [];
- [] -> [];
- _ -> ");"
- end,
- put(jsonp, undefined),
- Resp.
-
-validate_callback(CallBack) when is_binary(CallBack) ->
- validate_callback(binary_to_list(CallBack));
-validate_callback([]) ->
- ok;
-validate_callback([Char | Rest]) ->
- case Char of
- _ when Char >= $a andalso Char =< $z -> ok;
- _ when Char >= $A andalso Char =< $Z -> ok;
- _ when Char >= $0 andalso Char =< $9 -> ok;
- _ when Char == $. -> ok;
- _ when Char == $_ -> ok;
- _ when Char == $[ -> ok;
- _ when Char == $] -> ok;
- _ ->
- throw({bad_request, invalid_callback})
- end,
- validate_callback(Rest).
-
-
-error_info({Error, Reason}) when is_list(Reason) ->
- error_info({Error, ?l2b(Reason)});
-error_info(bad_request) ->
- {400, <<"bad_request">>, <<>>};
-error_info({bad_request, Reason}) ->
- {400, <<"bad_request">>, Reason};
-error_info({query_parse_error, Reason}) ->
- {400, <<"query_parse_error">>, Reason};
-% Prior art for md5 mismatch resulting in a 400 is from AWS S3
-error_info(md5_mismatch) ->
- {400, <<"content_md5_mismatch">>, <<"Possible message corruption.">>};
-error_info(not_found) ->
- {404, <<"not_found">>, <<"missing">>};
-error_info({not_found, Reason}) ->
- {404, <<"not_found">>, Reason};
-error_info({not_acceptable, Reason}) ->
- {406, <<"not_acceptable">>, Reason};
-error_info(conflict) ->
- {409, <<"conflict">>, <<"Document update conflict.">>};
-error_info({forbidden, Msg}) ->
- {403, <<"forbidden">>, Msg};
-error_info({unauthorized, Msg}) ->
- {401, <<"unauthorized">>, Msg};
-error_info(file_exists) ->
- {412, <<"file_exists">>, <<"The database could not be "
- "created, the file already exists.">>};
-error_info({bad_ctype, Reason}) ->
- {415, <<"bad_content_type">>, Reason};
-error_info(requested_range_not_satisfiable) ->
- {416, <<"requested_range_not_satisfiable">>, <<"Requested range not satisfiable">>};
-error_info({error, illegal_database_name}) ->
- {400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), "
- "digits (0-9), and any of the characters _, $, (, ), +, -, and / "
- "are allowed. Must begin with a letter.">>};
-error_info({missing_stub, Reason}) ->
- {412, <<"missing_stub">>, Reason};
-error_info({Error, Reason}) ->
- {500, couch_util:to_binary(Error), couch_util:to_binary(Reason)};
-error_info(Error) ->
- {500, <<"unknown_error">>, couch_util:to_binary(Error)}.
-
-error_headers(#httpd{mochi_req=MochiReq}=Req, Code, ErrorStr, ReasonStr) ->
- if Code == 401 ->
- % this is where the basic auth popup is triggered
- case MochiReq:get_header_value("X-CouchDB-WWW-Authenticate") of
- undefined ->
- case couch_config:get("httpd", "WWW-Authenticate", nil) of
- nil ->
-                % If the client is a browser and the basic auth popup isn't
-                % turned on, redirect to the session page.
- case ErrorStr of
- <<"unauthorized">> ->
- case couch_config:get("couch_httpd_auth", "authentication_redirect", nil) of
- nil -> {Code, []};
- AuthRedirect ->
- case couch_config:get("couch_httpd_auth", "require_valid_user", "false") of
- "true" ->
- % send the browser popup header no matter what if we are require_valid_user
- {Code, [{"WWW-Authenticate", "Basic realm=\"server\""}]};
- _False ->
- case MochiReq:accepts_content_type("text/html") of
- false ->
- {Code, []};
- true ->
- % Redirect to the path the user requested, not
- % the one that is used internally.
- UrlReturnRaw = case MochiReq:get_header_value("x-couchdb-vhost-path") of
- undefined ->
- MochiReq:get(path);
- VHostPath ->
- VHostPath
- end,
- RedirectLocation = lists:flatten([
- AuthRedirect,
- "?return=", couch_util:url_encode(UrlReturnRaw),
- "&reason=", couch_util:url_encode(ReasonStr)
- ]),
- {302, [{"Location", absolute_uri(Req, RedirectLocation)}]}
- end
- end
- end;
- _Else ->
- {Code, []}
- end;
- Type ->
- {Code, [{"WWW-Authenticate", Type}]}
- end;
- Type ->
- {Code, [{"WWW-Authenticate", Type}]}
- end;
- true ->
- {Code, []}
- end.
-
-send_error(_Req, {already_sent, Resp, _Error}) ->
- {ok, Resp};
-
-send_error(Req, Error) ->
- {Code, ErrorStr, ReasonStr} = error_info(Error),
- {Code1, Headers} = error_headers(Req, Code, ErrorStr, ReasonStr),
- send_error(Req, Code1, Headers, ErrorStr, ReasonStr).
-
-send_error(Req, Code, ErrorStr, ReasonStr) ->
- send_error(Req, Code, [], ErrorStr, ReasonStr).
-
-send_error(Req, Code, Headers, ErrorStr, ReasonStr) ->
- send_json(Req, Code, Headers,
- {[{<<"error">>, ErrorStr},
- {<<"reason">>, ReasonStr}]}).
-
-% give the option for list functions to output html or other raw errors
-send_chunked_error(Resp, {_Error, {[{<<"body">>, Reason}]}}) ->
- send_chunk(Resp, Reason),
- last_chunk(Resp);
-
-send_chunked_error(Resp, Error) ->
- {Code, ErrorStr, ReasonStr} = error_info(Error),
- JsonError = {[{<<"code">>, Code},
- {<<"error">>, ErrorStr},
- {<<"reason">>, ReasonStr}]},
- send_chunk(Resp, ?l2b([$\n,?JSON_ENCODE(JsonError),$\n])),
- last_chunk(Resp).
-
-send_redirect(Req, Path) ->
- Headers = [{"Location", couch_httpd:absolute_uri(Req, Path)}],
- send_response(Req, 301, Headers, <<>>).
-
-negotiate_content_type(#httpd{mochi_req=MochiReq}) ->
- %% Determine the appropriate Content-Type header for a JSON response
- %% depending on the Accept header in the request. A request that explicitly
-    %% lists the correct JSON MIME type will get that type; otherwise the
-    %% response will have the generic MIME type "text/plain".
- AcceptedTypes = case MochiReq:get_header_value("Accept") of
- undefined -> [];
- AcceptHeader -> string:tokens(AcceptHeader, ", ")
- end,
- case lists:member("application/json", AcceptedTypes) of
- true -> "application/json";
- false -> "text/plain;charset=utf-8"
- end.
-
-server_header() ->
- [{"Server", "CouchDB/" ++ couch_server:get_version() ++
- " (Erlang OTP/" ++ erlang:system_info(otp_release) ++ ")"}].
-
-
--record(mp, {boundary, buffer, data_fun, callback}).
-
-
-parse_multipart_request(ContentType, DataFun, Callback) ->
- Boundary0 = iolist_to_binary(get_boundary(ContentType)),
- Boundary = <<"\r\n--", Boundary0/binary>>,
- Mp = #mp{boundary= Boundary,
- buffer= <<>>,
- data_fun=DataFun,
- callback=Callback},
- {Mp2, _NilCallback} = read_until(Mp, <<"--", Boundary0/binary>>,
- fun(Next)-> nil_callback(Next) end),
- #mp{buffer=Buffer, data_fun=DataFun2, callback=Callback2} =
- parse_part_header(Mp2),
- {Buffer, DataFun2, Callback2}.
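-
-% The Callback is fed a stream of events and returns the fun to call with
-% the next event, roughly:
-%   {headers, Headers}, then {body, Bytes} (repeated), then body_end,
-%   repeating per part, and ending with eof.
-% mp_parse_doc/2 and mp_parse_atts/1 in couch_doc are callbacks of this shape.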
-
-nil_callback(_Data)->
- fun(Next) -> nil_callback(Next) end.
-
-get_boundary({"multipart/" ++ _, Opts}) ->
- case couch_util:get_value("boundary", Opts) of
- S when is_list(S) ->
- S
- end;
-get_boundary(ContentType) ->
- {"multipart/" ++ _ , Opts} = mochiweb_util:parse_header(ContentType),
- get_boundary({"multipart/", Opts}).
-
-
-
-split_header(<<>>) ->
- [];
-split_header(Line) ->
- {Name, [$: | Value]} = lists:splitwith(fun (C) -> C =/= $: end,
- binary_to_list(Line)),
- [{string:to_lower(string:strip(Name)),
- mochiweb_util:parse_header(Value)}].
-
-read_until(#mp{data_fun=DataFun, buffer=Buffer}=Mp, Pattern, Callback) ->
- case find_in_binary(Pattern, Buffer) of
- not_found ->
- Callback2 = Callback(Buffer),
- {Buffer2, DataFun2} = DataFun(),
- Buffer3 = iolist_to_binary(Buffer2),
- read_until(Mp#mp{data_fun=DataFun2,buffer=Buffer3}, Pattern, Callback2);
- {partial, 0} ->
- {NewData, DataFun2} = DataFun(),
- read_until(Mp#mp{data_fun=DataFun2,
- buffer= iolist_to_binary([Buffer,NewData])},
- Pattern, Callback);
- {partial, Skip} ->
- <<DataChunk:Skip/binary, Rest/binary>> = Buffer,
- Callback2 = Callback(DataChunk),
- {NewData, DataFun2} = DataFun(),
- read_until(Mp#mp{data_fun=DataFun2,
- buffer= iolist_to_binary([Rest | NewData])},
- Pattern, Callback2);
- {exact, 0} ->
- PatternLen = size(Pattern),
- <<_:PatternLen/binary, Rest/binary>> = Buffer,
- {Mp#mp{buffer= Rest}, Callback};
- {exact, Skip} ->
- PatternLen = size(Pattern),
- <<DataChunk:Skip/binary, _:PatternLen/binary, Rest/binary>> = Buffer,
- Callback2 = Callback(DataChunk),
- {Mp#mp{buffer= Rest}, Callback2}
- end.
-
-
-parse_part_header(#mp{callback=UserCallBack}=Mp) ->
- {Mp2, AccCallback} = read_until(Mp, <<"\r\n\r\n">>,
- fun(Next) -> acc_callback(Next, []) end),
- HeaderData = AccCallback(get_data),
-
- Headers =
- lists:foldl(fun(Line, Acc) ->
- split_header(Line) ++ Acc
- end, [], re:split(HeaderData,<<"\r\n">>, [])),
- NextCallback = UserCallBack({headers, Headers}),
- parse_part_body(Mp2#mp{callback=NextCallback}).
-
-parse_part_body(#mp{boundary=Prefix, callback=Callback}=Mp) ->
- {Mp2, WrappedCallback} = read_until(Mp, Prefix,
- fun(Data) -> body_callback_wrapper(Data, Callback) end),
- Callback2 = WrappedCallback(get_callback),
- Callback3 = Callback2(body_end),
- case check_for_last(Mp2#mp{callback=Callback3}) of
- {last, #mp{callback=Callback3}=Mp3} ->
- Mp3#mp{callback=Callback3(eof)};
- {more, Mp3} ->
- parse_part_header(Mp3)
- end.
-
-acc_callback(get_data, Acc)->
- iolist_to_binary(lists:reverse(Acc));
-acc_callback(Data, Acc)->
- fun(Next) -> acc_callback(Next, [Data | Acc]) end.
-
-body_callback_wrapper(get_callback, Callback) ->
- Callback;
-body_callback_wrapper(Data, Callback) ->
- Callback2 = Callback({body, Data}),
- fun(Next) -> body_callback_wrapper(Next, Callback2) end.
-
-
-check_for_last(#mp{buffer=Buffer, data_fun=DataFun}=Mp) ->
- case Buffer of
- <<"--",_/binary>> -> {last, Mp};
- <<_, _, _/binary>> -> {more, Mp};
- _ -> % not long enough
- {Data, DataFun2} = DataFun(),
- check_for_last(Mp#mp{buffer= <<Buffer/binary, Data/binary>>,
- data_fun = DataFun2})
- end.
-
-find_in_binary(B, Data) when size(B) > 0 ->
- case size(Data) - size(B) of
- Last when Last < 0 ->
- partial_find(B, Data, 0, size(Data));
- Last ->
- find_in_binary(B, size(B), Data, 0, Last)
- end.
-
-find_in_binary(B, BS, D, N, Last) when N =< Last->
- case D of
- <<_:N/binary, B:BS/binary, _/binary>> ->
- {exact, N};
- _ ->
- find_in_binary(B, BS, D, 1 + N, Last)
- end;
-find_in_binary(B, BS, D, N, Last) when N =:= 1 + Last ->
- partial_find(B, D, N, BS - 1).
-
-partial_find(_B, _D, _N, 0) ->
- not_found;
-partial_find(B, D, N, K) ->
- <<B1:K/binary, _/binary>> = B,
- case D of
- <<_Skip:N/binary, B1/binary>> ->
- {partial, N};
- _ ->
- partial_find(B, D, 1 + N, K - 1)
- end.
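-% Illustrative examples (example_find_in_binary/0 is a hypothetical helper):
-% {exact, N} means the whole pattern occurs at byte offset N, while
-% {partial, N} means only a prefix of the pattern matches at the very end of
-% the data, so more input is needed before a verdict.
-example_find_in_binary() ->
- {exact, 2} = find_in_binary(<<"cd">>, <<"abcdef">>),
- {partial, 2} = find_in_binary(<<"cde">>, <<"abcd">>),
- not_found = find_in_binary(<<"zz">>, <<"abc">>),
- ok.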
-
-
diff --git a/1.1.x/src/couchdb/couch_httpd_auth.erl b/1.1.x/src/couchdb/couch_httpd_auth.erl
deleted file mode 100644
index 155865e5..00000000
--- a/1.1.x/src/couchdb/couch_httpd_auth.erl
+++ /dev/null
@@ -1,359 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_auth).
--include("couch_db.hrl").
-
--export([default_authentication_handler/1,special_test_authentication_handler/1]).
--export([cookie_authentication_handler/1]).
--export([null_authentication_handler/1]).
--export([proxy_authentification_handler/1]).
--export([cookie_auth_header/2]).
--export([handle_session_req/1]).
-
--import(couch_httpd, [header_value/2, send_json/2,send_json/4, send_method_not_allowed/2]).
-
-special_test_authentication_handler(Req) ->
- case header_value(Req, "WWW-Authenticate") of
- "X-Couch-Test-Auth " ++ NamePass ->
- % NamePass is a colon-separated string: "joe schmoe:a password".
- [Name, Pass] = re:split(NamePass, ":", [{return, list}]),
- case {Name, Pass} of
- {"Jan Lehnardt", "apple"} -> ok;
- {"Christopher Lenz", "dog food"} -> ok;
- {"Noah Slater", "biggiesmalls endian"} -> ok;
- {"Chris Anderson", "mp3"} -> ok;
- {"Damien Katz", "pecan pie"} -> ok;
- {_, _} ->
- throw({unauthorized, <<"Name or password is incorrect.">>})
- end,
- Req#httpd{user_ctx=#user_ctx{name=?l2b(Name)}};
- _ ->
- % No X-Couch-Test-Auth credentials sent, give admin access so the
- % previous authentication can be restored after the test
- Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}
- end.
-
-basic_name_pw(Req) ->
- AuthorizationHeader = header_value(Req, "Authorization"),
- case AuthorizationHeader of
- "Basic " ++ Base64Value ->
- case string:tokens(?b2l(base64:decode(Base64Value)),":") of
- ["_", "_"] ->
- % special name/password pair that requests a logout
- nil;
- [User, Pass] ->
- {User, Pass};
- [User | Pass] ->
- {User, string:join(Pass, ":")};
- _ ->
- nil
- end;
- _ ->
- nil
- end.
-
-default_authentication_handler(Req) ->
- case basic_name_pw(Req) of
- {User, Pass} ->
- case couch_auth_cache:get_user_creds(User) of
- nil ->
- throw({unauthorized, <<"Name or password is incorrect.">>});
- UserProps ->
- UserSalt = couch_util:get_value(<<"salt">>, UserProps, <<>>),
- PasswordHash = hash_password(?l2b(Pass), UserSalt),
- ExpectedHash = couch_util:get_value(<<"password_sha">>, UserProps, nil),
- case couch_util:verify(ExpectedHash, PasswordHash) of
- true ->
- Req#httpd{user_ctx=#user_ctx{
- name=?l2b(User),
- roles=couch_util:get_value(<<"roles">>, UserProps, [])
- }};
- _Else ->
- throw({unauthorized, <<"Name or password is incorrect.">>})
- end
- end;
- nil ->
- case couch_server:has_admins() of
- true ->
- Req;
- false ->
- case couch_config:get("couch_httpd_auth", "require_valid_user", "false") of
- "true" -> Req;
- % If no admins, and no user required, then everyone is admin!
- % Yay, admin party!
- _ -> Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}
- end
- end
- end.
-
-null_authentication_handler(Req) ->
- Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}.
-
-%% @doc proxy auth handler.
-%
-% This handler allows creation of a userCtx object from a user authenticated
-% remotely. The client simply passes specific headers to CouchDB and the
-% handler creates the userCtx from them. The header names can be configured
-% in local.ini; by default they are:
-%
-% * X-Auth-CouchDB-UserName : contains the username (x_auth_username in the
-% couch_httpd_auth section)
-% * X-Auth-CouchDB-Roles : contains the user roles as a comma-separated list
-% (x_auth_roles in the couch_httpd_auth section)
-% * X-Auth-CouchDB-Token : token authenticating the request (x_auth_token in
-% the couch_httpd_auth section). The token is an hmac-sha1 hash of the
-% username, keyed with the secret setting of the couch_httpd_auth section,
-% which must be the same on the client and the CouchDB node. The token is
-% optional unless proxy_use_secret in the couch_httpd_auth section is true.
-%
-proxy_authentification_handler(Req) ->
- case proxy_auth_user(Req) of
- nil -> Req;
- Req2 -> Req2
- end.
-
-proxy_auth_user(Req) ->
- XHeaderUserName = couch_config:get("couch_httpd_auth", "x_auth_username",
- "X-Auth-CouchDB-UserName"),
- XHeaderRoles = couch_config:get("couch_httpd_auth", "x_auth_roles",
- "X-Auth-CouchDB-Roles"),
- XHeaderToken = couch_config:get("couch_httpd_auth", "x_auth_token",
- "X-Auth-CouchDB-Token"),
- case header_value(Req, XHeaderUserName) of
- undefined -> nil;
- UserName ->
- Roles = case header_value(Req, XHeaderRoles) of
- undefined -> [];
- Else ->
- [?l2b(R) || R <- string:tokens(Else, ",")]
- end,
- case couch_config:get("couch_httpd_auth", "proxy_use_secret", "false") of
- "true" ->
- case couch_config:get("couch_httpd_auth", "secret", nil) of
- nil ->
- Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName), roles=Roles}};
- Secret ->
- ExpectedToken = couch_util:to_hex(crypto:sha_mac(Secret, UserName)),
- case header_value(Req, XHeaderToken) of
- Token when Token == ExpectedToken ->
- Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName),
- roles=Roles}};
- _ -> nil
- end
- end;
- _ ->
- Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName), roles=Roles}}
- end
- end.
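-% Illustrative sketch (example_proxy_token/2 is a hypothetical helper, not
-% part of the original module): how a proxy would compute the value for the
-% X-Auth-CouchDB-Token header, matching the ExpectedToken check above.
-example_proxy_token(Secret, UserName) ->
- % hex-encoded hmac-sha1 of the username, keyed with the shared secret
- couch_util:to_hex(crypto:sha_mac(Secret, UserName)).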
-
-
-cookie_authentication_handler(#httpd{mochi_req=MochiReq}=Req) ->
- case MochiReq:get_cookie_value("AuthSession") of
- undefined -> Req;
- [] -> Req;
- Cookie ->
- [User, TimeStr | HashParts] = try
- AuthSession = couch_util:decodeBase64Url(Cookie),
- [_A, _B | _Cs] = string:tokens(?b2l(AuthSession), ":")
- catch
- _:_Error ->
- Reason = <<"Malformed AuthSession cookie. Please clear your cookies.">>,
- throw({bad_request, Reason})
- end,
- % Verify expiry and hash
- CurrentTime = make_cookie_time(),
- case couch_config:get("couch_httpd_auth", "secret", nil) of
- nil ->
- ?LOG_DEBUG("cookie auth secret is not set",[]),
- Req;
- SecretStr ->
- Secret = ?l2b(SecretStr),
- case couch_auth_cache:get_user_creds(User) of
- nil -> Req;
- UserProps ->
- UserSalt = couch_util:get_value(<<"salt">>, UserProps, <<"">>),
- FullSecret = <<Secret/binary, UserSalt/binary>>,
- ExpectedHash = crypto:sha_mac(FullSecret, User ++ ":" ++ TimeStr),
- Hash = ?l2b(string:join(HashParts, ":")),
- Timeout = to_int(couch_config:get("couch_httpd_auth", "timeout", 600)),
- ?LOG_DEBUG("timeout ~p", [Timeout]),
- case (catch erlang:list_to_integer(TimeStr, 16)) of
- TimeStamp when CurrentTime < TimeStamp + Timeout ->
- case couch_util:verify(ExpectedHash, Hash) of
- true ->
- TimeLeft = TimeStamp + Timeout - CurrentTime,
- ?LOG_DEBUG("Successful cookie auth as: ~p", [User]),
- Req#httpd{user_ctx=#user_ctx{
- name=?l2b(User),
- roles=couch_util:get_value(<<"roles">>, UserProps, [])
- }, auth={FullSecret, TimeLeft < Timeout*0.9}};
- _Else ->
- Req
- end;
- _Else ->
- Req
- end
- end
- end
- end.
-
-cookie_auth_header(#httpd{user_ctx=#user_ctx{name=null}}, _Headers) -> [];
-cookie_auth_header(#httpd{user_ctx=#user_ctx{name=User}, auth={Secret, true}}=Req, Headers) ->
- % Note: we only set the AuthSession cookie if:
- % * a valid AuthSession cookie has been received
- % * we are outside a 10% timeout window
- % * and if an AuthSession cookie hasn't already been set e.g. by a login
- % or logout handler.
- % The login and logout handlers need to set the AuthSession cookie
- % themselves.
- CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""),
- Cookies = mochiweb_cookies:parse_cookie(CookieHeader),
- AuthSession = couch_util:get_value("AuthSession", Cookies),
- if AuthSession == undefined ->
- TimeStamp = make_cookie_time(),
- [cookie_auth_cookie(Req, ?b2l(User), Secret, TimeStamp)];
- true ->
- []
- end;
-cookie_auth_header(_Req, _Headers) -> [].
-
-cookie_auth_cookie(Req, User, Secret, TimeStamp) ->
- SessionData = User ++ ":" ++ erlang:integer_to_list(TimeStamp, 16),
- Hash = crypto:sha_mac(Secret, SessionData),
- mochiweb_cookies:cookie("AuthSession",
- couch_util:encodeBase64Url(SessionData ++ ":" ++ ?b2l(Hash)),
- [{path, "/"}, cookie_scheme(Req)]).
-
-hash_password(Password, Salt) ->
- ?l2b(couch_util:to_hex(crypto:sha(<<Password/binary, Salt/binary>>))).
-
-ensure_cookie_auth_secret() ->
- case couch_config:get("couch_httpd_auth", "secret", nil) of
- nil ->
- NewSecret = ?b2l(couch_uuids:random()),
- couch_config:set("couch_httpd_auth", "secret", NewSecret),
- NewSecret;
- Secret -> Secret
- end.
-
-% session handlers
-% Login handler with user db
-handle_session_req(#httpd{method='POST', mochi_req=MochiReq}=Req) ->
- ReqBody = MochiReq:recv_body(),
- Form = case MochiReq:get_primary_header_value("content-type") of
- % the login body may be form-urlencoded or JSON
- "application/x-www-form-urlencoded" ++ _ ->
- mochiweb_util:parse_qs(ReqBody);
- "application/json" ++ _ ->
- {Pairs} = ?JSON_DECODE(ReqBody),
- lists:map(fun({Key, Value}) ->
- {?b2l(Key), ?b2l(Value)}
- end, Pairs);
- _ ->
- []
- end,
- UserName = ?l2b(couch_util:get_value("name", Form, "")),
- Password = ?l2b(couch_util:get_value("password", Form, "")),
- ?LOG_DEBUG("Attempt Login: ~s",[UserName]),
- User = case couch_auth_cache:get_user_creds(UserName) of
- nil -> [];
- Result -> Result
- end,
- UserSalt = couch_util:get_value(<<"salt">>, User, <<>>),
- PasswordHash = hash_password(Password, UserSalt),
- ExpectedHash = couch_util:get_value(<<"password_sha">>, User, nil),
- case couch_util:verify(ExpectedHash, PasswordHash) of
- true ->
- % setup the session cookie
- Secret = ?l2b(ensure_cookie_auth_secret()),
- CurrentTime = make_cookie_time(),
- Cookie = cookie_auth_cookie(Req, ?b2l(UserName), <<Secret/binary, UserSalt/binary>>, CurrentTime),
- % TODO document the "next" feature in Futon
- {Code, Headers} = case couch_httpd:qs_value(Req, "next", nil) of
- nil ->
- {200, [Cookie]};
- Redirect ->
- {302, [Cookie, {"Location", couch_httpd:absolute_uri(Req, Redirect)}]}
- end,
- send_json(Req#httpd{req_body=ReqBody}, Code, Headers,
- {[
- {ok, true},
- {name, couch_util:get_value(<<"name">>, User, null)},
- {roles, couch_util:get_value(<<"roles">>, User, [])}
- ]});
- _Else ->
- % clear the session
- Cookie = mochiweb_cookies:cookie("AuthSession", "", [{path, "/"}, cookie_scheme(Req)]),
- send_json(Req, 401, [Cookie], {[{error, <<"unauthorized">>},{reason, <<"Name or password is incorrect.">>}]})
- end;
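-% Example login exchange (illustrative host and credentials):
-% curl -X POST http://127.0.0.1:5984/_session \
-% -H "Content-Type: application/x-www-form-urlencoded" \
-% -d "name=jan&password=apple"
-% A successful login answers 200 with a Set-Cookie: AuthSession=... header
-% and the body {"ok":true,"name":...,"roles":[...]}.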
-% get user info
-% GET /_session
-handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) ->
- Name = UserCtx#user_ctx.name,
- ForceLogin = couch_httpd:qs_value(Req, "basic", "false"),
- case {Name, ForceLogin} of
- {null, "true"} ->
- throw({unauthorized, <<"Please login.">>});
- {Name, _} ->
- send_json(Req, {[
- % TODO: remove this "ok" member
- {ok, true},
- {<<"userCtx">>, {[
- {name, Name},
- {roles, UserCtx#user_ctx.roles}
- ]}},
- {info, {[
- {authentication_db, ?l2b(couch_config:get("couch_httpd_auth", "authentication_db"))},
- {authentication_handlers, [auth_name(H) || H <- couch_httpd:make_fun_spec_strs(
- couch_config:get("httpd", "authentication_handlers"))]}
- ] ++ maybe_value(authenticated, UserCtx#user_ctx.handler, fun(Handler) ->
- auth_name(?b2l(Handler))
- end)}}
- ]})
- end;
-% logout by deleting the session
-handle_session_req(#httpd{method='DELETE'}=Req) ->
- Cookie = mochiweb_cookies:cookie("AuthSession", "", [{path, "/"}, cookie_scheme(Req)]),
- {Code, Headers} = case couch_httpd:qs_value(Req, "next", nil) of
- nil ->
- {200, [Cookie]};
- Redirect ->
- {302, [Cookie, {"Location", couch_httpd:absolute_uri(Req, Redirect)}]}
- end,
- send_json(Req, Code, Headers, {[{ok, true}]});
-handle_session_req(Req) ->
- send_method_not_allowed(Req, "GET,HEAD,POST,DELETE").
-
-maybe_value(_Key, undefined, _Fun) -> [];
-maybe_value(Key, Else, Fun) ->
- [{Key, Fun(Else)}].
-
-auth_name(String) when is_list(String) ->
- [_,_,_,_,_,Name|_] = re:split(String, "[\\W_]", [{return, list}]),
- ?l2b(Name).
-
-to_int(Value) when is_binary(Value) ->
- to_int(?b2l(Value));
-to_int(Value) when is_list(Value) ->
- list_to_integer(Value);
-to_int(Value) when is_integer(Value) ->
- Value.
-
-make_cookie_time() ->
- {NowMS, NowS, _} = erlang:now(),
- NowMS * 1000000 + NowS.
-
-cookie_scheme(#httpd{mochi_req=MochiReq}) ->
- case MochiReq:get(scheme) of
- http -> {http_only, true};
- https -> {secure, true}
- end.
diff --git a/1.1.x/src/couchdb/couch_httpd_db.erl b/1.1.x/src/couchdb/couch_httpd_db.erl
deleted file mode 100644
index 0dbebb6e..00000000
--- a/1.1.x/src/couchdb/couch_httpd_db.erl
+++ /dev/null
@@ -1,1283 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_db).
--include("couch_db.hrl").
-
--export([handle_request/1, handle_compact_req/2, handle_design_req/2,
- db_req/2, couch_doc_open/4,handle_changes_req/2,
- update_doc_result_to_json/1, update_doc_result_to_json/2,
- handle_design_info_req/3, handle_view_cleanup_req/2]).
-
--import(couch_httpd,
- [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
- send_response/4,start_json_response/2,start_json_response/3,
- send_chunk/2,last_chunk/1,end_json_response/1,
- start_chunked_response/3, absolute_uri/2, send/2,
- start_response_length/4]).
-
--record(doc_query_args, {
- options = [],
- rev = nil,
- open_revs = [],
- update_type = interactive_edit,
- atts_since = nil
-}).
-
-% Database request handlers
-handle_request(#httpd{path_parts=[DbName|RestParts],method=Method,
- db_url_handlers=DbUrlHandlers}=Req)->
- case {Method, RestParts} of
- {'PUT', []} ->
- create_db_req(Req, DbName);
- {'DELETE', []} ->
- % if we get ?rev=... the user is using a faulty script where the
- % document id is empty by accident. Let them recover safely.
- case couch_httpd:qs_value(Req, "rev", false) of
- false -> delete_db_req(Req, DbName);
- _Rev -> throw({bad_request,
- "You tried to DELETE a database with a ?=rev parameter. "
- ++ "Did you mean to DELETE a document instead?"})
- end;
- {_, []} ->
- do_db_req(Req, fun db_req/2);
- {_, [SecondPart|_]} ->
- Handler = couch_util:dict_find(SecondPart, DbUrlHandlers, fun db_req/2),
- do_db_req(Req, Handler)
- end.
-
-handle_changes_req(#httpd{method='POST'}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- handle_changes_req1(Req, Db);
-handle_changes_req(#httpd{method='GET'}=Req, Db) ->
- handle_changes_req1(Req, Db);
-handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "GET,HEAD,POST").
-
-handle_changes_req1(Req, Db) ->
- MakeCallback = fun(Resp) ->
- fun({change, Change, _}, "continuous") ->
- send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]);
- ({change, Change, Prepend}, _) ->
- send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]);
- (start, "continuous") ->
- ok;
- (start, _) ->
- send_chunk(Resp, "{\"results\":[\n");
- ({stop, EndSeq}, "continuous") ->
- send_chunk(
- Resp,
- [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"]
- ),
- end_json_response(Resp);
- ({stop, EndSeq}, _) ->
- send_chunk(
- Resp,
- io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq])
- ),
- end_json_response(Resp);
- (timeout, _) ->
- send_chunk(Resp, "\n")
- end
- end,
- ChangesArgs = parse_changes_query(Req),
- ChangesFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
- WrapperFun = case ChangesArgs#changes_args.feed of
- "normal" ->
- {ok, Info} = couch_db:get_db_info(Db),
- CurrentEtag = couch_httpd:make_etag(Info),
- fun(FeedChangesFun) ->
- couch_httpd:etag_respond(
- Req,
- CurrentEtag,
- fun() ->
- {ok, Resp} = couch_httpd:start_json_response(
- Req, 200, [{"Etag", CurrentEtag}]
- ),
- FeedChangesFun(MakeCallback(Resp))
- end
- )
- end;
- _ ->
- % "longpoll" or "continuous"
- {ok, Resp} = couch_httpd:start_json_response(Req, 200),
- fun(FeedChangesFun) ->
- FeedChangesFun(MakeCallback(Resp))
- end
- end,
- couch_stats_collector:track_process_count(
- {httpd, clients_requesting_changes}
- ),
- WrapperFun(ChangesFun).
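-% Illustrative output shapes (derived from MakeCallback above): a "normal"
-% feed responds with {"results":[...],"last_seq":N}; "continuous" emits one
-% JSON-encoded change per line and a final {"last_seq":N} line when the feed
-% stops; heartbeats arrive as bare newlines.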
-
-
-handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, Db) ->
- ok = couch_db:check_is_admin(Db),
- couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_view_compactor:start_compact(DbName, Id),
- send_json(Req, 202, {[{ok, true}]});
-
-handle_compact_req(#httpd{method='POST'}=Req, Db) ->
- ok = couch_db:check_is_admin(Db),
- couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_db:start_compact(Db),
- send_json(Req, 202, {[{ok, true}]});
-
-handle_compact_req(Req, _Db) ->
- send_method_not_allowed(Req, "POST").
-
-handle_view_cleanup_req(#httpd{method='POST'}=Req, Db) ->
- % delete unreferenced index files
- ok = couch_db:check_is_admin(Db),
- couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_view:cleanup_index_files(Db),
- send_json(Req, 202, {[{ok, true}]});
-
-handle_view_cleanup_req(Req, _Db) ->
- send_method_not_allowed(Req, "POST").
-
-
-handle_design_req(#httpd{
- path_parts=[_DbName, _Design, DesignName, <<"_",_/binary>> = Action | _Rest],
- design_url_handlers = DesignUrlHandlers
- }=Req, Db) ->
- % load ddoc
- DesignId = <<"_design/", DesignName/binary>>,
- DDoc = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
- Handler = couch_util:dict_find(Action, DesignUrlHandlers, fun(_, _, _) ->
- throw({not_found, <<"missing handler: ", Action/binary>>})
- end),
- Handler(Req, Db, DDoc);
-
-handle_design_req(Req, Db) ->
- db_req(Req, Db).
-
-handle_design_info_req(#httpd{
- method='GET',
- path_parts=[_DbName, _Design, DesignName, _]
- }=Req, Db, _DDoc) ->
- DesignId = <<"_design/", DesignName/binary>>,
- {ok, GroupInfoList} = couch_view:get_group_info(Db, DesignId),
- send_json(Req, 200, {[
- {name, DesignName},
- {view_index, {GroupInfoList}}
- ]});
-
-handle_design_info_req(Req, _Db, _DDoc) ->
- send_method_not_allowed(Req, "GET").
-
-create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- case couch_server:create(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- couch_db:close(Db),
- DbUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)),
- send_json(Req, 201, [{"Location", DbUrl}], {[{ok, true}]});
- Error ->
- throw(Error)
- end.
-
-delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- case couch_server:delete(DbName, [{user_ctx, UserCtx}]) of
- ok ->
- send_json(Req, 200, {[{ok, true}]});
- Error ->
- throw(Error)
- end.
-
-do_db_req(#httpd{user_ctx=UserCtx,path_parts=[DbName|_]}=Req, Fun) ->
- case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- try
- Fun(Req, Db)
- after
- catch couch_db:close(Db)
- end;
- Error ->
- throw(Error)
- end.
-
-db_req(#httpd{method='GET',path_parts=[_DbName]}=Req, Db) ->
- {ok, DbInfo} = couch_db:get_db_info(Db),
- send_json(Req, {DbInfo});
-
-db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- Doc = couch_doc:from_json_obj(couch_httpd:json_body(Req)),
- Doc2 = case Doc#doc.id of
- <<"">> ->
- Doc#doc{id=couch_uuids:new(), revs={0, []}};
- _ ->
- Doc
- end,
- DocId = Doc2#doc.id,
- case couch_httpd:qs_value(Req, "batch") of
- "ok" ->
- % async_batching
- spawn(fun() ->
- case catch(couch_db:update_doc(Db, Doc2, [])) of
- {ok, _} -> ok;
- Error ->
- ?LOG_INFO("Batch doc error (~s): ~p",[DocId, Error])
- end
- end),
-
- send_json(Req, 202, [], {[
- {ok, true},
- {id, DocId}
- ]});
- _Normal ->
- % normal
- {ok, NewRev} = couch_db:update_doc(Db, Doc2, []),
- DocUrl = absolute_uri(
- Req, binary_to_list(<<"/",DbName/binary,"/", DocId/binary>>)),
- send_json(Req, 201, [{"Location", DocUrl}], {[
- {ok, true},
- {id, DocId},
- {rev, couch_doc:rev_to_str(NewRev)}
- ]})
- end;
-
-
-db_req(#httpd{path_parts=[_DbName]}=Req, _Db) ->
- send_method_not_allowed(Req, "DELETE,GET,HEAD,POST");
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- UpdateSeq = couch_db:get_update_seq(Db),
- CommittedSeq = couch_db:get_committed_update_seq(Db),
- {ok, StartTime} =
- case couch_httpd:qs_value(Req, "seq") of
- undefined ->
- couch_db:ensure_full_commit(Db);
- RequiredStr ->
- RequiredSeq = list_to_integer(RequiredStr),
- if RequiredSeq > UpdateSeq ->
- throw({bad_request,
- "can't do a full commit ahead of current update_seq"});
- RequiredSeq > CommittedSeq ->
- couch_db:ensure_full_commit(Db);
- true ->
- {ok, Db#db.instance_start_time}
- end
- end,
- send_json(Req, 201, {[
- {ok, true},
- {instance_start_time, StartTime}
- ]});
-
-db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
- couch_stats_collector:increment({httpd, bulk_requests}),
- couch_httpd:validate_ctype(Req, "application/json"),
- {JsonProps} = couch_httpd:json_body_obj(Req),
- DocsArray = couch_util:get_value(<<"docs">>, JsonProps),
- case couch_httpd:header_value(Req, "X-Couch-Full-Commit") of
- "true" ->
- Options = [full_commit];
- "false" ->
- Options = [delay_commit];
- _ ->
- Options = []
- end,
- case couch_util:get_value(<<"new_edits">>, JsonProps, true) of
- true ->
- Docs = lists:map(
- fun({ObjProps} = JsonObj) ->
- Doc = couch_doc:from_json_obj(JsonObj),
- validate_attachment_names(Doc),
- Id = case Doc#doc.id of
- <<>> -> couch_uuids:new();
- Id0 -> Id0
- end,
- case couch_util:get_value(<<"_rev">>, ObjProps) of
- undefined ->
- Revs = {0, []};
- Rev ->
- {Pos, RevId} = couch_doc:parse_rev(Rev),
- Revs = {Pos, [RevId]}
- end,
- Doc#doc{id=Id,revs=Revs}
- end,
- DocsArray),
- Options2 =
- case couch_util:get_value(<<"all_or_nothing">>, JsonProps) of
- true -> [all_or_nothing|Options];
- _ -> Options
- end,
- case couch_db:update_docs(Db, Docs, Options2) of
- {ok, Results} ->
- % output the results
- DocResults = lists:zipwith(fun update_doc_result_to_json/2,
- Docs, Results),
- send_json(Req, 201, DocResults);
- {aborted, Errors} ->
- ErrorsJson =
- lists:map(fun update_doc_result_to_json/1, Errors),
- send_json(Req, 417, ErrorsJson)
- end;
- false ->
- Docs = lists:map(fun(JsonObj) ->
- Doc = couch_doc:from_json_obj(JsonObj),
- validate_attachment_names(Doc),
- Doc
- end, DocsArray),
- {ok, Errors} = couch_db:update_docs(Db, Docs, Options, replicated_changes),
- ErrorsJson =
- lists:map(fun update_doc_result_to_json/1, Errors),
- send_json(Req, 201, ErrorsJson)
- end;
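-% Example _bulk_docs request body (illustrative ids and revs):
-% {"docs": [{"_id": "doc1", "value": 1},
-% {"_id": "doc2", "_rev": "1-abc", "value": 2}],
-% "all_or_nothing": false}
-% With new_edits true (the default) each doc is assigned a fresh revision;
-% with new_edits false the supplied revisions are written as replicated
-% changes.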
-db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- {IdsRevs} = couch_httpd:json_body_obj(Req),
- IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs],
-
- case couch_db:purge_docs(Db, IdsRevs2) of
- {ok, PurgeSeq, PurgedIdsRevs} ->
- PurgedIdsRevs2 = [{Id, couch_doc:revs_to_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs],
- send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]});
- Error ->
- throw(Error)
- end;
-
-db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
- Keys = couch_httpd:qs_json_value(Req, "keys", nil),
- all_docs_view(Req, Db, Keys);
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- {Fields} = couch_httpd:json_body_obj(Req),
- case couch_util:get_value(<<"keys">>, Fields, nil) of
- nil ->
- ?LOG_DEBUG("POST to _all_docs with no keys member.", []),
- all_docs_view(Req, Db, nil);
- Keys when is_list(Keys) ->
- all_docs_view(Req, Db, Keys);
- _ ->
- throw({bad_request, "`keys` member must be an array."})
- end;
-
-db_req(#httpd{path_parts=[_,<<"_all_docs">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "GET,HEAD,POST");
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) ->
- {JsonDocIdRevs} = couch_httpd:json_body_obj(Req),
- JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs],
- {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs2),
- Results2 = [{Id, couch_doc:revs_to_strs(Revs)} || {Id, Revs, _} <- Results],
- send_json(Req, {[
- {missing_revs, {Results2}}
- ]});
-
-db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='POST',path_parts=[_,<<"_revs_diff">>]}=Req, Db) ->
- {JsonDocIdRevs} = couch_httpd:json_body_obj(Req),
- JsonDocIdRevs2 =
- [{Id, couch_doc:parse_revs(RevStrs)} || {Id, RevStrs} <- JsonDocIdRevs],
- {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs2),
- Results2 =
- lists:map(fun({Id, MissingRevs, PossibleAncestors}) ->
- {Id,
- {[{missing, couch_doc:revs_to_strs(MissingRevs)}] ++
- if PossibleAncestors == [] ->
- [];
- true ->
- [{possible_ancestors,
- couch_doc:revs_to_strs(PossibleAncestors)}]
- end}}
- end, Results),
- send_json(Req, {Results2});
-
-db_req(#httpd{path_parts=[_,<<"_revs_diff">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='PUT',path_parts=[_,<<"_security">>]}=Req, Db) ->
- SecObj = couch_httpd:json_body(Req),
- ok = couch_db:set_security(Db, SecObj),
- send_json(Req, {[{<<"ok">>, true}]});
-
-db_req(#httpd{method='GET',path_parts=[_,<<"_security">>]}=Req, Db) ->
- send_json(Req, couch_db:get_security(Db));
-
-db_req(#httpd{path_parts=[_,<<"_security">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "PUT,GET");
-
-db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req,
- Db) ->
- Limit = couch_httpd:json_body(Req),
- ok = couch_db:set_revs_limit(Db, Limit),
- send_json(Req, {[{<<"ok">>, true}]});
-
-db_req(#httpd{method='GET',path_parts=[_,<<"_revs_limit">>]}=Req, Db) ->
- send_json(Req, couch_db:get_revs_limit(Db));
-
-db_req(#httpd{path_parts=[_,<<"_revs_limit">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "PUT,GET");
-
-% Special case to enable using an unencoded slash in the URL of design docs,
-% as slashes in document IDs must otherwise be URL encoded.
-db_req(#httpd{method='GET',mochi_req=MochiReq, path_parts=[DbName,<<"_design/",_/binary>>|_]}=Req, _Db) ->
- PathFront = "/" ++ couch_httpd:quote(binary_to_list(DbName)) ++ "/",
- [_|PathTail] = re:split(MochiReq:get(raw_path), "_design%2F",
- [{return, list}]),
- couch_httpd:send_redirect(Req, PathFront ++ "_design/" ++
- mochiweb_util:join(PathTail, "_design%2F"));
-
-db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name]}=Req, Db) ->
- db_doc_req(Req, Db, <<"_design/",Name/binary>>);
-
-db_req(#httpd{path_parts=[_DbName,<<"_design">>,Name|FileNameParts]}=Req, Db) ->
- db_attachment_req(Req, Db, <<"_design/",Name/binary>>, FileNameParts);
-
-
-% Special case to allow for accessing local documents without %2F
-% encoding the docid. Throws out requests that don't have the second
-% path part or that specify an attachment name.
-db_req(#httpd{path_parts=[_DbName, <<"_local">>]}, _Db) ->
- throw({bad_request, <<"Invalid _local document id.">>});
-
-db_req(#httpd{path_parts=[_DbName, <<"_local/">>]}, _Db) ->
- throw({bad_request, <<"Invalid _local document id.">>});
-
-db_req(#httpd{path_parts=[_DbName, <<"_local">>, Name]}=Req, Db) ->
- db_doc_req(Req, Db, <<"_local/", Name/binary>>);
-
-db_req(#httpd{path_parts=[_DbName, <<"_local">> | _Rest]}, _Db) ->
- throw({bad_request, <<"_local documents do not accept attachments.">>});
-
-db_req(#httpd{path_parts=[_, DocId]}=Req, Db) ->
- db_doc_req(Req, Db, DocId);
-
-db_req(#httpd{path_parts=[_, DocId | FileNameParts]}=Req, Db) ->
- db_attachment_req(Req, Db, DocId, FileNameParts).
-
-all_docs_view(Req, Db, Keys) ->
- #view_query_args{
- start_key = StartKey,
- start_docid = StartDocId,
- end_key = EndKey,
- end_docid = EndDocId,
- limit = Limit,
- skip = SkipCount,
- direction = Dir,
- inclusive_end = Inclusive
- } = QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, map),
- {ok, Info} = couch_db:get_db_info(Db),
- CurrentEtag = couch_httpd:make_etag(Info),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
-
- TotalRowCount = couch_util:get_value(doc_count, Info),
- StartId = if is_binary(StartKey) -> StartKey;
- true -> StartDocId
- end,
- EndId = if is_binary(EndKey) -> EndKey;
- true -> EndDocId
- end,
- FoldAccInit = {Limit, SkipCount, undefined, []},
- UpdateSeq = couch_db:get_update_seq(Db),
- JsonParams = case couch_httpd:qs_value(Req, "update_seq") of
- "true" ->
- [{update_seq, UpdateSeq}];
- _Else ->
- []
- end,
- case Keys of
- nil ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
- TotalRowCount, #view_fold_helper_funs{
- reduce_count = fun couch_db:enum_docs_reduce_to_count/1,
- send_row = fun all_docs_send_json_view_row/6
- }),
- AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
- case couch_doc:to_doc_info(FullDocInfo) of
- #doc_info{revs=[#rev_info{deleted=false}|_]} = DocInfo ->
- FoldlFun({{Id, Id}, DocInfo}, Offset, Acc);
- #doc_info{revs=[#rev_info{deleted=true}|_]} ->
- {ok, Acc}
- end
- end,
- {ok, LastOffset, FoldResult} = couch_db:enum_docs(Db,
- AdapterFun, FoldAccInit, [{start_key, StartId}, {dir, Dir},
- {if Inclusive -> end_key; true -> end_key_gt end, EndId}]),
- couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult, JsonParams);
- _ ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
- TotalRowCount, #view_fold_helper_funs{
- reduce_count = fun(Offset) -> Offset end,
- send_row = fun all_docs_send_json_view_row/6
- }),
- KeyFoldFun = case Dir of
- fwd ->
- fun lists:foldl/3;
- rev ->
- fun lists:foldr/3
- end,
- FoldResult = KeyFoldFun(
- fun(Key, FoldAcc) ->
- DocInfo = (catch couch_db:get_doc_info(Db, Key)),
- Doc = case DocInfo of
- {ok, #doc_info{id = Id} = Di} ->
- {{Id, Id}, Di};
- not_found ->
- {{Key, error}, not_found};
- _ ->
- ?LOG_ERROR("Invalid DocInfo: ~p", [DocInfo]),
- throw({error, invalid_doc_info})
- end,
- {_, FoldAcc2} = FoldlFun(Doc, 0, FoldAcc),
- FoldAcc2
- end, FoldAccInit, Keys),
- couch_httpd_view:finish_view_fold(Req, TotalRowCount, 0, FoldResult, JsonParams)
- end
- end).
-
-all_docs_send_json_view_row(Resp, Db, KV, IncludeDocs, Conflicts, RowFront) ->
- JsonRow = all_docs_view_row_obj(Db, KV, IncludeDocs, Conflicts),
- send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonRow)),
- {ok, ",\r\n"}.
-
-all_docs_view_row_obj(_Db, {{DocId, error}, Value}, _IncludeDocs, _Conflicts) ->
- {[{key, DocId}, {error, Value}]};
-all_docs_view_row_obj(Db, {_KeyDocId, DocInfo}, true, Conflicts) ->
- case DocInfo of
- #doc_info{revs = [#rev_info{deleted = true} | _]} ->
- {all_docs_row(DocInfo) ++ [{doc, null}]};
- _ ->
- {all_docs_row(DocInfo) ++ couch_httpd_view:doc_member(
- Db, DocInfo, if Conflicts -> [conflicts]; true -> [] end)}
- end;
-all_docs_view_row_obj(_Db, {_KeyDocId, DocInfo}, _IncludeDocs, _Conflicts) ->
- {all_docs_row(DocInfo)}.
-
-all_docs_row(#doc_info{id = Id, revs = [RevInfo | _]}) ->
- #rev_info{rev = Rev, deleted = Del} = RevInfo,
- [ {id, Id}, {key, Id},
- {value, {[{rev, couch_doc:rev_to_str(Rev)}] ++ case Del of
- true -> [{deleted, true}];
- false -> []
- end}} ].
-
-
-db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
- % check for the existence of the doc to handle the 404 case.
- couch_doc_open(Db, DocId, nil, []),
- case couch_httpd:qs_value(Req, "rev") of
- undefined ->
- update_doc(Req, Db, DocId,
- couch_doc_from_req(Req, DocId, {[{<<"_deleted">>,true}]}));
- Rev ->
- update_doc(Req, Db, DocId,
- couch_doc_from_req(Req, DocId,
- {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]}))
- end;
-
-db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) ->
- #doc_query_args{
- rev = Rev,
- open_revs = Revs,
- options = Options1,
- atts_since = AttsSince
- } = parse_doc_query(Req),
- Options = case AttsSince of
- nil ->
- Options1;
- RevList when is_list(RevList) ->
- [{atts_since, RevList}, attachments | Options1]
- end,
- case Revs of
- [] ->
- Doc = couch_doc_open(Db, DocId, Rev, Options),
- send_doc(Req, Doc, Options);
- _ ->
- {ok, Results} = couch_db:open_doc_revs(Db, DocId, Revs, Options),
- case MochiReq:accepts_content_type("multipart/mixed") of
- false ->
- {ok, Resp} = start_json_response(Req, 200),
- send_chunk(Resp, "["),
- % We loop through the docs. The first time through the separator
- % is the empty string, then a comma on subsequent iterations.
- lists:foldl(
- fun(Result, AccSeparator) ->
- case Result of
- {ok, Doc} ->
- JsonDoc = couch_doc:to_json_obj(Doc, Options),
- Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
- send_chunk(Resp, AccSeparator ++ Json);
- {{not_found, missing}, RevId} ->
- RevStr = couch_doc:rev_to_str(RevId),
- Json = ?JSON_ENCODE({[{"missing", RevStr}]}),
- send_chunk(Resp, AccSeparator ++ Json)
- end,
- "," % AccSeparator now has a comma
- end,
- "", Results),
- send_chunk(Resp, "]"),
- end_json_response(Resp);
- true ->
- send_docs_multipart(Req, Results, Options)
- end
- end;
-
-
-db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
- couch_httpd:validate_referer(Req),
- couch_doc:validate_docid(DocId),
- couch_httpd:validate_ctype(Req, "multipart/form-data"),
- Form = couch_httpd:parse_form(Req),
- case couch_util:get_value("_doc", Form) of
- undefined ->
- Rev = couch_doc:parse_rev(couch_util:get_value("_rev", Form)),
- {ok, [{ok, Doc}]} = couch_db:open_doc_revs(Db, DocId, [Rev], []);
- Json ->
- Doc = couch_doc_from_req(Req, DocId, ?JSON_DECODE(Json))
- end,
- UpdatedAtts = [
- #att{name=validate_attachment_name(Name),
- type=list_to_binary(ContentType),
- data=Content} ||
- {Name, {ContentType, _}, Content} <-
- proplists:get_all_values("_attachments", Form)
- ],
- #doc{atts=OldAtts} = Doc,
- OldAtts2 = lists:flatmap(
- fun(#att{name=OldName}=Att) ->
- case [1 || A <- UpdatedAtts, A#att.name == OldName] of
- [] -> [Att]; % the attachment wasn't in the UpdatedAtts, return it
- _ -> [] % the attachment was in the UpdatedAtts, drop it
- end
- end, OldAtts),
- NewDoc = Doc#doc{
- atts = UpdatedAtts ++ OldAtts2
- },
- {ok, NewRev} = couch_db:update_doc(Db, NewDoc, []),
-
- send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewRev)) ++ "\""}], {[
- {ok, true},
- {id, DocId},
- {rev, couch_doc:rev_to_str(NewRev)}
- ]});
-
-db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
- #doc_query_args{
- update_type = UpdateType
- } = parse_doc_query(Req),
- couch_doc:validate_docid(DocId),
-
- Loc = absolute_uri(Req, "/" ++ ?b2l(Db#db.name) ++ "/" ++ ?b2l(DocId)),
- RespHeaders = [{"Location", Loc}],
- case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of
- ("multipart/related;" ++ _) = ContentType ->
- {ok, Doc0} = couch_doc:doc_from_multi_part_stream(ContentType,
- fun() -> receive_request_data(Req) end),
- Doc = couch_doc_from_req(Req, DocId, Doc0),
- update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType);
- _Else ->
- case couch_httpd:qs_value(Req, "batch") of
- "ok" ->
- % batch
- Doc = couch_doc_from_req(Req, DocId, couch_httpd:json_body(Req)),
-
- spawn(fun() ->
- case catch(couch_db:update_doc(Db, Doc, [])) of
- {ok, _} -> ok;
- Error ->
- ?LOG_INFO("Batch doc error (~s): ~p",[DocId, Error])
- end
- end),
- send_json(Req, 202, [], {[
- {ok, true},
- {id, DocId}
- ]});
- _Normal ->
- % normal
- Body = couch_httpd:json_body(Req),
- Doc = couch_doc_from_req(Req, DocId, Body),
- update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType)
- end
- end;
-
-db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) ->
- SourceRev =
- case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of
- missing_rev -> nil;
- Rev -> Rev
- end,
- {TargetDocId, TargetRevs} = parse_copy_destination_header(Req),
- % open old doc
- Doc = couch_doc_open(Db, SourceDocId, SourceRev, []),
- % save new doc
- {ok, NewTargetRev} = couch_db:update_doc(Db,
- Doc#doc{id=TargetDocId, revs=TargetRevs}, []),
- % respond
- send_json(Req, 201,
- [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}],
- update_doc_result_to_json(TargetDocId, {ok, NewTargetRev}));
-
-db_doc_req(Req, _Db, _DocId) ->
- send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY").
-
-
-send_doc(Req, Doc, Options) ->
- case Doc#doc.meta of
- [] ->
- DiskEtag = couch_httpd:doc_etag(Doc),
- % output etag only when we have no meta
- couch_httpd:etag_respond(Req, DiskEtag, fun() ->
- send_doc_efficiently(Req, Doc, [{"Etag", DiskEtag}], Options)
- end);
- _ ->
- send_doc_efficiently(Req, Doc, [], Options)
- end.
-
-
-send_doc_efficiently(Req, #doc{atts=[]}=Doc, Headers, Options) ->
- send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options));
-send_doc_efficiently(#httpd{mochi_req = MochiReq} = Req,
- #doc{atts = Atts} = Doc, Headers, Options) ->
- case lists:member(attachments, Options) of
- true ->
- case MochiReq:accepts_content_type("multipart/related") of
- false ->
- send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options));
- true ->
- Boundary = couch_uuids:random(),
- JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc,
- [attachments, follows|Options])),
- {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
- Boundary,JsonBytes, Atts, true),
- CType = {<<"Content-Type">>, ContentType},
- {ok, Resp} = start_response_length(Req, 200, [CType|Headers], Len),
- couch_doc:doc_to_multi_part_stream(Boundary,JsonBytes,Atts,
- fun(Data) -> couch_httpd:send(Resp, Data) end, true)
- end;
- false ->
- send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options))
- end.
-
-send_docs_multipart(Req, Results, Options1) ->
- OuterBoundary = couch_uuids:random(),
- InnerBoundary = couch_uuids:random(),
- Options = [attachments, follows, att_encoding_info | Options1],
- CType = {"Content-Type",
- "multipart/mixed; boundary=\"" ++ ?b2l(OuterBoundary) ++ "\""},
- {ok, Resp} = start_chunked_response(Req, 200, [CType]),
- couch_httpd:send_chunk(Resp, <<"--", OuterBoundary/binary>>),
- lists:foreach(
- fun({ok, #doc{atts=Atts}=Doc}) ->
- JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc, Options)),
- {ContentType, _Len} = couch_doc:len_doc_to_multi_part_stream(
- InnerBoundary, JsonBytes, Atts, true),
- couch_httpd:send_chunk(Resp, <<"\r\nContent-Type: ",
- ContentType/binary, "\r\n\r\n">>),
- couch_doc:doc_to_multi_part_stream(InnerBoundary, JsonBytes, Atts,
- fun(Data) -> couch_httpd:send_chunk(Resp, Data)
- end, true),
- couch_httpd:send_chunk(Resp, <<"\r\n--", OuterBoundary/binary>>);
- ({{not_found, missing}, RevId}) ->
- RevStr = couch_doc:rev_to_str(RevId),
- Json = ?JSON_ENCODE({[{"missing", RevStr}]}),
- couch_httpd:send_chunk(Resp,
- [<<"\r\nContent-Type: application/json; error=\"true\"\r\n\r\n">>,
- Json,
- <<"\r\n--", OuterBoundary/binary>>])
- end, Results),
- couch_httpd:send_chunk(Resp, <<"--">>),
- couch_httpd:last_chunk(Resp).
-
-send_ranges_multipart(Req, ContentType, Len, Att, Ranges) ->
- Boundary = couch_uuids:random(),
- CType = {"Content-Type",
- "multipart/byteranges; boundary=\"" ++ ?b2l(Boundary) ++ "\""},
- {ok, Resp} = start_chunked_response(Req, 206, [CType]),
- couch_httpd:send_chunk(Resp, <<"--", Boundary/binary>>),
- lists:foreach(fun({From, To}) ->
- ContentRange = make_content_range(From, To, Len),
- couch_httpd:send_chunk(Resp,
- <<"\r\nContent-Type: ", ContentType/binary, "\r\n",
- "Content-Range: ", ContentRange/binary, "\r\n",
- "\r\n">>),
- couch_doc:range_att_foldl(Att, From, To + 1,
- fun(Seg, _) -> send_chunk(Resp, Seg) end, {ok, Resp}),
- couch_httpd:send_chunk(Resp, <<"\r\n--", Boundary/binary>>)
- end, Ranges),
- couch_httpd:send_chunk(Resp, <<"--">>),
- couch_httpd:last_chunk(Resp),
- {ok, Resp}.
-
-receive_request_data(Req) ->
- {couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}.
-
-make_content_range(From, To, Len) ->
- ?l2b(io_lib:format("bytes ~B-~B/~B", [From, To, Len])).
-
-update_doc_result_to_json({{Id, Rev}, Error}) ->
- {_Code, Err, Msg} = couch_httpd:error_info(Error),
- {[{id, Id}, {rev, couch_doc:rev_to_str(Rev)},
- {error, Err}, {reason, Msg}]}.
-
-update_doc_result_to_json(#doc{id=DocId}, Result) ->
- update_doc_result_to_json(DocId, Result);
-update_doc_result_to_json(DocId, {ok, NewRev}) ->
- {[{id, DocId}, {rev, couch_doc:rev_to_str(NewRev)}]};
-update_doc_result_to_json(DocId, Error) ->
- {_Code, ErrorStr, Reason} = couch_httpd:error_info(Error),
- {[{id, DocId}, {error, ErrorStr}, {reason, Reason}]}.
-
-
-update_doc(Req, Db, DocId, Doc) ->
- update_doc(Req, Db, DocId, Doc, []).
-
-update_doc(Req, Db, DocId, Doc, Headers) ->
- update_doc(Req, Db, DocId, Doc, Headers, interactive_edit).
-
-update_doc(Req, Db, DocId, #doc{deleted=Deleted}=Doc, Headers, UpdateType) ->
- case couch_httpd:header_value(Req, "X-Couch-Full-Commit") of
- "true" ->
- Options = [full_commit];
- "false" ->
- Options = [delay_commit];
- _ ->
- Options = []
- end,
- {ok, NewRev} = couch_db:update_doc(Db, Doc, Options, UpdateType),
- NewRevStr = couch_doc:rev_to_str(NewRev),
- ResponseHeaders = [{"Etag", <<"\"", NewRevStr/binary, "\"">>}] ++ Headers,
- send_json(Req, if Deleted -> 200; true -> 201 end,
- ResponseHeaders, {[
- {ok, true},
- {id, DocId},
- {rev, NewRevStr}]}).
-
-couch_doc_from_req(Req, DocId, #doc{revs=Revs}=Doc) ->
- validate_attachment_names(Doc),
- ExplicitDocRev =
- case Revs of
- {Start,[RevId|_]} -> {Start, RevId};
- _ -> undefined
- end,
- case extract_header_rev(Req, ExplicitDocRev) of
- missing_rev ->
- Revs2 = {0, []};
- ExplicitDocRev ->
- Revs2 = Revs;
- {Pos, Rev} ->
- Revs2 = {Pos, [Rev]}
- end,
- Doc#doc{id=DocId, revs=Revs2};
-couch_doc_from_req(Req, DocId, Json) ->
- couch_doc_from_req(Req, DocId, couch_doc:from_json_obj(Json)).
-
-
-% Useful for debugging
-% couch_doc_open(Db, DocId) ->
-% couch_doc_open(Db, DocId, nil, []).
-
-couch_doc_open(Db, DocId, Rev, Options) ->
- case Rev of
- nil -> % open most recent rev
- case couch_db:open_doc(Db, DocId, Options) of
- {ok, Doc} ->
- Doc;
- Error ->
- throw(Error)
- end;
- _ -> % open a specific rev (deletions come back as stubs)
- case couch_db:open_doc_revs(Db, DocId, [Rev], Options) of
- {ok, [{ok, Doc}]} ->
- Doc;
- {ok, [{{not_found, missing}, Rev}]} ->
- throw(not_found);
- {ok, [Else]} ->
- throw(Else)
- end
- end.
-
-% Attachment request handlers
-
-db_attachment_req(#httpd{method='GET',mochi_req=MochiReq}=Req, Db, DocId, FileNameParts) ->
- FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")),
- #doc_query_args{
- rev=Rev,
- options=Options
- } = parse_doc_query(Req),
- #doc{
- atts=Atts
- } = Doc = couch_doc_open(Db, DocId, Rev, Options),
- case [A || A <- Atts, A#att.name == FileName] of
- [] ->
- throw({not_found, "Document is missing attachment"});
- [#att{type=Type, encoding=Enc, disk_len=DiskLen, att_len=AttLen}=Att] ->
- Etag = couch_httpd:doc_etag(Doc),
- ReqAcceptsAttEnc = lists:member(
- atom_to_list(Enc),
- couch_httpd:accepted_encodings(Req)
- ),
- Len = case {Enc, ReqAcceptsAttEnc} of
- {identity, _} ->
- % stored and served in identity form
- DiskLen;
- {_, false} when DiskLen =/= AttLen ->
- % Stored encoded, but client doesn't accept the encoding we used,
- % so we need to decode on the fly. DiskLen is the identity length
- % of the attachment.
- DiskLen;
- {_, true} ->
- % Stored and served encoded. AttLen is the encoded length.
- AttLen;
- _ ->
- % We received an encoded attachment and stored it as such, so we
- % don't know the identity length. The client doesn't accept the
- % encoding, and since we cannot serve a correct Content-Length
- % header we'll fall back to a chunked response.
- undefined
- end,
- Headers = [
- {"ETag", Etag},
- {"Cache-Control", "must-revalidate"},
- {"Content-Type", binary_to_list(Type)}
- ] ++ case ReqAcceptsAttEnc of
- true when Enc =/= identity ->
- % RFC 2616 says that the 'identity' encoding should not be used in
- % the Content-Encoding header
- [{"Content-Encoding", atom_to_list(Enc)}];
- _ ->
- []
- end ++ case Enc of
- identity ->
- [{"Accept-Ranges", "bytes"}];
- _ ->
- [{"Accept-Ranges", "none"}]
- end,
- AttFun = case ReqAcceptsAttEnc of
- false ->
- fun couch_doc:att_foldl_decode/3;
- true ->
- fun couch_doc:att_foldl/3
- end,
- couch_httpd:etag_respond(
- Req,
- Etag,
- fun() ->
- case Len of
- undefined ->
- {ok, Resp} = start_chunked_response(Req, 200, Headers),
- AttFun(Att, fun(Seg, _) -> send_chunk(Resp, Seg) end, {ok, Resp}),
- last_chunk(Resp);
- _ ->
- Ranges = parse_ranges(MochiReq:get(range), Len),
- case {Enc, Ranges} of
- {identity, [{From, To}]} ->
- Headers1 = [{<<"Content-Range">>, make_content_range(From, To, Len)}]
- ++ Headers,
- {ok, Resp} = start_response_length(Req, 206, Headers1, To - From + 1),
- couch_doc:range_att_foldl(Att, From, To + 1,
- fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp});
- {identity, Ranges} when is_list(Ranges) ->
- send_ranges_multipart(Req, Type, Len, Att, Ranges);
- _ ->
- {ok, Resp} = start_response_length(Req, 200, Headers, Len),
- AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp})
- end
- end
- end
- )
- end;
-
-
-db_attachment_req(#httpd{method=Method,mochi_req=MochiReq}=Req, Db, DocId, FileNameParts)
- when (Method == 'PUT') or (Method == 'DELETE') ->
- FileName = validate_attachment_name(
- mochiweb_util:join(
- lists:map(fun binary_to_list/1,
- FileNameParts),"/")),
-
- NewAtt = case Method of
- 'DELETE' ->
- [];
- _ ->
- [#att{
- name = FileName,
- type = case couch_httpd:header_value(Req,"Content-Type") of
- undefined ->
- % We could throw an error here or guess by the FileName.
- % Currently, just giving it a default.
- <<"application/octet-stream">>;
- CType ->
- list_to_binary(CType)
- end,
- data = case couch_httpd:body_length(Req) of
- undefined ->
- <<"">>;
- {unknown_transfer_encoding, Unknown} ->
- exit({unknown_transfer_encoding, Unknown});
- chunked ->
- fun(MaxChunkSize, ChunkFun, InitState) ->
- couch_httpd:recv_chunked(Req, MaxChunkSize,
- ChunkFun, InitState)
- end;
- 0 ->
- <<"">>;
- Length when is_integer(Length) ->
- Expect = case couch_httpd:header_value(Req, "expect") of
- undefined ->
- undefined;
- Value when is_list(Value) ->
- string:to_lower(Value)
- end,
- case Expect of
- "100-continue" ->
- MochiReq:start_raw_response({100, gb_trees:empty()});
- _Else ->
- ok
- end,
-
-
- fun() -> couch_httpd:recv(Req, 0) end
- end,
- att_len = case couch_httpd:header_value(Req,"Content-Length") of
- undefined ->
- undefined;
- Length ->
- list_to_integer(Length)
- end,
- md5 = get_md5_header(Req),
- encoding = case string:to_lower(string:strip(
- couch_httpd:header_value(Req,"Content-Encoding","identity")
- )) of
- "identity" ->
- identity;
- "gzip" ->
- gzip;
- _ ->
- throw({
- bad_ctype,
- "Only gzip and identity content-encodings are supported"
- })
- end
- }]
- end,
-
- Doc = case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of
- missing_rev -> % make the new doc
- couch_doc:validate_docid(DocId),
- #doc{id=DocId};
- Rev ->
- case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
- {ok, [{ok, Doc0}]} -> Doc0;
- {ok, [Error]} -> throw(Error)
- end
- end,
-
- #doc{atts=Atts, revs = {Pos, Revs}} = Doc,
- DocEdited = Doc#doc{
- % prune revision list as a workaround for key tree bug (COUCHDB-902)
- revs = {Pos, case Revs of [] -> []; [Hd|_] -> [Hd] end},
- atts = NewAtt ++ [A || A <- Atts, A#att.name /= FileName]
- },
- {ok, UpdatedRev} = couch_db:update_doc(Db, DocEdited, []),
- #db{name=DbName} = Db,
-
- {Status, Headers} = case Method of
- 'DELETE' ->
- {200, []};
- _ ->
- {201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(UpdatedRev)) ++ "\""},
- {"Location", absolute_uri(Req, "/" ++
- binary_to_list(DbName) ++ "/" ++
- binary_to_list(DocId) ++ "/" ++
- binary_to_list(FileName)
- )}]}
- end,
- send_json(Req,Status, Headers, {[
- {ok, true},
- {id, DocId},
- {rev, couch_doc:rev_to_str(UpdatedRev)}
- ]});
-
-db_attachment_req(Req, _Db, _DocId, _FileNameParts) ->
- send_method_not_allowed(Req, "DELETE,GET,HEAD,PUT").
-
-parse_ranges(undefined, _Len) ->
- undefined;
-parse_ranges(fail, _Len) ->
- undefined;
-parse_ranges(Ranges, Len) ->
- parse_ranges(Ranges, Len, []).
-
-parse_ranges([], _Len, Acc) ->
- lists:reverse(Acc);
-parse_ranges([{From, To}|_], _Len, _Acc) when is_integer(From) andalso is_integer(To) andalso To < From ->
- throw(requested_range_not_satisfiable);
-parse_ranges([{From, To}|Rest], Len, Acc) when is_integer(To) andalso To >= Len ->
- parse_ranges([{From, Len-1}] ++ Rest, Len, Acc);
-parse_ranges([{none, To}|Rest], Len, Acc) ->
- parse_ranges([{Len - To, Len - 1}] ++ Rest, Len, Acc);
-parse_ranges([{From, none}|Rest], Len, Acc) ->
- parse_ranges([{From, Len - 1}] ++ Rest, Len, Acc);
-parse_ranges([{From,To}|Rest], Len, Acc) ->
- parse_ranges(Rest, Len, [{From, To}] ++ Acc).
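-% Illustrative examples for Len = 1000: [{0, 99}] passes through unchanged,
-% [{none, 500}] (a suffix range) becomes [{500, 999}], [{900, none}] becomes
-% [{900, 999}], and any range with To < From throws
-% requested_range_not_satisfiable.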
-
-get_md5_header(Req) ->
- ContentMD5 = couch_httpd:header_value(Req, "Content-MD5"),
- Length = couch_httpd:body_length(Req),
- Trailer = couch_httpd:header_value(Req, "Trailer"),
- case {ContentMD5, Length, Trailer} of
- _ when is_list(ContentMD5) orelse is_binary(ContentMD5) ->
- base64:decode(ContentMD5);
- {_, chunked, undefined} ->
- <<>>;
- {_, chunked, _} ->
- case re:run(Trailer, "\\bContent-MD5\\b", [caseless]) of
- {match, _} ->
- md5_in_footer;
- _ ->
- <<>>
- end;
- _ ->
- <<>>
- end.
-
-parse_doc_query(Req) ->
- lists:foldl(fun({Key,Value}, Args) ->
- case {Key, Value} of
- {"attachments", "true"} ->
- Options = [attachments | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"meta", "true"} ->
- Options = [revs_info, conflicts, deleted_conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"revs", "true"} ->
- Options = [revs | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"local_seq", "true"} ->
- Options = [local_seq | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"revs_info", "true"} ->
- Options = [revs_info | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"conflicts", "true"} ->
- Options = [conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"deleted_conflicts", "true"} ->
- Options = [deleted_conflicts | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- {"rev", Rev} ->
- Args#doc_query_args{rev=couch_doc:parse_rev(Rev)};
- {"open_revs", "all"} ->
- Args#doc_query_args{open_revs=all};
- {"open_revs", RevsJsonStr} ->
- JsonArray = ?JSON_DECODE(RevsJsonStr),
- Args#doc_query_args{open_revs=couch_doc:parse_revs(JsonArray)};
- {"atts_since", RevsJsonStr} ->
- JsonArray = ?JSON_DECODE(RevsJsonStr),
- Args#doc_query_args{atts_since = couch_doc:parse_revs(JsonArray)};
- {"new_edits", "false"} ->
- Args#doc_query_args{update_type=replicated_changes};
- {"new_edits", "true"} ->
- Args#doc_query_args{update_type=interactive_edit};
- {"att_encoding_info", "true"} ->
- Options = [att_encoding_info | Args#doc_query_args.options],
- Args#doc_query_args{options=Options};
- _Else -> % unknown key value pair, ignore.
- Args
- end
- end, #doc_query_args{}, couch_httpd:qs(Req)).
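-% Example (illustrative rev): ?rev=1-abc&attachments=true&revs=true yields
-% #doc_query_args{rev = couch_doc:parse_rev("1-abc"),
-% options = [revs, attachments]}.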
-
-parse_changes_query(Req) ->
- lists:foldl(fun({Key, Value}, Args) ->
- case {Key, Value} of
- {"feed", _} ->
- Args#changes_args{feed=Value};
- {"descending", "true"} ->
- Args#changes_args{dir=rev};
- {"since", _} ->
- Args#changes_args{since=list_to_integer(Value)};
- {"limit", _} ->
- Args#changes_args{limit=list_to_integer(Value)};
- {"style", _} ->
- Args#changes_args{style=list_to_existing_atom(Value)};
- {"heartbeat", "true"} ->
- Args#changes_args{heartbeat=true};
- {"heartbeat", _} ->
- Args#changes_args{heartbeat=list_to_integer(Value)};
- {"timeout", _} ->
- Args#changes_args{timeout=list_to_integer(Value)};
- {"include_docs", "true"} ->
- Args#changes_args{include_docs=true};
- {"conflicts", "true"} ->
- Args#changes_args{conflicts=true};
- {"filter", _} ->
- Args#changes_args{filter=Value};
- _Else -> % unknown key value pair, ignore.
- Args
- end
- end, #changes_args{}, couch_httpd:qs(Req)).
-
-extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)->
- extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev));
-extract_header_rev(Req, ExplicitRev) ->
- Etag = case couch_httpd:header_value(Req, "If-Match") of
- undefined -> undefined;
- Value -> couch_doc:parse_rev(string:strip(Value, both, $"))
- end,
- case {ExplicitRev, Etag} of
- {undefined, undefined} -> missing_rev;
- {_, undefined} -> ExplicitRev;
- {undefined, _} -> Etag;
- _ when ExplicitRev == Etag -> Etag;
- _ ->
- throw({bad_request, "Document rev and etag have different values"})
- end.
-
-
-parse_copy_destination_header(Req) ->
- case couch_httpd:header_value(Req, "Destination") of
- undefined ->
- throw({bad_request, "Destination header in mandatory for COPY."});
- Destination ->
- case re:run(Destination, "\\?", [{capture, none}]) of
- nomatch ->
- {list_to_binary(Destination), {0, []}};
- match ->
- [DocId, RevQs] = re:split(Destination, "\\?", [{return, list}]),
- [_RevQueryKey, Rev] = re:split(RevQs, "=", [{return, list}]),
- {Pos, RevId} = couch_doc:parse_rev(Rev),
- {list_to_binary(DocId), {Pos, [RevId]}}
- end
- end.
-
-validate_attachment_names(Doc) ->
- lists:foreach(fun(#att{name=Name}) ->
- validate_attachment_name(Name)
- end, Doc#doc.atts).
-
-validate_attachment_name(Name) when is_list(Name) ->
- validate_attachment_name(list_to_binary(Name));
-validate_attachment_name(<<"_",_/binary>>) ->
- throw({bad_request, <<"Attachment name can't start with '_'">>});
-validate_attachment_name(Name) ->
- case couch_util:validate_utf8(Name) of
- true -> Name;
- false -> throw({bad_request, <<"Attachment name is not UTF-8 encoded">>})
- end.
-
diff --git a/1.1.x/src/couchdb/couch_httpd_external.erl b/1.1.x/src/couchdb/couch_httpd_external.erl
deleted file mode 100644
index 2e91fb50..00000000
--- a/1.1.x/src/couchdb/couch_httpd_external.erl
+++ /dev/null
@@ -1,169 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_external).
-
--export([handle_external_req/2, handle_external_req/3]).
--export([send_external_response/2, json_req_obj/2, json_req_obj/3]).
--export([default_or_content_type/2, parse_external_response/1]).
-
--import(couch_httpd,[send_error/4]).
-
--include("couch_db.hrl").
-
-% handle_external_req/2
-% for the old type of config usage:
-% _external = {couch_httpd_external, handle_external_req}
-% with urls like
-% /db/_external/action/design/name
-handle_external_req(#httpd{
- path_parts=[_DbName, _External, UrlName | _Path]
- }=HttpReq, Db) ->
- process_external_req(HttpReq, Db, UrlName);
-handle_external_req(#httpd{path_parts=[_, _]}=Req, _Db) ->
- send_error(Req, 404, <<"external_server_error">>, <<"No server name specified.">>);
-handle_external_req(Req, _) ->
- send_error(Req, 404, <<"external_server_error">>, <<"Broken assumption">>).
-
-% handle_external_req/3
-% for this type of config usage:
-% _action = {couch_httpd_external, handle_external_req, <<"action">>}
-% with urls like
-% /db/_action/design/name
-handle_external_req(HttpReq, Db, Name) ->
- process_external_req(HttpReq, Db, Name).
-
-process_external_req(HttpReq, Db, Name) ->
-
- Response = couch_external_manager:execute(binary_to_list(Name),
- json_req_obj(HttpReq, Db)),
-
- case Response of
- {unknown_external_server, Msg} ->
- send_error(HttpReq, 404, <<"external_server_error">>, Msg);
- _ ->
- send_external_response(HttpReq, Response)
- end.
-json_req_obj(Req, Db) -> json_req_obj(Req, Db, null).
-json_req_obj(#httpd{mochi_req=Req,
- method=Method,
- requested_path_parts=RequestedPath,
- path_parts=Path,
- req_body=ReqBody
- }, Db, DocId) ->
- Body = case ReqBody of
- undefined -> Req:recv_body();
- Else -> Else
- end,
- ParsedForm = case Req:get_primary_header_value("content-type") of
- "application/x-www-form-urlencoded" ++ _ ->
- case Body of
- undefined -> [];
- _ -> mochiweb_util:parse_qs(Body)
- end;
- _ ->
- []
- end,
- Headers = Req:get(headers),
- Hlist = mochiweb_headers:to_list(Headers),
- {ok, Info} = couch_db:get_db_info(Db),
-
-% add headers...
- {[{<<"info">>, {Info}},
- {<<"id">>, DocId},
- {<<"uuid">>, couch_uuids:new()},
- {<<"method">>, Method},
- {<<"requested_path">>, RequestedPath},
- {<<"path">>, Path},
- {<<"query">>, json_query_keys(to_json_terms(Req:parse_qs()))},
- {<<"headers">>, to_json_terms(Hlist)},
- {<<"body">>, Body},
- {<<"peer">>, ?l2b(Req:get(peer))},
- {<<"form">>, to_json_terms(ParsedForm)},
- {<<"cookie">>, to_json_terms(Req:parse_cookie())},
- {<<"userCtx">>, couch_util:json_user_ctx(Db)},
- {<<"secObj">>, couch_db:get_security(Db)}]}.
-
-to_json_terms(Data) ->
- to_json_terms(Data, []).
-
-to_json_terms([], Acc) ->
- {lists:reverse(Acc)};
-to_json_terms([{Key, Value} | Rest], Acc) when is_atom(Key) ->
- to_json_terms(Rest, [{list_to_binary(atom_to_list(Key)), list_to_binary(Value)} | Acc]);
-to_json_terms([{Key, Value} | Rest], Acc) ->
- to_json_terms(Rest, [{list_to_binary(Key), list_to_binary(Value)} | Acc]).
-
-json_query_keys({Json}) ->
- json_query_keys(Json, []).
-json_query_keys([], Acc) ->
- {lists:reverse(Acc)};
-json_query_keys([{<<"startkey">>, Value} | Rest], Acc) ->
- json_query_keys(Rest, [{<<"startkey">>, couch_util:json_decode(Value)}|Acc]);
-json_query_keys([{<<"endkey">>, Value} | Rest], Acc) ->
- json_query_keys(Rest, [{<<"endkey">>, couch_util:json_decode(Value)}|Acc]);
-json_query_keys([{<<"key">>, Value} | Rest], Acc) ->
- json_query_keys(Rest, [{<<"key">>, couch_util:json_decode(Value)}|Acc]);
-json_query_keys([Term | Rest], Acc) ->
- json_query_keys(Rest, [Term|Acc]).
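-
-% Example (illustrative): json_query_keys({[{<<"startkey">>, <<"[\"a\"]">>},
-% {<<"limit">>, <<"10">>}]}) JSON-decodes only the key parameters, giving
-% {[{<<"startkey">>, [<<"a">>]}, {<<"limit">>, <<"10">>}]}.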
-
-send_external_response(#httpd{mochi_req=MochiReq}=Req, Response) ->
- #extern_resp_args{
- code = Code,
- data = Data,
- ctype = CType,
- headers = Headers
- } = parse_external_response(Response),
- couch_httpd:log_request(Req, Code),
- Resp = MochiReq:respond({Code,
- default_or_content_type(CType, Headers ++ couch_httpd:server_header()), Data}),
- {ok, Resp}.
-
-parse_external_response({Response}) ->
- lists:foldl(fun({Key,Value}, Args) ->
- case {Key, Value} of
- {"", _} ->
- Args;
- {<<"code">>, Value} ->
- Args#extern_resp_args{code=Value};
- {<<"stop">>, true} ->
- Args#extern_resp_args{stop=true};
- {<<"json">>, Value} ->
- Args#extern_resp_args{
- data=?JSON_ENCODE(Value),
- ctype="application/json"};
- {<<"body">>, Value} ->
- Args#extern_resp_args{data=Value, ctype="text/html; charset=utf-8"};
- {<<"base64">>, Value} ->
- Args#extern_resp_args{
- data=base64:decode(Value),
- ctype="application/binary"
- };
- {<<"headers">>, {Headers}} ->
- NewHeaders = lists:map(fun({Header, HVal}) ->
- {binary_to_list(Header), binary_to_list(HVal)}
- end, Headers),
- Args#extern_resp_args{headers=NewHeaders};
- _ -> % unknown key
- Msg = lists:flatten(io_lib:format("Invalid data from external server: ~p", [{Key, Value}])),
- throw({external_response_error, Msg})
- end
- end, #extern_resp_args{}, Response).
-
-default_or_content_type(DefaultContentType, Headers) ->
- IsContentType = fun({X, _}) -> string:to_lower(X) == "content-type" end,
- case lists:any(IsContentType, Headers) of
- false ->
- [{"Content-Type", DefaultContentType} | Headers];
- true ->
- Headers
- end.
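-
-% Example (illustrative): default_or_content_type("application/json", [])
-% returns [{"Content-Type", "application/json"}]; if a Content-Type header
-% is already present (matched case-insensitively), the headers pass
-% through unchanged.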
diff --git a/1.1.x/src/couchdb/couch_httpd_misc_handlers.erl b/1.1.x/src/couchdb/couch_httpd_misc_handlers.erl
deleted file mode 100644
index 213cbfd4..00000000
--- a/1.1.x/src/couchdb/couch_httpd_misc_handlers.erl
+++ /dev/null
@@ -1,284 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_misc_handlers).
-
--export([handle_welcome_req/2,handle_favicon_req/2,handle_utils_dir_req/2,
- handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1,
- handle_uuids_req/1,handle_config_req/1,handle_log_req/1,
- handle_task_status_req/1]).
-
--export([increment_update_seq_req/2]).
-
-
--include("couch_db.hrl").
-
--import(couch_httpd,
- [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
- start_json_response/2,send_chunk/2,last_chunk/1,end_json_response/1,
- start_chunked_response/3, send_error/4]).
-
-% httpd global handlers
-
-handle_welcome_req(#httpd{method='GET'}=Req, WelcomeMessage) ->
- send_json(Req, {[
- {couchdb, WelcomeMessage},
- {version, list_to_binary(couch_server:get_version())}
- ]});
-handle_welcome_req(Req, _) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-handle_favicon_req(#httpd{method='GET'}=Req, DocumentRoot) ->
- {{Year,Month,Day},Time} = erlang:localtime(),
- OneYearFromNow = {{Year+1,Month,Day},Time},
- CachingHeaders = [
- %favicon should expire a year from now
- {"Cache-Control", "public, max-age=31536000"},
- {"Expires", httpd_util:rfc1123_date(OneYearFromNow)}
- ],
- couch_httpd:serve_file(Req, "favicon.ico", DocumentRoot, CachingHeaders);
-
-handle_favicon_req(Req, _) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-handle_utils_dir_req(#httpd{method='GET'}=Req, DocumentRoot) ->
- "/" ++ UrlPath = couch_httpd:path(Req),
- case couch_httpd:partition(UrlPath) of
- {_ActionKey, "/", RelativePath} ->
- % GET /_utils/path or GET /_utils/
- couch_httpd:serve_file(Req, RelativePath, DocumentRoot);
- {_ActionKey, "", _RelativePath} ->
- % GET /_utils
- RedirectPath = couch_httpd:path(Req) ++ "/",
- couch_httpd:send_redirect(Req, RedirectPath)
- end;
-handle_utils_dir_req(Req, _) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-handle_all_dbs_req(#httpd{method='GET'}=Req) ->
- {ok, DbNames} = couch_server:all_databases(),
- send_json(Req, DbNames);
-handle_all_dbs_req(Req) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-
-handle_task_status_req(#httpd{method='GET'}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- % convert the list of prop lists to a list of json objects
- send_json(Req, [{Props} || Props <- couch_task_status:all()]);
-handle_task_status_req(Req) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-handle_replicate_req(#httpd{method='POST'}=Req) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- PostBody = couch_httpd:json_body_obj(Req),
- try couch_rep:replicate(PostBody, Req#httpd.user_ctx) of
- {ok, {continuous, RepId}} ->
- send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]});
- {ok, {cancelled, RepId}} ->
- send_json(Req, 200, {[{ok, true}, {<<"_local_id">>, RepId}]});
- {ok, {JsonResults}} ->
- send_json(Req, {[{ok, true} | JsonResults]});
- {error, {Type, Details}} ->
- send_json(Req, 500, {[{error, Type}, {reason, Details}]});
- {error, not_found} ->
- send_json(Req, 404, {[{error, not_found}]});
- {error, Reason} ->
- try
- send_json(Req, 500, {[{error, Reason}]})
- catch
- exit:{json_encode, _} ->
- send_json(Req, 500, {[{error, couch_util:to_binary(Reason)}]})
- end
- catch
- throw:{db_not_found, Msg} ->
- send_json(Req, 404, {[{error, db_not_found}, {reason, Msg}]});
- throw:{unauthorized, Msg} ->
- send_json(Req, 404, {[{error, unauthorized}, {reason, Msg}]})
- end;
-handle_replicate_req(Req) ->
- send_method_not_allowed(Req, "POST").
-
-
-handle_restart_req(#httpd{method='POST'}=Req) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_httpd:verify_is_server_admin(Req),
- couch_server_sup:restart_core_server(),
- send_json(Req, 200, {[{ok, true}]});
-handle_restart_req(Req) ->
- send_method_not_allowed(Req, "POST").
-
-
-handle_uuids_req(#httpd{method='GET'}=Req) ->
- Count = list_to_integer(couch_httpd:qs_value(Req, "count", "1")),
- UUIDs = [couch_uuids:new() || _ <- lists:seq(1, Count)],
- Etag = couch_httpd:make_etag(UUIDs),
- couch_httpd:etag_respond(Req, Etag, fun() ->
- CacheBustingHeaders = [
- {"Date", httpd_util:rfc1123_date()},
- {"Cache-Control", "no-cache"},
- % Past date, ON PURPOSE!
- {"Expires", "Fri, 01 Jan 1990 00:00:00 GMT"},
- {"Pragma", "no-cache"},
- {"ETag", Etag}
- ],
- send_json(Req, 200, CacheBustingHeaders, {[{<<"uuids">>, UUIDs}]})
- end);
-handle_uuids_req(Req) ->
- send_method_not_allowed(Req, "GET").
-
-
-% Config request handler
-
-
-% GET /_config/
-% GET /_config
-handle_config_req(#httpd{method='GET', path_parts=[_]}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- Grouped = lists:foldl(fun({{Section, Key}, Value}, Acc) ->
- case dict:is_key(Section, Acc) of
- true ->
- dict:append(Section, {list_to_binary(Key), list_to_binary(Value)}, Acc);
- false ->
- dict:store(Section, [{list_to_binary(Key), list_to_binary(Value)}], Acc)
- end
- end, dict:new(), couch_config:all()),
- KVs = dict:fold(fun(Section, Values, Acc) ->
- [{list_to_binary(Section), {Values}} | Acc]
- end, [], Grouped),
- send_json(Req, 200, {KVs});
-% GET /_config/Section
-handle_config_req(#httpd{method='GET', path_parts=[_,Section]}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- KVs = [{list_to_binary(Key), list_to_binary(Value)}
- || {Key, Value} <- couch_config:get(Section)],
- send_json(Req, 200, {KVs});
-% GET /_config/Section/Key
-handle_config_req(#httpd{method='GET', path_parts=[_, Section, Key]}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- case couch_config:get(Section, Key, null) of
- null ->
- throw({not_found, unknown_config_value});
- Value ->
- send_json(Req, 200, list_to_binary(Value))
- end;
-% PUT or DELETE /_config/Section/Key
-handle_config_req(#httpd{method=Method, path_parts=[_, Section, Key]}=Req)
- when (Method == 'PUT') or (Method == 'DELETE') ->
- ok = couch_httpd:verify_is_server_admin(Req),
- Persist = couch_httpd:header_value(Req, "X-Couch-Persist") /= "false",
- case couch_config:get(<<"httpd">>, <<"config_whitelist">>, null) of
- null ->
- % No whitelist; allow all changes.
- handle_approved_config_req(Req, Persist);
- WhitelistValue ->
- % Provide a failsafe to protect against inadvertently locking
- % oneself out of the config by supplying a syntactically incorrect
- % Erlang term. To intentionally lock down the whitelist, supply a
- % well-formed list which does not include the whitelist config
- % variable itself.
- FallbackWhitelist = [{<<"httpd">>, <<"config_whitelist">>}],
-
- Whitelist = case couch_util:parse_term(WhitelistValue) of
- {ok, Value} when is_list(Value) ->
- Value;
- {ok, _NonListValue} ->
- FallbackWhitelist;
- {error, _} ->
- [{WhitelistSection, WhitelistKey}] = FallbackWhitelist,
- ?LOG_ERROR("Only whitelisting ~s/~s due to error parsing: ~p",
- [WhitelistSection, WhitelistKey, WhitelistValue]),
- FallbackWhitelist
- end,
-
- IsRequestedKeyVal = fun(Element) ->
- case Element of
- {A, B} ->
- % For readability, tuples may be used instead of binaries
- % in the whitelist.
- case {couch_util:to_binary(A), couch_util:to_binary(B)} of
- {Section, Key} ->
- true;
- {Section, <<"*">>} ->
- true;
- _Else ->
- false
- end;
- _Else ->
- false
- end
- end,
-
- case lists:any(IsRequestedKeyVal, Whitelist) of
- true ->
- % Allow modifying this whitelisted variable.
- handle_approved_config_req(Req, Persist);
- _NotWhitelisted ->
- % Disallow modifying this non-whitelisted variable.
- send_error(Req, 400, <<"modification_not_allowed">>,
- ?l2b("This config variable is read-only"))
- end
- end;
-handle_config_req(Req) ->
- send_method_not_allowed(Req, "GET,PUT,DELETE").
-
-% PUT /_config/Section/Key
-% "value"
-handle_approved_config_req(#httpd{method='PUT', path_parts=[_, Section, Key]}=Req, Persist) ->
- Value = couch_httpd:json_body(Req),
- OldValue = couch_config:get(Section, Key, ""),
- case couch_config:set(Section, Key, ?b2l(Value), Persist) of
- ok ->
- send_json(Req, 200, list_to_binary(OldValue));
- Error ->
- throw(Error)
- end;
-% DELETE /_config/Section/Key
-handle_approved_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req, Persist) ->
- case couch_config:get(Section, Key, null) of
- null ->
- throw({not_found, unknown_config_value});
- OldValue ->
- couch_config:delete(Section, Key, Persist),
- send_json(Req, 200, list_to_binary(OldValue))
- end.
-
-
-% httpd db handlers
-
-increment_update_seq_req(#httpd{method='POST'}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- {ok, NewSeq} = couch_db:increment_update_seq(Db),
- send_json(Req, {[{ok, true},
- {update_seq, NewSeq}
- ]});
-increment_update_seq_req(Req, _Db) ->
- send_method_not_allowed(Req, "POST").
-
-% httpd log handlers
-
-handle_log_req(#httpd{method='GET'}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- Bytes = list_to_integer(couch_httpd:qs_value(Req, "bytes", "1000")),
- Offset = list_to_integer(couch_httpd:qs_value(Req, "offset", "0")),
- Chunk = couch_log:read(Bytes, Offset),
- {ok, Resp} = start_chunked_response(Req, 200, [
- % send a plaintext response
- {"Content-Type", "text/plain; charset=utf-8"},
- {"Content-Length", integer_to_list(length(Chunk))}
- ]),
- send_chunk(Resp, Chunk),
- last_chunk(Resp);
-handle_log_req(Req) ->
- send_method_not_allowed(Req, "GET").
-
-
diff --git a/1.1.x/src/couchdb/couch_httpd_oauth.erl b/1.1.x/src/couchdb/couch_httpd_oauth.erl
deleted file mode 100644
index 05ee10e2..00000000
--- a/1.1.x/src/couchdb/couch_httpd_oauth.erl
+++ /dev/null
@@ -1,176 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_oauth).
--include("couch_db.hrl").
-
--export([oauth_authentication_handler/1, handle_oauth_req/1, consumer_lookup/2]).
-
-% OAuth auth handler using per-node user db
-oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) ->
- serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = couch_util:get_value("oauth_token", Params),
- case couch_config:get("oauth_token_secrets", AccessToken) of
- undefined ->
- couch_httpd:send_error(Req, 400, <<"invalid_token">>,
- <<"Invalid OAuth token.">>);
- TokenSecret ->
- ?LOG_DEBUG("OAuth URL is: ~p", [URL]),
- case oauth:verify(Signature, atom_to_list(MochiReq:get(method)), URL, Params, Consumer, TokenSecret) of
- true ->
- set_user_ctx(Req, AccessToken);
- false ->
- Req
- end
- end
- end, true).
-
-% Look up the consumer key and get the roles to give the consumer
-set_user_ctx(Req, AccessToken) ->
- % TODO move to db storage
- Name = case couch_config:get("oauth_token_users", AccessToken) of
- undefined -> throw({bad_request, unknown_oauth_token});
- Value -> ?l2b(Value)
- end,
- case couch_auth_cache:get_user_creds(Name) of
- nil -> Req;
- User ->
- Roles = couch_util:get_value(<<"roles">>, User, []),
- Req#httpd{user_ctx=#user_ctx{name=Name, roles=Roles}}
- end.
-
-% OAuth request_token
-handle_oauth_req(#httpd{path_parts=[_OAuth, <<"request_token">>], method=Method}=Req) ->
- serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = couch_util:get_value("oauth_token", Params),
- TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
- case oauth:verify(Signature, atom_to_list(Method), URL, Params, Consumer, TokenSecret) of
- true ->
- ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
- false ->
- invalid_signature(Req)
- end
- end, false);
-handle_oauth_req(#httpd{path_parts=[_OAuth, <<"authorize">>]}=Req) ->
- {ok, serve_oauth_authorize(Req)};
-handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>], method='GET'}=Req) ->
- serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- case oauth:token(Params) of
- "requestkey" ->
- case oauth:verify(Signature, "GET", URL, Params, Consumer, "requestsecret") of
- true ->
- ok(Req, <<"oauth_token=accesskey&oauth_token_secret=accesssecret">>);
- false ->
- invalid_signature(Req)
- end;
- _ ->
- couch_httpd:send_error(Req, 400, <<"invalid_token">>, <<"Invalid OAuth token.">>)
- end
- end, false);
-handle_oauth_req(#httpd{path_parts=[_OAuth, <<"access_token">>]}=Req) ->
- couch_httpd:send_method_not_allowed(Req, "GET").
-
-invalid_signature(Req) ->
- couch_httpd:send_error(Req, 400, <<"invalid_signature">>, <<"Invalid signature value.">>).
-
-% This needs to be protected, i.e. force the user to log in using HTTP Basic Auth or form-based login.
-serve_oauth_authorize(#httpd{method=Method}=Req) ->
- case Method of
- 'GET' ->
- % Confirm with the User that they want to authenticate the Consumer
- serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = couch_util:get_value("oauth_token", Params),
- TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
- case oauth:verify(Signature, "GET", URL, Params, Consumer, TokenSecret) of
- true ->
- ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
- false ->
- invalid_signature(Req)
- end
- end, false);
- 'POST' ->
- % If the User has confirmed, we direct the User back to the Consumer with a verification code
- serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = couch_util:get_value("oauth_token", Params),
- TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
- case oauth:verify(Signature, "POST", URL, Params, Consumer, TokenSecret) of
- true ->
- %redirect(oauth_callback, oauth_token, oauth_verifier),
- ok(Req, <<"oauth_token=requestkey&oauth_token_secret=requestsecret">>);
- false ->
- invalid_signature(Req)
- end
- end, false);
- _ ->
- couch_httpd:send_method_not_allowed(Req, "GET,POST")
- end.
-
-serve_oauth(#httpd{mochi_req=MochiReq}=Req, Fun, FailSilently) ->
- % 1. In the HTTP Authorization header as defined in OAuth HTTP Authorization Scheme.
- % 2. As the HTTP POST request body with a content-type of application/x-www-form-urlencoded.
- % 3. Added to the URLs in the query part (as defined by [RFC3986] section 3).
- AuthHeader = case MochiReq:get_header_value("authorization") of
- undefined ->
- "";
- Else ->
- [Head | Tail] = re:split(Else, "\\s", [{parts, 2}, {return, list}]),
- case [string:to_lower(Head) | Tail] of
- ["oauth", Rest] -> Rest;
- _ -> ""
- end
- end,
- HeaderParams = oauth_uri:params_from_header_string(AuthHeader),
- %Realm = couch_util:get_value("realm", HeaderParams),
- Params = proplists:delete("realm", HeaderParams) ++ MochiReq:parse_qs(),
- ?LOG_DEBUG("OAuth Params: ~p", [Params]),
- case couch_util:get_value("oauth_version", Params, "1.0") of
- "1.0" ->
- case couch_util:get_value("oauth_consumer_key", Params, undefined) of
- undefined ->
- case FailSilently of
- true -> Req;
- false -> couch_httpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer.">>)
- end;
- ConsumerKey ->
- SigMethod = couch_util:get_value("oauth_signature_method", Params),
- case consumer_lookup(ConsumerKey, SigMethod) of
- none ->
- couch_httpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer (key or signature method).">>);
- Consumer ->
- Signature = couch_util:get_value("oauth_signature", Params),
- URL = couch_httpd:absolute_uri(Req, MochiReq:get(raw_path)),
- Fun(URL, proplists:delete("oauth_signature", Params),
- Consumer, Signature)
- end
- end;
- _ ->
- couch_httpd:send_error(Req, 400, <<"invalid_oauth_version">>, <<"Invalid OAuth version.">>)
- end.
-
-consumer_lookup(Key, MethodStr) ->
- SignatureMethod = case MethodStr of
- "PLAINTEXT" -> plaintext;
- "HMAC-SHA1" -> hmac_sha1;
- %"RSA-SHA1" -> rsa_sha1;
- _Else -> undefined
- end,
- case SignatureMethod of
- undefined -> none;
- _SupportedMethod ->
- case couch_config:get("oauth_consumer_secrets", Key, undefined) of
- undefined -> none;
- Secret -> {Key, Secret, SignatureMethod}
- end
- end.
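-
-% Example (illustrative, hypothetical config values): given an ini entry
-%   [oauth_consumer_secrets]
-%   consumer1 = sekr1t
-% consumer_lookup("consumer1", "HMAC-SHA1") returns
-% {"consumer1", "sekr1t", hmac_sha1}; an unknown key or an unsupported
-% signature method returns none.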
-
-ok(#httpd{mochi_req=MochiReq}, Body) ->
- {ok, MochiReq:respond({200, [], Body})}.
diff --git a/1.1.x/src/couchdb/couch_httpd_proxy.erl b/1.1.x/src/couchdb/couch_httpd_proxy.erl
deleted file mode 100644
index 65e3e432..00000000
--- a/1.1.x/src/couchdb/couch_httpd_proxy.erl
+++ /dev/null
@@ -1,431 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
--module(couch_httpd_proxy).
-
--export([handle_proxy_req/2]).
-
--include("couch_db.hrl").
--include("../ibrowse/ibrowse.hrl").
-
--define(TIMEOUT, infinity).
--define(PKT_SIZE, 4096).
-
-
-handle_proxy_req(Req, ProxyDest) ->
-
- %% Bug in Mochiweb?
- %% Reported here: http://github.com/mochi/mochiweb/issues/issue/16
- erase(mochiweb_request_body_length),
-
- Method = get_method(Req),
- Url = get_url(Req, ProxyDest),
- Version = get_version(Req),
- Headers = get_headers(Req),
- Body = get_body(Req),
- Options = [
- {http_vsn, Version},
- {headers_as_is, true},
- {response_format, binary},
- {stream_to, {self(), once}}
- ],
- case ibrowse:send_req(Url, Headers, Method, Body, Options, ?TIMEOUT) of
- {ibrowse_req_id, ReqId} ->
- stream_response(Req, ProxyDest, ReqId);
- {error, Reason} ->
- throw({error, Reason})
- end.
-
-
-get_method(#httpd{mochi_req=MochiReq}) ->
- case MochiReq:get(method) of
- Method when is_atom(Method) ->
- list_to_atom(string:to_lower(atom_to_list(Method)));
- Method when is_list(Method) ->
- list_to_atom(string:to_lower(Method));
- Method when is_binary(Method) ->
- list_to_atom(string:to_lower(?b2l(Method)))
- end.
-
-
-get_url(Req, ProxyDest) when is_binary(ProxyDest) ->
- get_url(Req, ?b2l(ProxyDest));
-get_url(#httpd{mochi_req=MochiReq}=Req, ProxyDest) ->
- BaseUrl = case mochiweb_util:partition(ProxyDest, "/") of
- {[], "/", _} -> couch_httpd:absolute_uri(Req, ProxyDest);
- _ -> ProxyDest
- end,
- ProxyPrefix = "/" ++ ?b2l(hd(Req#httpd.path_parts)),
- RequestedPath = MochiReq:get(raw_path),
- case mochiweb_util:partition(RequestedPath, ProxyPrefix) of
- {[], ProxyPrefix, []} ->
- BaseUrl;
- {[], ProxyPrefix, [$/ | DestPath]} ->
- remove_trailing_slash(BaseUrl) ++ "/" ++ DestPath;
- {[], ProxyPrefix, DestPath} ->
- remove_trailing_slash(BaseUrl) ++ "/" ++ DestPath;
- _Else ->
- throw({invalid_url_path, {ProxyPrefix, RequestedPath}})
- end.
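-
-% Example (illustrative): with a handler mounted at /_proxy and
-% ProxyDest "http://127.0.0.1:5984/target", a request for
-% /_proxy/some/doc is rewritten to "http://127.0.0.1:5984/target/some/doc".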
-
-get_version(#httpd{mochi_req=MochiReq}) ->
- MochiReq:get(version).
-
-
-get_headers(#httpd{mochi_req=MochiReq}) ->
- to_ibrowse_headers(mochiweb_headers:to_list(MochiReq:get(headers)), []).
-
-to_ibrowse_headers([], Acc) ->
- lists:reverse(Acc);
-to_ibrowse_headers([{K, V} | Rest], Acc) when is_atom(K) ->
- to_ibrowse_headers([{atom_to_list(K), V} | Rest], Acc);
-to_ibrowse_headers([{K, V} | Rest], Acc) when is_list(K) ->
- case string:to_lower(K) of
- "content-length" ->
- to_ibrowse_headers(Rest, [{content_length, V} | Acc]);
- % This appears to make ibrowse too smart.
- %"transfer-encoding" ->
- % to_ibrowse_headers(Rest, [{transfer_encoding, V} | Acc]);
- _ ->
- to_ibrowse_headers(Rest, [{K, V} | Acc])
- end.
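-
-% Example (illustrative): to_ibrowse_headers([{'Content-Length', "42"},
-% {"X-Foo", "bar"}], []) gives [{content_length, "42"}, {"X-Foo", "bar"}];
-% only Content-Length is special-cased for ibrowse.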
-
-get_body(#httpd{method='GET'}) ->
- fun() -> eof end;
-get_body(#httpd{method='HEAD'}) ->
- fun() -> eof end;
-get_body(#httpd{method='DELETE'}) ->
- fun() -> eof end;
-get_body(#httpd{mochi_req=MochiReq}) ->
- case MochiReq:get(body_length) of
- undefined ->
- <<>>;
- {unknown_transfer_encoding, Unknown} ->
- exit({unknown_transfer_encoding, Unknown});
- chunked ->
- {fun stream_chunked_body/1, {init, MochiReq, 0}};
- 0 ->
- <<>>;
- Length when is_integer(Length) andalso Length > 0 ->
- {fun stream_length_body/1, {init, MochiReq, Length}};
- Length ->
- exit({invalid_body_length, Length})
- end.
-
-
-remove_trailing_slash(Url) ->
- rem_slash(lists:reverse(Url)).
-
-rem_slash([]) ->
- [];
-rem_slash([$\s | RevUrl]) ->
- rem_slash(RevUrl);
-rem_slash([$\t | RevUrl]) ->
- rem_slash(RevUrl);
-rem_slash([$\r | RevUrl]) ->
- rem_slash(RevUrl);
-rem_slash([$\n | RevUrl]) ->
- rem_slash(RevUrl);
-rem_slash([$/ | RevUrl]) ->
- rem_slash(RevUrl);
-rem_slash(RevUrl) ->
- lists:reverse(RevUrl).
-
-
-stream_chunked_body({init, MReq, 0}) ->
- % First chunk, do expect-continue dance.
- init_body_stream(MReq),
- stream_chunked_body({stream, MReq, 0, [], ?PKT_SIZE});
-stream_chunked_body({stream, MReq, 0, Buf, BRem}) ->
- % Finished a chunk, get next length. If next length
- % is 0, it's time to try and read trailers.
- {CRem, Data} = read_chunk_length(MReq),
- case CRem of
- 0 ->
- BodyData = lists:reverse(Buf, Data),
- {ok, BodyData, {trailers, MReq, [], ?PKT_SIZE}};
- _ ->
- stream_chunked_body(
- {stream, MReq, CRem, [Data | Buf], BRem-size(Data)}
- )
- end;
-stream_chunked_body({stream, MReq, CRem, Buf, BRem}) when BRem =< 0 ->
- % Time to empty our buffers to the upstream socket.
- BodyData = lists:reverse(Buf),
- {ok, BodyData, {stream, MReq, CRem, [], ?PKT_SIZE}};
-stream_chunked_body({stream, MReq, CRem, Buf, BRem}) ->
- % Buffer some more data from the client.
- Length = lists:min([CRem, BRem]),
- Socket = MReq:get(socket),
- NewState = case mochiweb_socket:recv(Socket, Length, ?TIMEOUT) of
- {ok, Data} when size(Data) == CRem ->
- case mochiweb_socket:recv(Socket, 2, ?TIMEOUT) of
- {ok, <<"\r\n">>} ->
- {stream, MReq, 0, [<<"\r\n">>, Data | Buf], BRem-Length-2};
- _ ->
- exit(normal)
- end;
- {ok, Data} ->
- {stream, MReq, CRem-Length, [Data | Buf], BRem-Length};
- _ ->
- exit(normal)
- end,
- stream_chunked_body(NewState);
-stream_chunked_body({trailers, MReq, Buf, BRem}) when BRem =< 0 ->
- % Empty our buffers and send data upstream.
- BodyData = lists:reverse(Buf),
- {ok, BodyData, {trailers, MReq, [], ?PKT_SIZE}};
-stream_chunked_body({trailers, MReq, Buf, BRem}) ->
- % Read another trailer into the buffer or stop on an
- % empty line.
- Socket = MReq:get(socket),
- mochiweb_socket:setopts(Socket, [{packet, line}]),
- case mochiweb_socket:recv(Socket, 0, ?TIMEOUT) of
- {ok, <<"\r\n">>} ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- BodyData = lists:reverse(Buf, <<"\r\n">>),
- {ok, BodyData, eof};
- {ok, Footer} ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- NewState = {trailers, MReq, [Footer | Buf], BRem-size(Footer)},
- stream_chunked_body(NewState);
- _ ->
- exit(normal)
- end;
-stream_chunked_body(eof) ->
- % Tell ibrowse we're done sending data.
- eof.
-
-
-stream_length_body({init, MochiReq, Length}) ->
- % Do the expect-continue dance
- init_body_stream(MochiReq),
- stream_length_body({stream, MochiReq, Length});
-stream_length_body({stream, _MochiReq, 0}) ->
- % Finished streaming.
- eof;
-stream_length_body({stream, MochiReq, Length}) ->
- BufLen = lists:min([Length, ?PKT_SIZE]),
- case MochiReq:recv(BufLen) of
- <<>> -> eof;
- Bin -> {ok, Bin, {stream, MochiReq, Length-BufLen}}
- end.
-
-
-init_body_stream(MochiReq) ->
- Expect = case MochiReq:get_header_value("expect") of
- undefined ->
- undefined;
- Value when is_list(Value) ->
- string:to_lower(Value)
- end,
- case Expect of
- "100-continue" ->
- MochiReq:start_raw_response({100, gb_trees:empty()});
- _Else ->
- ok
- end.
-
-
-read_chunk_length(MochiReq) ->
- Socket = MochiReq:get(socket),
- mochiweb_socket:setopts(Socket, [{packet, line}]),
- case mochiweb_socket:recv(Socket, 0, ?TIMEOUT) of
- {ok, Header} ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- Splitter = fun(C) ->
- C =/= $\r andalso C =/= $\n andalso C =/= $\s
- end,
- {Hex, _Rest} = lists:splitwith(Splitter, ?b2l(Header)),
- {mochihex:to_int(Hex), Header};
- _ ->
- exit(normal)
- end.
-
-
-stream_response(Req, ProxyDest, ReqId) ->
- receive
- {ibrowse_async_headers, ReqId, "100", _} ->
- % ibrowse doesn't handle 100 Continue responses which
- % means we have to discard them so the proxy client
- % doesn't get confused.
- ibrowse:stream_next(ReqId),
- stream_response(Req, ProxyDest, ReqId);
- {ibrowse_async_headers, ReqId, Status, Headers} ->
- {Source, Dest} = get_urls(Req, ProxyDest),
- FixedHeaders = fix_headers(Source, Dest, Headers, []),
- case body_length(FixedHeaders) of
- chunked ->
- {ok, Resp} = couch_httpd:start_chunked_response(
- Req, list_to_integer(Status), FixedHeaders
- ),
- ibrowse:stream_next(ReqId),
- stream_chunked_response(Req, ReqId, Resp),
- {ok, Resp};
- Length when is_integer(Length) ->
- {ok, Resp} = couch_httpd:start_response_length(
- Req, list_to_integer(Status), FixedHeaders, Length
- ),
- ibrowse:stream_next(ReqId),
- stream_length_response(Req, ReqId, Resp),
- {ok, Resp};
- _ ->
- {ok, Resp} = couch_httpd:start_response(
- Req, list_to_integer(Status), FixedHeaders
- ),
- ibrowse:stream_next(ReqId),
- stream_length_response(Req, ReqId, Resp),
- % XXX: MochiWeb apparently doesn't look at the
- % response to see if it must force close the
- % connection. So we help it out here.
- erlang:put(mochiweb_request_force_close, true),
- {ok, Resp}
- end
- end.
-
-
-stream_chunked_response(Req, ReqId, Resp) ->
- receive
- {ibrowse_async_response, ReqId, {error, Reason}} ->
- throw({error, Reason});
- {ibrowse_async_response, ReqId, Chunk} ->
- couch_httpd:send_chunk(Resp, Chunk),
- ibrowse:stream_next(ReqId),
- stream_chunked_response(Req, ReqId, Resp);
- {ibrowse_async_response_end, ReqId} ->
- couch_httpd:last_chunk(Resp)
- end.
-
-
-stream_length_response(Req, ReqId, Resp) ->
- receive
- {ibrowse_async_response, ReqId, {error, Reason}} ->
- throw({error, Reason});
- {ibrowse_async_response, ReqId, Chunk} ->
- couch_httpd:send(Resp, Chunk),
- ibrowse:stream_next(ReqId),
- stream_length_response(Req, ReqId, Resp);
- {ibrowse_async_response_end, ReqId} ->
- ok
- end.
-
-
-get_urls(Req, ProxyDest) ->
- SourceUrl = couch_httpd:absolute_uri(Req, "/" ++ hd(Req#httpd.path_parts)),
- Source = parse_url(?b2l(iolist_to_binary(SourceUrl))),
- case (catch parse_url(ProxyDest)) of
- Dest when is_record(Dest, url) ->
- {Source, Dest};
- _ ->
- DestUrl = couch_httpd:absolute_uri(Req, ProxyDest),
- {Source, parse_url(DestUrl)}
- end.
-
-
-fix_headers(_, _, [], Acc) ->
- lists:reverse(Acc);
-fix_headers(Source, Dest, [{K, V} | Rest], Acc) ->
- Fixed = case string:to_lower(K) of
- "location" -> rewrite_location(Source, Dest, V);
- "content-location" -> rewrite_location(Source, Dest, V);
- "uri" -> rewrite_location(Source, Dest, V);
- "destination" -> rewrite_location(Source, Dest, V);
- "set-cookie" -> rewrite_cookie(Source, Dest, V);
- _ -> V
- end,
- fix_headers(Source, Dest, Rest, [{K, Fixed} | Acc]).
-
-
-rewrite_location(Source, #url{host=Host, port=Port, protocol=Proto}, Url) ->
- case (catch parse_url(Url)) of
- #url{host=Host, port=Port, protocol=Proto} = Location ->
- DestLoc = #url{
- protocol=Source#url.protocol,
- host=Source#url.host,
- port=Source#url.port,
- path=join_url_path(Source#url.path, Location#url.path)
- },
- url_to_url(DestLoc);
- #url{} ->
- Url;
- _ ->
- url_to_url(Source#url{path=join_url_path(Source#url.path, Url)})
- end.
-
-
-rewrite_cookie(_Source, _Dest, Cookie) ->
- Cookie.
-
-
-parse_url(Url) when is_binary(Url) ->
- ibrowse_lib:parse_url(?b2l(Url));
-parse_url(Url) when is_list(Url) ->
- ibrowse_lib:parse_url(?b2l(iolist_to_binary(Url))).
-
-
-join_url_path(Src, Dst) ->
- Src2 = case lists:reverse(Src) of
- "/" ++ RestSrc -> lists:reverse(RestSrc);
- _ -> Src
- end,
- Dst2 = case Dst of
- "/" ++ RestDst -> RestDst;
- _ -> Dst
- end,
- Src2 ++ "/" ++ Dst2.
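-
-% Example (illustrative): join_url_path("/db/", "/path") joins the two
-% halves with exactly one slash, giving "/db/path".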
-
-
-url_to_url(#url{host=Host, port=Port, path=Path, protocol=Proto} = Url) ->
- LPort = case {Proto, Port} of
- {http, 80} -> "";
- {https, 443} -> "";
- _ -> ":" ++ integer_to_list(Port)
- end,
- LPath = case Path of
- "/" ++ _RestPath -> Path;
- _ -> "/" ++ Path
- end,
- HostPart = case Url#url.host_type of
- ipv6_address ->
- "[" ++ Host ++ "]";
- _ ->
- Host
- end,
- atom_to_list(Proto) ++ "://" ++ HostPart ++ LPort ++ LPath.
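-
-% Example (illustrative, assuming ibrowse's #url record defaults):
-% url_to_url(#url{protocol=http, host="example.com", port=8080, path="/db"})
-% gives "http://example.com:8080/db"; default ports (80 for http, 443 for
-% https) are omitted, and IPv6 hosts are bracketed.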
-
-
-body_length(Headers) ->
- case is_chunked(Headers) of
- true -> chunked;
- _ -> content_length(Headers)
- end.
-
-
-is_chunked([]) ->
- false;
-is_chunked([{K, V} | Rest]) ->
- case string:to_lower(K) of
- "transfer-encoding" ->
- string:to_lower(V) == "chunked";
- _ ->
- is_chunked(Rest)
- end.
-
-content_length([]) ->
- undefined;
-content_length([{K, V} | Rest]) ->
- case string:to_lower(K) of
- "content-length" ->
- list_to_integer(V);
- _ ->
- content_length(Rest)
- end.
-
diff --git a/1.1.x/src/couchdb/couch_httpd_rewrite.erl b/1.1.x/src/couchdb/couch_httpd_rewrite.erl
deleted file mode 100644
index a8297ae1..00000000
--- a/1.1.x/src/couchdb/couch_httpd_rewrite.erl
+++ /dev/null
@@ -1,434 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-%
-% bind_path is based on bind method from Webmachine
-
-
-%% @doc Module for URL rewriting by pattern matching.
-
--module(couch_httpd_rewrite).
--export([handle_rewrite_req/3]).
--include("couch_db.hrl").
-
--define(SEPARATOR, $\/).
--define(MATCH_ALL, {bind, <<"*">>}).
-
-
-%% @doc The HTTP rewrite handler. All rewriting is done from
-%% /dbname/_design/ddocname/_rewrite by default.
-%%
-%% Each rule should be in the rewrites member of the design doc.
-%% Example of a complete rule:
-%%
-%% {
-%% ....
-%% "rewrites": [
-%% {
-%% "from": "",
-%% "to": "index.html",
-%% "method": "GET",
-%% "query": {}
-%% }
-%% ]
-%% }
-%%
-%% from: the path rule used to bind the current URI to the rule. It
-%% uses pattern matching for that.
-%%
-%% to: rule used to rewrite a URL. It can contain variables, depending on the
-%% binding variables discovered during pattern matching, and query args (URL
-%% args and those from the query member).
-%%
-%% method: binds the request method to the rule; "*" by default.
-%% query: query args you want to define; they can contain dynamic variables
-%% by binding the key to the bindings.
-%%
-%%
-%% to and from are paths with patterns. A pattern can be a string starting
-%% with ":" or "*", e.g.:
-%% /somepath/:var/*
-%%
-%% This path is converted to an Erlang list by splitting on "/". Each variable
-%% is converted to an atom; "*" is converted to the '*' atom. The pattern
-%% matching is done by splitting the request URL on "/" into a list of tokens.
-%% A string pattern will match an equal token. The star atom ('*' in single
-%% quotes) will match any number of tokens, but may only be present as the
-%% last pathterm in a pathspec. If all tokens are matched and all pathterms
-%% are used, then the pathspec matches. It works like Webmachine. Each
-%% identified token will be reused in the to rule and in the query.
-%%
-%% The pattern matching is done by first matching the request method to a rule
-%% (by default all methods match a rule, since method is "*" by default). Then
-%% it tries to match the path to one rule. If no rule matches, a 404 error
-%% is returned.
-%%
-%% Once a rule is found, we rewrite the request URL using the "to" and
-%% "query" members. The identified tokens are matched to the rule and
-%% will replace vars. If '*' is found in the rule, it will contain the
-%% remaining part, if any.
-%%
-%% Examples:
-%%
-%% Dispatch rule URL TO Tokens
-%%
-%% {"from": "/a/b", /a/b?k=v /some/b?k=v var =:= b
-%% "to": "/some/"} k = v
-%%
-%% {"from": "/a/b", /a/b /some/b?var=b var =:= b
-%% "to": "/some/:var"}
-%%
-%% {"from": "/a", /a /some
-%% "to": "/some/*"}
-%%
-%% {"from": "/a/*", /a/b/c /some/b/c
-%% "to": "/some/*"}
-%%
-%% {"from": "/a", /a /some
-%% "to": "/some/*"}
-%%
-%% {"from": "/a/:foo/*", /a/b/c /some/b/c?foo=b foo =:= b
-%% "to": "/some/:foo/*"}
-%%
-%% {"from": "/a/:foo", /a/b /some/?k=b&foo=b foo =:= b
-%% "to": "/some",
-%% "query": {
-%% "k": ":foo"
-%% }}
-%%
-%% {"from": "/a", /a?foo=b /some/b foo =:= b
-%% "to": "/some/:foo",
-%% }}
-
-
-
-handle_rewrite_req(#httpd{
- path_parts=[DbName, <<"_design">>, DesignName, _Rewrite|PathParts],
- method=Method,
- mochi_req=MochiReq}=Req, _Db, DDoc) ->
-
- % we are in a design handler
- DesignId = <<"_design/", DesignName/binary>>,
- Prefix = <<"/", DbName/binary, "/", DesignId/binary>>,
- QueryList = couch_httpd:qs(Req),
- QueryList1 = [{to_binding(K), V} || {K, V} <- QueryList],
-
- #doc{body={Props}} = DDoc,
-
- % get rules from ddoc
- case couch_util:get_value(<<"rewrites">>, Props) of
- undefined ->
- couch_httpd:send_error(Req, 404, <<"rewrite_error">>,
- <<"Invalid path.">>);
- Rules ->
- % create dispatch list from rules
- DispatchList = [make_rule(Rule) || {Rule} <- Rules],
-
- %% get raw path by matching url to a rule.
- RawPath = case try_bind_path(DispatchList, couch_util:to_binary(Method), PathParts,
- QueryList1) of
- no_dispatch_path ->
- throw(not_found);
- {NewPathParts, Bindings} ->
- Parts = [quote_plus(X) || X <- NewPathParts],
-
- % build the new path, re-encode query args, and convert
- % them to JSON if needed
- Path = lists:append(
- string:join(Parts, [?SEPARATOR]),
- case Bindings of
- [] -> [];
- _ -> [$?, encode_query(Bindings)]
- end),
-
- % if the path is relative, detect it and rewrite it
- case mochiweb_util:safe_relative_path(Path) of
- undefined ->
- ?b2l(Prefix) ++ "/" ++ Path;
- P1 ->
- ?b2l(Prefix) ++ "/" ++ P1
- end
-
- end,
-
- % normalize final path (fix levels "." and "..")
- RawPath1 = ?b2l(iolist_to_binary(normalize_path(RawPath))),
-
- ?LOG_DEBUG("rewrite to ~p ~n", [RawPath1]),
-
- % build a new mochiweb request
- MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
- MochiReq:get(method),
- RawPath1,
- MochiReq:get(version),
- MochiReq:get(headers)),
-
- % cleanup; this forces mochiweb to reparse the raw uri.
- MochiReq1:cleanup(),
-
- #httpd{
- db_url_handlers = DbUrlHandlers,
- design_url_handlers = DesignUrlHandlers,
- default_fun = DefaultFun,
- url_handlers = UrlHandlers
- } = Req,
- couch_httpd:handle_request_int(MochiReq1, DefaultFun,
- UrlHandlers, DbUrlHandlers, DesignUrlHandlers)
- end.
-
-quote_plus({bind, X}) ->
- mochiweb_util:quote_plus(X);
-quote_plus(X) ->
- mochiweb_util:quote_plus(X).
-
-%% @doc Try to find a rule matching the current URL. If none is found,
-%% a 404 not_found error is raised.
-try_bind_path([], _Method, _PathParts, _QueryList) ->
- no_dispatch_path;
-try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) ->
- [{PathParts1, Method1}, RedirectPath, QueryArgs] = Dispatch,
- case bind_method(Method1, Method) of
- true ->
- case bind_path(PathParts1, PathParts, []) of
- {ok, Remaining, Bindings} ->
- Bindings1 = Bindings ++ QueryList,
- % parse query args from the rule and fill
- % them in with binding vars where needed
- QueryArgs1 = make_query_list(QueryArgs, Bindings1, []),
- % remove params in Bindings1 that are already in
- % QueryArgs1
- Bindings2 = lists:foldl(fun({K, V}, Acc) ->
- K1 = to_binding(K),
- KV = case couch_util:get_value(K1, QueryArgs1) of
- undefined -> [{K1, V}];
- _V1 -> []
- end,
- Acc ++ KV
- end, [], Bindings1),
-
- FinalBindings = Bindings2 ++ QueryArgs1,
- NewPathParts = make_new_path(RedirectPath, FinalBindings,
- Remaining, []),
- {NewPathParts, FinalBindings};
- fail ->
- try_bind_path(Rest, Method, PathParts, QueryList)
- end;
- false ->
- try_bind_path(Rest, Method, PathParts, QueryList)
- end.
-
-%% Dynamically rewrite the query list given as the query member in
-%% rewrites. Each value is replaced by a binding or an argument
-%% passed in the URL.
-make_query_list([], _Bindings, Acc) ->
- Acc;
-make_query_list([{Key, {Value}}|Rest], Bindings, Acc) ->
- Value1 = to_json({Value}),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_binary(Value) ->
- Value1 = replace_var(Key, Value, Bindings),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_list(Value) ->
- Value1 = replace_var(Key, Value, Bindings),
- make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
-make_query_list([{Key, Value}|Rest], Bindings, Acc) ->
- make_query_list(Rest, Bindings, [{to_binding(Key), Value}|Acc]).
-
-replace_var(Key, Value, Bindings) ->
- case Value of
- <<":", Var/binary>> ->
- get_var(Var, Bindings, Value);
- <<"*">> ->
- get_var(Value, Bindings, Value);
- _ when is_list(Value) ->
- Value1 = lists:foldr(fun(V, Acc) ->
- V1 = case V of
- <<":", VName/binary>> ->
- case get_var(VName, Bindings, V) of
- V2 when is_list(V2) ->
- iolist_to_binary(V2);
- V2 -> V2
- end;
- <<"*">> ->
- get_var(V, Bindings, V);
- _ ->
- V
- end,
- [V1|Acc]
- end, [], Value),
- to_json(Value1);
- _ when is_binary(Value) ->
- Value;
- _ ->
- case Key of
- <<"key">> -> to_json(Value);
- <<"startkey">> -> to_json(Value);
- <<"start_key">> -> to_json(Value);
- <<"endkey">> -> to_json(Value);
- <<"end_key">> -> to_json(Value);
- _ ->
- lists:flatten(?JSON_ENCODE(Value))
- end
- end.
-
-
-get_var(VarName, Props, Default) ->
- VarName1 = to_binding(VarName),
- couch_util:get_value(VarName1, Props, Default).
-
-%% @doc Build the new path from bindings. Bindings are query args
-%% (+ dynamic query rewritten if needed) and bindings found in the
-%% bind_path step.
-make_new_path([], _Bindings, _Remaining, Acc) ->
- lists:reverse(Acc);
-make_new_path([?MATCH_ALL], _Bindings, Remaining, Acc) ->
- Acc1 = lists:reverse(Acc) ++ Remaining,
- Acc1;
-make_new_path([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) ->
- Acc1 = lists:reverse(Acc) ++ Remaining,
- Acc1;
-make_new_path([{bind, P}|Rest], Bindings, Remaining, Acc) ->
- P2 = case couch_util:get_value({bind, P}, Bindings) of
- undefined -> <<"undefined">>;
- P1 -> P1
- end,
- make_new_path(Rest, Bindings, Remaining, [P2|Acc]);
-make_new_path([P|Rest], Bindings, Remaining, Acc) ->
- make_new_path(Rest, Bindings, Remaining, [P|Acc]).
-
-
-%% @doc Check whether the request method fits the rule method. If the
-%% method rule is '*', which is the default, any
-%% request method will bind. This allows us to make rules
-%% depending on the HTTP method.
-bind_method(?MATCH_ALL, _Method ) ->
- true;
-bind_method({bind, Method}, Method) ->
- true;
-bind_method(_, _) ->
- false.
-
-
-%% @doc Bind a path. Using the rule's from part, we try to bind the variables
-%% given in the current URL by pattern matching.
-bind_path([], [], Bindings) ->
- {ok, [], Bindings};
-bind_path([?MATCH_ALL], [Match|_RestMatch]=Rest, Bindings) ->
- {ok, Rest, [{?MATCH_ALL, Match}|Bindings]};
-bind_path(_, [], _) ->
- fail;
-bind_path([{bind, Token}|RestToken],[Match|RestMatch],Bindings) ->
- bind_path(RestToken, RestMatch, [{{bind, Token}, Match}|Bindings]);
-bind_path([Token|RestToken], [Token|RestMatch], Bindings) ->
- bind_path(RestToken, RestMatch, Bindings);
-bind_path(_, _, _) ->
- fail.
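-
-% Example (illustrative): bind_path([<<"a">>, {bind, <<"foo">>}],
-% [<<"a">>, <<"b">>], []) returns {ok, [], [{{bind, <<"foo">>}, <<"b">>}]};
-% a trailing ?MATCH_ALL would instead capture any remaining path parts.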
-
-
-%% normalize path.
-normalize_path(Path) ->
- "/" ++ string:join(normalize_path1(string:tokens(Path,
- "/"), []), [?SEPARATOR]).
-
-
-normalize_path1([], Acc) ->
- lists:reverse(Acc);
-normalize_path1([".."|Rest], Acc) ->
- Acc1 = case Acc of
- [] -> [".."|Acc];
- [T|_] when T =:= ".." -> [".."|Acc];
- [_|R] -> R
- end,
- normalize_path1(Rest, Acc1);
-normalize_path1(["."|Rest], Acc) ->
- normalize_path1(Rest, Acc);
-normalize_path1([Path|Rest], Acc) ->
- normalize_path1(Rest, [Path|Acc]).
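-
-% Example (illustrative): normalize_path("a/b/../c/./d") resolves the
-% "." and ".." segments, giving "/a/c/d".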
-
-
-%% @doc Transform a JSON rule into Erlang terms for pattern matching.
-make_rule(Rule) ->
- Method = case couch_util:get_value(<<"method">>, Rule) of
- undefined -> ?MATCH_ALL;
- M -> to_binding(M)
- end,
- QueryArgs = case couch_util:get_value(<<"query">>, Rule) of
- undefined -> [];
- {Args} -> Args
- end,
- FromParts = case couch_util:get_value(<<"from">>, Rule) of
- undefined -> [?MATCH_ALL];
- From ->
- parse_path(From)
- end,
- ToParts = case couch_util:get_value(<<"to">>, Rule) of
- undefined ->
- throw({error, invalid_rewrite_target});
- To ->
- parse_path(To)
- end,
- [{FromParts, Method}, ToParts, QueryArgs].
-
-parse_path(Path) ->
- {ok, SlashRE} = re:compile(<<"\\/">>),
- path_to_list(re:split(Path, SlashRE), [], 0).
-
-%% @doc Convert a path rule (from or to) to an Erlang list.
-%% "*" and path variables starting with ":" are converted
-%% to Erlang atoms.
-path_to_list([], Acc, _DotDotCount) ->
- lists:reverse(Acc);
-path_to_list([<<>>|R], Acc, DotDotCount) ->
- path_to_list(R, Acc, DotDotCount);
-path_to_list([<<"*">>|R], Acc, DotDotCount) ->
- path_to_list(R, [?MATCH_ALL|Acc], DotDotCount);
-path_to_list([<<"..">>|R], Acc, DotDotCount) when DotDotCount == 2 ->
- case couch_config:get("httpd", "secure_rewrites", "true") of
- "false" ->
- path_to_list(R, [<<"..">>|Acc], DotDotCount+1);
- _Else ->
- ?LOG_INFO("insecure_rewrite_rule ~p blocked", [lists:reverse(Acc) ++ [<<"..">>] ++ R]),
- throw({insecure_rewrite_rule, "too many ../.. segments"})
- end;
-path_to_list([<<"..">>|R], Acc, DotDotCount) ->
- path_to_list(R, [<<"..">>|Acc], DotDotCount+1);
-path_to_list([P|R], Acc, DotDotCount) ->
- P1 = case P of
- <<":", Var/binary>> ->
- to_binding(Var);
- _ -> P
- end,
- path_to_list(R, [P1|Acc], DotDotCount).
-
-encode_query(Props) ->
- Props1 = lists:foldl(fun ({{bind, K}, V}, Acc) ->
- case K of
- <<"*">> -> Acc;
- _ ->
- V1 = case is_list(V) orelse is_binary(V) of
- true -> V;
- false ->
- % probably it's a number
- quote_plus(V)
- end,
- [{K, V1} | Acc]
- end
- end, [], Props),
- lists:flatten(mochiweb_util:urlencode(Props1)).
-
-to_binding({bind, V}) ->
- {bind, V};
-to_binding(V) when is_list(V) ->
- to_binding(?l2b(V));
-to_binding(V) ->
- {bind, V}.
-
-to_json(V) ->
- iolist_to_binary(?JSON_ENCODE(V)).
diff --git a/1.1.x/src/couchdb/couch_httpd_show.erl b/1.1.x/src/couchdb/couch_httpd_show.erl
deleted file mode 100644
index 59f74e1c..00000000
--- a/1.1.x/src/couchdb/couch_httpd_show.erl
+++ /dev/null
@@ -1,404 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_show).
-
--export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3,
- handle_view_list/6, get_fun_key/3]).
-
--include("couch_db.hrl").
-
--import(couch_httpd,
- [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
- start_json_response/2,send_chunk/2,last_chunk/1,send_chunked_error/2,
- start_chunked_response/3, send_error/4]).
-
-
-% /db/_design/foo/_show/bar/docid
-% show converts a json doc to a response of any content-type.
-% it looks up the doc and then passes it to the query server.
-% then it sends the response from the query server to the http client.
-
-maybe_open_doc(Db, DocId) ->
- case catch couch_httpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of
- {not_found, missing} -> nil;
- {not_found,deleted} -> nil;
- Doc -> Doc
- end.
-handle_doc_show_req(#httpd{
- path_parts=[_, _, _, _, ShowName, DocId]
- }=Req, Db, DDoc) ->
-
- % open the doc
- Doc = maybe_open_doc(Db, DocId),
-
- % we don't handle revs here b/c they are an internal api
- % returns 404 if there is no doc with DocId
- handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId);
-
-handle_doc_show_req(#httpd{
- path_parts=[_, _, _, _, ShowName, DocId|Rest]
- }=Req, Db, DDoc) ->
-
- DocParts = [DocId|Rest],
- DocId1 = ?l2b(string:join([?b2l(P)|| P <- DocParts], "/")),
-
- % open the doc
- Doc = maybe_open_doc(Db, DocId1),
-
- % we don't handle revs here b/c they are an internal api
- % pass 404 docs to the show function
- handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId1);
-
-handle_doc_show_req(#httpd{
- path_parts=[_, _, _, _, ShowName]
- }=Req, Db, DDoc) ->
- % with no docid the doc is nil
- handle_doc_show(Req, Db, DDoc, ShowName, nil);
-
-handle_doc_show_req(Req, _Db, _DDoc) ->
- send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>).
-
-handle_doc_show(Req, Db, DDoc, ShowName, Doc) ->
- handle_doc_show(Req, Db, DDoc, ShowName, Doc, null).
-
-handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId) ->
- % get responder for ddoc/showname
- CurrentEtag = show_etag(Req, Doc, DDoc, []),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- JsonReq = couch_httpd_external:json_req_obj(Req, Db, DocId),
- JsonDoc = couch_query_servers:json_doc(Doc),
- [<<"resp">>, ExternalResp] =
- couch_query_servers:ddoc_prompt(DDoc, [<<"shows">>, ShowName], [JsonDoc, JsonReq]),
- JsonResp = apply_etag(ExternalResp, CurrentEtag),
- couch_httpd_external:send_external_response(Req, JsonResp)
- end).
-
-
-
-show_etag(#httpd{user_ctx=UserCtx}=Req, Doc, DDoc, More) ->
- Accept = couch_httpd:header_value(Req, "Accept"),
- DocPart = case Doc of
- nil -> nil;
- Doc -> couch_httpd:doc_etag(Doc)
- end,
- couch_httpd:make_etag({couch_httpd:doc_etag(DDoc), DocPart, Accept, UserCtx#user_ctx.roles, More}).
-
-get_fun_key(DDoc, Type, Name) ->
- #doc{body={Props}} = DDoc,
- Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
- Src = couch_util:get_nested_json_value({Props}, [Type, Name]),
- {Lang, Src}.
-
-% /db/_design/foo/update/bar/docid
-% updates a doc based on a request
-% handle_doc_update_req(#httpd{method = 'GET'}=Req, _Db, _DDoc) ->
-% % anything but GET
-% send_method_not_allowed(Req, "POST,PUT,DELETE,ETC");
-
-handle_doc_update_req(#httpd{
- path_parts=[_, _, _, _, UpdateName, DocId]
- }=Req, Db, DDoc) ->
- Doc = try couch_httpd_db:couch_doc_open(Db, DocId, nil, [conflicts])
- catch
- _ -> nil
- end,
- send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId);
-
-handle_doc_update_req(#httpd{
- path_parts=[_, _, _, _, UpdateName]
- }=Req, Db, DDoc) ->
- send_doc_update_response(Req, Db, DDoc, UpdateName, nil, null);
-
-handle_doc_update_req(Req, _Db, _DDoc) ->
- send_error(Req, 404, <<"update_error">>, <<"Invalid path.">>).
-
-send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
- JsonReq = couch_httpd_external:json_req_obj(Req, Db, DocId),
- JsonDoc = couch_query_servers:json_doc(Doc),
- {Code, JsonResp1} = case couch_query_servers:ddoc_prompt(DDoc,
- [<<"updates">>, UpdateName], [JsonDoc, JsonReq]) of
- [<<"up">>, {NewJsonDoc}, {JsonResp}] ->
- Options = case couch_httpd:header_value(Req, "X-Couch-Full-Commit",
- "false") of
- "true" ->
- [full_commit];
- _ ->
- []
- end,
- NewDoc = couch_doc:from_json_obj({NewJsonDoc}),
- {ok, NewRev} = couch_db:update_doc(Db, NewDoc, Options),
- NewRevStr = couch_doc:rev_to_str(NewRev),
- JsonRespWithRev = {[{<<"headers">>,
- {[{<<"X-Couch-Update-NewRev">>, NewRevStr}]}} | JsonResp]},
- {201, JsonRespWithRev};
- [<<"up">>, _Other, JsonResp] ->
- {200, JsonResp}
- end,
-
- JsonResp2 = couch_util:json_apply_field({<<"code">>, Code}, JsonResp1),
- % todo set location field
- couch_httpd_external:send_external_response(Req, JsonResp2).
-
-
-% view-list request with view and list from same design doc.
-handle_view_list_req(#httpd{method='GET',
- path_parts=[_, _, DesignName, _, ListName, ViewName]}=Req, Db, DDoc) ->
- Keys = couch_httpd:qs_json_value(Req, "keys", nil),
- handle_view_list(Req, Db, DDoc, ListName, {DesignName, ViewName}, Keys);
-
-% view-list request with view and list from different design docs.
-handle_view_list_req(#httpd{method='GET',
- path_parts=[_, _, _, _, ListName, ViewDesignName, ViewName]}=Req, Db, DDoc) ->
- Keys = couch_httpd:qs_json_value(Req, "keys", nil),
- handle_view_list(Req, Db, DDoc, ListName, {ViewDesignName, ViewName}, Keys);
-
-handle_view_list_req(#httpd{method='GET'}=Req, _Db, _DDoc) ->
- send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>);
-
-handle_view_list_req(#httpd{method='POST',
- path_parts=[_, _, DesignName, _, ListName, ViewName]}=Req, Db, DDoc) ->
- % {Props2} = couch_httpd:json_body(Req),
- ReqBody = couch_httpd:body(Req),
- {Props2} = ?JSON_DECODE(ReqBody),
- Keys = couch_util:get_value(<<"keys">>, Props2, nil),
- handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, {DesignName, ViewName}, Keys);
-
-handle_view_list_req(#httpd{method='POST',
- path_parts=[_, _, _, _, ListName, ViewDesignName, ViewName]}=Req, Db, DDoc) ->
- % {Props2} = couch_httpd:json_body(Req),
- ReqBody = couch_httpd:body(Req),
- {Props2} = ?JSON_DECODE(ReqBody),
- Keys = couch_util:get_value(<<"keys">>, Props2, nil),
- handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, {ViewDesignName, ViewName}, Keys);
-
-handle_view_list_req(#httpd{method='POST'}=Req, _Db, _DDoc) ->
- send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>);
-
-handle_view_list_req(Req, _Db, _DDoc) ->
- send_method_not_allowed(Req, "GET,POST,HEAD").
-
-handle_view_list(Req, Db, DDoc, LName, {ViewDesignName, ViewName}, Keys) ->
- ViewDesignId = <<"_design/", ViewDesignName/binary>>,
- {ViewType, View, Group, QueryArgs} = couch_httpd_view:load_view(Req, Db, {ViewDesignId, ViewName}, Keys),
- Etag = list_etag(Req, Db, Group, View, {couch_httpd:doc_etag(DDoc), Keys}),
- couch_httpd:etag_respond(Req, Etag, fun() ->
- output_list(ViewType, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group)
- end).
-
-list_etag(#httpd{user_ctx=UserCtx}=Req, Db, Group, View, More) ->
- Accept = couch_httpd:header_value(Req, "Accept"),
- couch_httpd_view:view_etag(Db, Group, View, {More, Accept, UserCtx#user_ctx.roles}).
-
-output_list(map, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
- output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group);
-output_list(reduce, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
- output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group).
-
-% next step:
-% use with_ddoc_proc/2 to make this simpler
-output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
- #view_query_args{
- limit = Limit,
- skip = SkipCount
- } = QueryArgs,
-
- FoldAccInit = {Limit, SkipCount, undefined, []},
- {ok, RowCount} = couch_view:get_row_count(View),
-
-
- couch_query_servers:with_ddoc_proc(DDoc, fun(QServer) ->
-
- ListFoldHelpers = #view_fold_helper_funs{
- reduce_count = fun couch_view:reduce_to_count/1,
- start_response = StartListRespFun = make_map_start_resp_fun(QServer, Db, LName),
- send_row = make_map_send_row_fun(QServer)
- },
- CurrentSeq = Group#group.current_seq,
-
- {ok, _, FoldResult} = case Keys of
- nil ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, Etag, Db, CurrentSeq, RowCount, ListFoldHelpers),
- couch_view:fold(View, FoldlFun, FoldAccInit,
- couch_httpd_view:make_key_options(QueryArgs));
- Keys ->
- lists:foldl(
- fun(Key, {ok, _, FoldAcc}) ->
- QueryArgs2 = QueryArgs#view_query_args{
- start_key = Key,
- end_key = Key
- },
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs2, Etag, Db, CurrentSeq, RowCount, ListFoldHelpers),
- couch_view:fold(View, FoldlFun, FoldAcc,
- couch_httpd_view:make_key_options(QueryArgs2))
- end, {ok, nil, FoldAccInit}, Keys)
- end,
- finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, CurrentSeq, RowCount)
- end).
-
-
-output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
- #view_query_args{
- limit = Limit,
- skip = SkipCount,
- group_level = GroupLevel
- } = QueryArgs,
-
- CurrentSeq = Group#group.current_seq,
-
- couch_query_servers:with_ddoc_proc(DDoc, fun(QServer) ->
- StartListRespFun = make_reduce_start_resp_fun(QServer, Db, LName),
- SendListRowFun = make_reduce_send_row_fun(QServer, Db),
- {ok, GroupRowsFun, RespFun} = couch_httpd_view:make_reduce_fold_funs(Req,
- GroupLevel, QueryArgs, Etag, CurrentSeq,
- #reduce_fold_helper_funs{
- start_response = StartListRespFun,
- send_row = SendListRowFun
- }),
- FoldAccInit = {Limit, SkipCount, undefined, []},
- {ok, FoldResult} = case Keys of
- nil ->
- couch_view:fold_reduce(View, RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
- couch_httpd_view:make_key_options(QueryArgs)]);
- Keys ->
- lists:foldl(
- fun(Key, {ok, FoldAcc}) ->
- couch_view:fold_reduce(View, RespFun, FoldAcc,
- [{key_group_fun, GroupRowsFun} |
- couch_httpd_view:make_key_options(
- QueryArgs#view_query_args{start_key=Key, end_key=Key})]
- )
- end, {ok, FoldAccInit}, Keys)
- end,
- finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, CurrentSeq, null)
- end).
-
-
-make_map_start_resp_fun(QueryServer, Db, LName) ->
- fun(Req, Etag, TotalRows, Offset, _Acc, UpdateSeq) ->
- Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}, {<<"update_seq">>, UpdateSeq}]},
- start_list_resp(QueryServer, LName, Req, Db, Head, Etag)
- end.
-
-make_reduce_start_resp_fun(QueryServer, Db, LName) ->
- fun(Req2, Etag, _Acc, UpdateSeq) ->
- start_list_resp(QueryServer, LName, Req2, Db, {[{<<"update_seq">>, UpdateSeq}]}, Etag)
- end.
-
-start_list_resp(QServer, LName, Req, Db, Head, Etag) ->
- JsonReq = couch_httpd_external:json_req_obj(Req, Db),
- [<<"start">>,Chunks,JsonResp] = couch_query_servers:ddoc_proc_prompt(QServer,
- [<<"lists">>, LName], [Head, JsonReq]),
- JsonResp2 = apply_etag(JsonResp, Etag),
- #extern_resp_args{
- code = Code,
- ctype = CType,
- headers = ExtHeaders
- } = couch_httpd_external:parse_external_response(JsonResp2),
- JsonHeaders = couch_httpd_external:default_or_content_type(CType, ExtHeaders),
- {ok, Resp} = start_chunked_response(Req, Code, JsonHeaders),
- {ok, Resp, ?b2l(?l2b(Chunks))}.
-
-make_map_send_row_fun(QueryServer) ->
- fun(Resp, Db, Row, IncludeDocs, Conflicts, RowFront) ->
- send_list_row(
- Resp, QueryServer, Db, Row, RowFront, IncludeDocs, Conflicts)
- end.
-
-make_reduce_send_row_fun(QueryServer, Db) ->
- fun(Resp, Row, RowFront) ->
- send_list_row(Resp, QueryServer, Db, Row, RowFront, false, false)
- end.
-
-send_list_row(Resp, QueryServer, Db, Row, RowFront, IncludeDoc, Conflicts) ->
- try
- [Go,Chunks] = prompt_list_row(
- QueryServer, Db, Row, IncludeDoc, Conflicts),
- Chunk = RowFront ++ ?b2l(?l2b(Chunks)),
- send_non_empty_chunk(Resp, Chunk),
- case Go of
- <<"chunks">> ->
- {ok, ""};
- <<"end">> ->
- {stop, stop}
- end
- catch
- throw:Error ->
- send_chunked_error(Resp, Error),
- throw({already_sent, Resp, Error})
- end.
-
-
-prompt_list_row({Proc, _DDocId}, Db, {{_Key, _DocId}, _} = Kv,
- IncludeDoc, Conflicts) ->
- JsonRow = couch_httpd_view:view_row_obj(Db, Kv, IncludeDoc, Conflicts),
- couch_query_servers:proc_prompt(Proc, [<<"list_row">>, JsonRow]);
-
-prompt_list_row({Proc, _DDocId}, _, {Key, Value}, _IncludeDoc, _Conflicts) ->
- JsonRow = {[{key, Key}, {value, Value}]},
- couch_query_servers:proc_prompt(Proc, [<<"list_row">>, JsonRow]).
-
-send_non_empty_chunk(Resp, Chunk) ->
- case Chunk of
- [] -> ok;
- _ -> send_chunk(Resp, Chunk)
- end.
-
-finish_list(Req, {Proc, _DDocId}, Etag, FoldResult, StartFun, CurrentSeq, TotalRows) ->
- FoldResult2 = case FoldResult of
- {Limit, SkipCount, Response, RowAcc} ->
- {Limit, SkipCount, Response, RowAcc, nil};
- Else ->
- Else
- end,
- case FoldResult2 of
- {_, _, undefined, _, _} ->
- {ok, Resp, BeginBody} =
- render_head_for_empty_list(StartFun, Req, Etag, CurrentSeq, TotalRows),
- [<<"end">>, Chunks] = couch_query_servers:proc_prompt(Proc, [<<"list_end">>]),
- Chunk = BeginBody ++ ?b2l(?l2b(Chunks)),
- send_non_empty_chunk(Resp, Chunk);
- {_, _, Resp, stop, _} ->
- ok;
- {_, _, Resp, _, _} ->
- [<<"end">>, Chunks] = couch_query_servers:proc_prompt(Proc, [<<"list_end">>]),
- send_non_empty_chunk(Resp, ?b2l(?l2b(Chunks)))
- end,
- last_chunk(Resp).
-
-
-render_head_for_empty_list(StartListRespFun, Req, Etag, CurrentSeq, null) ->
- StartListRespFun(Req, Etag, [], CurrentSeq); % for reduce
-render_head_for_empty_list(StartListRespFun, Req, Etag, CurrentSeq, TotalRows) ->
- StartListRespFun(Req, Etag, TotalRows, null, [], CurrentSeq).
-
-apply_etag({ExternalResponse}, CurrentEtag) ->
- % Here we embark on the delicate task of replacing or creating the
- % headers on the JsonResponse object. We need to control the Etag and
- % Vary headers. If the external function controls the Etag, we'd have to
- % run it to check for a match, which sort of defeats the purpose.
- case couch_util:get_value(<<"headers">>, ExternalResponse, nil) of
- nil ->
- % no JSON headers
- % add our Etag and Vary headers to the response
- {[{<<"headers">>, {[{<<"Etag">>, CurrentEtag}, {<<"Vary">>, <<"Accept">>}]}} | ExternalResponse]};
- JsonHeaders ->
- {[case Field of
- {<<"headers">>, JsonHeaders} -> % add our headers
- JsonHeadersEtagged = couch_util:json_apply_field({<<"Etag">>, CurrentEtag}, JsonHeaders),
- JsonHeadersVaried = couch_util:json_apply_field({<<"Vary">>, <<"Accept">>}, JsonHeadersEtagged),
- {<<"headers">>, JsonHeadersVaried};
- _ -> % skip non-header fields
- Field
- end || Field <- ExternalResponse]}
- end.
-
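-% A small worked illustration of apply_etag/2, using hypothetical values:
-% given a list-function response object with no headers member,
-%
-%   apply_etag({[{<<"body">>, <<"x">>}]}, <<"\"abc\"">>)
-%
-% returns
-%
-%   {[{<<"headers">>, {[{<<"Etag">>, <<"\"abc\"">>},
-%                       {<<"Vary">>, <<"Accept">>}]}},
-%     {<<"body">>, <<"x">>}]}
-%
-% so the response carries the etag the list output was generated under.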
diff --git a/1.1.x/src/couchdb/couch_httpd_stats_handlers.erl b/1.1.x/src/couchdb/couch_httpd_stats_handlers.erl
deleted file mode 100644
index 41aeaed0..00000000
--- a/1.1.x/src/couchdb/couch_httpd_stats_handlers.erl
+++ /dev/null
@@ -1,56 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_stats_handlers).
--include("couch_db.hrl").
-
--export([handle_stats_req/1]).
--import(couch_httpd, [
- send_json/2, send_json/3, send_json/4, send_method_not_allowed/2,
- start_json_response/2, send_chunk/2, end_json_response/1,
- start_chunked_response/3, send_error/4
-]).
-
-handle_stats_req(#httpd{method='GET', path_parts=[_]}=Req) ->
- flush(Req),
- send_json(Req, couch_stats_aggregator:all(range(Req)));
-
-handle_stats_req(#httpd{method='GET', path_parts=[_, _Mod]}) ->
- throw({bad_request, <<"Stat names must have exactly to parts.">>});
-
-handle_stats_req(#httpd{method='GET', path_parts=[_, Mod, Key]}=Req) ->
- flush(Req),
- Stats = couch_stats_aggregator:get_json({list_to_atom(binary_to_list(Mod)),
- list_to_atom(binary_to_list(Key))}, range(Req)),
- send_json(Req, {[{Mod, {[{Key, Stats}]}}]});
-
-handle_stats_req(#httpd{method='GET', path_parts=[_, _Mod, _Key | _Extra]}) ->
- throw({bad_request, <<"Stat names must have exactly two parts.">>});
-
-handle_stats_req(Req) ->
- send_method_not_allowed(Req, "GET").
-
-range(Req) ->
- case couch_util:get_value("range", couch_httpd:qs(Req)) of
- undefined ->
- 0;
- Value ->
- list_to_integer(Value)
- end.
-
-flush(Req) ->
- case couch_util:get_value("flush", couch_httpd:qs(Req)) of
- "true" ->
- couch_stats_aggregator:collect_sample();
- _Else ->
- ok
- end.
diff --git a/1.1.x/src/couchdb/couch_httpd_vhost.erl b/1.1.x/src/couchdb/couch_httpd_vhost.erl
deleted file mode 100644
index 9bfb5951..00000000
--- a/1.1.x/src/couchdb/couch_httpd_vhost.erl
+++ /dev/null
@@ -1,403 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-
-
--module(couch_httpd_vhost).
--behaviour(gen_server).
-
--export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
--export([code_change/3, terminate/2]).
--export([match_vhost/1, urlsplit_netloc/2]).
--export([redirect_to_vhost/2]).
-
--include("couch_db.hrl").
-
--define(SEPARATOR, $\/).
--define(MATCH_ALL, {bind, '*'}).
-
--record(vhosts, {
- vhost_globals,
- vhosts = [],
- vhost_fun
-}).
-
-
-%% @doc The vhost manager.
-%% This gen_server keeps the state of vhosts added to the ini file and
-%% tries to match the Host header (or the forwarded host) against the
-%% rules built from the vhost list.
-%%
-%% Declaration of vhosts takes place in the configuration file:
-%%
-%% [vhosts]
-%% example.com = /example
-%% *.example.com = /example
-%%
-%% The first line rewrites the request to display the content of the
-%% example database. This rule works only if the Host header is
-%% 'example.com' and won't work for CNAMEs. The second rule, on the other
-%% hand, matches all CNAMEs to the example db, so www.example.com or
-%% db.example.com will work.
-%%
-%% The wildcard ('*') should always be the last in the cnames:
-%%
-%% "*.db.example.com = /" will match all cnames on top of db.example.com
-%% to the root of the machine.
-%%
-%%
-%% Rewriting Hosts to path
-%% -----------------------
-%%
-%% As in the _rewrite handler, you can match some variables and use
-%% them to create the target path. Some examples:
-%%
-%% [vhosts]
-%% *.example.com = /*
-%% :dbname.example.com = /:dbname
-%% :ddocname.:dbname.example.com = /:dbname/_design/:ddocname/_rewrite
-%%
-%% The first rule passes the wildcard as dbname, the second one does the
-%% same but uses a variable name, and the third one allows you to use any
-%% app with @ddocname in any db with @dbname.
-%%
-%% You can also change the default function used to handle a request by
-%% changing the setting `redirect_vhost_handler` in the `httpd` section of
-%% the ini:
-%%
-%% [httpd]
-%% redirect_vhost_handler = {Module, Fun}
-%%
-%% The function takes 2 args: the mochiweb request object and the target
-%% path.
-
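-%% A minimal sketch of a custom `redirect_vhost_handler` honoring the
-%% 2-arity contract above. The module and function names are illustrative,
-%% not part of CouchDB; the body mirrors the default redirect_to_vhost/2
-%% below, adding one marker header:
-%%
-%%   -module(my_vhost_handler).
-%%   -export([redirect/2]).
-%%
-%%   redirect(MochiReq, VhostTarget) ->
-%%       % prepend the vhost target to the raw path, as the default does
-%%       Target = VhostTarget ++ MochiReq:get(raw_path),
-%%       Headers = mochiweb_headers:enter("x-my-vhost-rewrite", "true",
-%%           MochiReq:get(headers)),
-%%       MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
-%%           MochiReq:get(method), Target, MochiReq:get(version), Headers),
-%%       MochiReq1:cleanup(),  % force mochiweb to reparse the raw uri
-%%       MochiReq1.
-%%
-%% and configured in the ini as:
-%%
-%%   [httpd]
-%%   redirect_vhost_handler = {my_vhost_handler, redirect}
-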
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-%% @doc Try to find a rule matching the current Host header. If a rule is
-%% found, it rewrites the Mochiweb Request; otherwise it returns the
-%% current Request unchanged.
-match_vhost(MochiReq) ->
- {ok, MochiReq1} = gen_server:call(couch_httpd_vhost, {match_vhost,
- MochiReq}),
-
- MochiReq1.
-
-
-%% --------------------
-%% gen_server functions
-%% --------------------
-
-init(_) ->
- process_flag(trap_exit, true),
-
- % init state
- VHosts = make_vhosts(),
- VHostGlobals = re:split(
- couch_config:get("httpd", "vhost_global_handlers", ""),
- ", ?",
- [{return, list}]
- ),
-
- % Set vhost fun
- DefaultVHostFun = "{couch_httpd_vhost, redirect_to_vhost}",
- VHostFun = couch_httpd:make_arity_2_fun(
- couch_config:get("httpd", "redirect_vhost_handler", DefaultVHostFun)
- ),
-
-
- Self = self(),
- % register for changes in vhosts section
- ok = couch_config:register(
- fun("vhosts") ->
- ok = gen_server:call(Self, vhosts_changed, infinity)
- end
- ),
-
- % register for changes in vhost_global_handlers key
- ok = couch_config:register(
- fun("httpd", "vhost_global_handlers") ->
- ok = gen_server:call(Self, vhosts_global_changed, infinity)
- end
- ),
-
- ok = couch_config:register(
- fun("httpd", "redirect_vhost_handler") ->
- ok = gen_server:call(Self, fun_changed, infinity)
- end
- ),
-
- {ok, #vhosts{
- vhost_globals = VHostGlobals,
- vhosts = VHosts,
- vhost_fun = VHostFun}
- }.
-
-
-handle_call({match_vhost, MochiReq}, _From, State) ->
- #vhosts{
- vhost_globals = VHostGlobals,
- vhosts = VHosts,
- vhost_fun = Fun
- } = State,
-
- {"/" ++ VPath, Query, Fragment} = mochiweb_util:urlsplit_path(MochiReq:get(raw_path)),
- VPathParts = string:tokens(VPath, "/"),
-
- XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"),
- VHost = case MochiReq:get_header_value(XHost) of
- undefined ->
- case MochiReq:get_header_value("Host") of
- undefined -> [];
- Value1 -> Value1
- end;
- Value -> Value
- end,
- {VHostParts, VhostPort} = split_host_port(VHost),
- FinalMochiReq = case try_bind_vhost(VHosts, lists:reverse(VHostParts),
- VhostPort, VPathParts) of
- no_vhost_matched -> MochiReq;
- {VhostTarget, NewPath} ->
- case vhost_global(VHostGlobals, MochiReq) of
- true ->
- MochiReq;
- _Else ->
- NewPath1 = mochiweb_util:urlunsplit_path({NewPath, Query,
- Fragment}),
- MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
- MochiReq:get(method),
- NewPath1,
- MochiReq:get(version),
- MochiReq:get(headers)),
- Fun(MochiReq1, VhostTarget)
- end
- end,
- {reply, {ok, FinalMochiReq}, State};
-
-% update vhosts
-handle_call(vhosts_changed, _From, State) ->
- {reply, ok, State#vhosts{vhosts= make_vhosts()}};
-
-
-% update vhosts_globals
-handle_call(vhosts_global_changed, _From, State) ->
- VHostGlobals = re:split(
- couch_config:get("httpd", "vhost_global_handlers", ""),
- ", ?",
- [{return, list}]
- ),
- {reply, ok, State#vhosts{vhost_globals=VHostGlobals}};
-% change fun
-handle_call(fun_changed, _From, State) ->
- DefaultVHostFun = "{couch_httpd_vhosts, redirect_to_vhost}",
- VHostFun = couch_httpd:make_arity_2_fun(
- couch_config:get("httpd", "redirect_vhost_handler", DefaultVHostFun)
- ),
- {reply, ok, State#vhosts{vhost_fun=VHostFun}}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Msg, State) ->
- {noreply, State}.
-
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-
-% default redirect vhost handler
-
-redirect_to_vhost(MochiReq, VhostTarget) ->
- Path = MochiReq:get(raw_path),
- Target = VhostTarget ++ Path,
-
- ?LOG_DEBUG("Vhost Target: '~p'~n", [Target]),
-
- Headers = mochiweb_headers:enter("x-couchdb-vhost-path", Path,
- MochiReq:get(headers)),
-
- % build a new mochiweb request
- MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
- MochiReq:get(method),
- Target,
- MochiReq:get(version),
- Headers),
- % cleanup; this forces mochiweb to reparse the raw uri.
- MochiReq1:cleanup(),
-
- MochiReq1.
-
-%% Check if the first path segment of the request is in the vhost globals
-%% list; if so, it will not be rewritten, but will run as a normal couchdb
-%% request. Normally you'd use this for _uuids, _utils and a few of the
-%% others you want to keep available on vhosts. You can also use it to make
-%% databases 'global'.
-vhost_global(VhostGlobals, MochiReq) ->
- RawUri = MochiReq:get(raw_path),
- {"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri),
-
- Front = case couch_httpd:partition(Path) of
- {"", "", ""} ->
- "/"; % Special case the root url handler
- {FirstPart, _, _} ->
- FirstPart
- end,
- [true] == [true||V <- VhostGlobals, V == Front].
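-
-%% For example, with the hypothetical config
-%%
-%%   [httpd]
-%%   vhost_global_handlers = _utils, _uuids
-%%
-%% a request to http://db.example.com/_utils/ keeps its path untouched,
-%% because its first path segment (_utils) is in the globals list.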
-
-%% bind host
-%% First it tries to bind the port, then the hostname.
-try_bind_vhost([], _HostParts, _Port, _PathParts) ->
- no_vhost_matched;
-try_bind_vhost([VhostSpec|Rest], HostParts, Port, PathParts) ->
- {{VHostParts, VPort, VPath}, Path} = VhostSpec,
- case bind_port(VPort, Port) of
- ok ->
- case bind_vhost(lists:reverse(VHostParts), HostParts, []) of
- {ok, Bindings, Remainings} ->
- case bind_path(VPath, PathParts) of
- {ok, PathParts1} ->
- Path1 = make_target(Path, Bindings, Remainings, []),
- {make_path(Path1), make_path(PathParts1)};
- fail ->
- try_bind_vhost(Rest, HostParts, Port,
- PathParts)
- end;
- fail -> try_bind_vhost(Rest, HostParts, Port, PathParts)
- end;
- fail -> try_bind_vhost(Rest, HostParts, Port, PathParts)
- end.
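-
-%% For illustration, with the hypothetical rule
-%%
-%%   :dbname.example.com = /:dbname
-%%
-%% the parsed vhost spec binds {bind, "dbname"} to the first host part, so
-%% a request with Host: db1.example.com gets its path prefixed with the
-%% target /db1.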
-
-%% doc: build the new path from bindings. Bindings are query args
-%% (+ dynamic query rewritten if needed) and bindings found in the
-%% bind_path step.
-%% TODO: merge this code with the rewrite handler. But we need to make
-%% sure we are working with strings here.
-make_target([], _Bindings, _Remaining, Acc) ->
- lists:reverse(Acc);
-make_target([?MATCH_ALL], _Bindings, Remaining, Acc) ->
- Acc1 = lists:reverse(Acc) ++ Remaining,
- Acc1;
-make_target([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) ->
- Acc1 = lists:reverse(Acc) ++ Remaining,
- Acc1;
-make_target([{bind, P}|Rest], Bindings, Remaining, Acc) ->
- P2 = case couch_util:get_value({bind, P}, Bindings) of
- undefined -> "undefined";
- P1 -> P1
- end,
- make_target(Rest, Bindings, Remaining, [P2|Acc]);
-make_target([P|Rest], Bindings, Remaining, Acc) ->
- make_target(Rest, Bindings, Remaining, [P|Acc]).
-
-%% bind port
-bind_port(Port, Port) -> ok;
-bind_port('*', _) -> ok;
-bind_port(_,_) -> fail.
-
-%% bind vhost
-bind_vhost([],[], Bindings) -> {ok, Bindings, []};
-bind_vhost([?MATCH_ALL], [], _Bindings) -> fail;
-bind_vhost([?MATCH_ALL], Rest, Bindings) -> {ok, Bindings, Rest};
-bind_vhost([], _HostParts, _Bindings) -> fail;
-bind_vhost([{bind, Token}|Rest], [Match|RestHost], Bindings) ->
- bind_vhost(Rest, RestHost, [{{bind, Token}, Match}|Bindings]);
-bind_vhost([Cname|Rest], [Cname|RestHost], Bindings) ->
- bind_vhost(Rest, RestHost, Bindings);
-bind_vhost(_, _, _) -> fail.
-
-%% bind path
-bind_path([], PathParts) ->
- {ok, PathParts};
-bind_path(_VPathParts, []) ->
- fail;
-bind_path([Path|VRest],[Path|Rest]) ->
- bind_path(VRest, Rest);
-bind_path(_, _) ->
- fail.
-
-% utilities
-
-
-%% create vhost list from ini
-make_vhosts() ->
- Vhosts = lists:foldl(fun({Vhost, Path}, Acc) ->
- [{parse_vhost(Vhost), split_path(Path)}|Acc]
- end, [], couch_config:get("vhosts")),
- lists:reverse(lists:usort(Vhosts)).
-
-parse_vhost(Vhost) ->
- case urlsplit_netloc(Vhost, []) of
- {[], Path} ->
- {make_spec("*", []), '*', Path};
- {HostPort, []} ->
- {H, P} = split_host_port(HostPort),
- H1 = make_spec(H, []),
- {H1, P, []};
- {HostPort, Path} ->
- {H, P} = split_host_port(HostPort),
- H1 = make_spec(H, []),
- {H1, P, string:tokens(Path, "/")}
- end.
-
-
-split_host_port(HostAsString) ->
- case string:rchr(HostAsString, $:) of
- 0 ->
- {split_host(HostAsString), '*'};
- N ->
- HostPart = string:substr(HostAsString, 1, N-1),
- case (catch erlang:list_to_integer(string:substr(HostAsString, N+1,
- length(HostAsString)))) of
- {'EXIT', _} ->
- {split_host(HostAsString), '*'};
- Port ->
- {split_host(HostPart), Port}
- end
- end.
-
-split_host(HostAsString) ->
- string:tokens(HostAsString, ".").
-
-split_path(Path) ->
- make_spec(string:tokens(Path, "/"), []).
-
-
-make_spec([], Acc) ->
- lists:reverse(Acc);
-make_spec([""|R], Acc) ->
- make_spec(R, Acc);
-make_spec(["*"|R], Acc) ->
- make_spec(R, [?MATCH_ALL|Acc]);
-make_spec([P|R], Acc) ->
- P1 = parse_var(P),
- make_spec(R, [P1|Acc]).
-
-
-parse_var(P) ->
- case P of
- ":" ++ Var ->
- {bind, Var};
- _ -> P
- end.
-
-
-% mochiweb doesn't export it.
-urlsplit_netloc("", Acc) ->
- {lists:reverse(Acc), ""};
-urlsplit_netloc(Rest=[C | _], Acc) when C =:= $/; C =:= $?; C =:= $# ->
- {lists:reverse(Acc), Rest};
-urlsplit_netloc([C | Rest], Acc) ->
- urlsplit_netloc(Rest, [C | Acc]).
-
-make_path(Parts) ->
- "/" ++ string:join(Parts,[?SEPARATOR]).
diff --git a/1.1.x/src/couchdb/couch_httpd_view.erl b/1.1.x/src/couchdb/couch_httpd_view.erl
deleted file mode 100644
index b71fc2c6..00000000
--- a/1.1.x/src/couchdb/couch_httpd_view.erl
+++ /dev/null
@@ -1,755 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_httpd_view).
--include("couch_db.hrl").
-
--export([handle_view_req/3,handle_temp_view_req/2]).
-
--export([parse_view_params/3]).
--export([make_view_fold_fun/7, finish_view_fold/4, finish_view_fold/5, view_row_obj/4]).
--export([view_etag/3, view_etag/4, make_reduce_fold_funs/6]).
--export([design_doc_view/5, parse_bool_param/1, doc_member/3]).
--export([make_key_options/1, load_view/4]).
-
--import(couch_httpd,
- [send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,send_chunk/2,
- start_json_response/2, start_json_response/3, end_json_response/1,
- send_chunked_error/2]).
-
--import(couch_db,[get_update_seq/1]).
-
-design_doc_view(Req, Db, DName, ViewName, Keys) ->
- DesignId = <<"_design/", DName/binary>>,
- Stale = get_stale_type(Req),
- Reduce = get_reduce_type(Req),
- Result = case couch_view:get_map_view(Db, DesignId, ViewName, Stale) of
- {ok, View, Group} ->
- QueryArgs = parse_view_params(Req, Keys, map),
- output_map_view(Req, View, Group, Db, QueryArgs, Keys);
- {not_found, Reason} ->
- case couch_view:get_reduce_view(Db, DesignId, ViewName, Stale) of
- {ok, ReduceView, Group} ->
- case Reduce of
- false ->
- QueryArgs = parse_view_params(Req, Keys, red_map),
- MapView = couch_view:extract_map_view(ReduceView),
- output_map_view(Req, MapView, Group, Db, QueryArgs, Keys);
- _ ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
- output_reduce_view(Req, Db, ReduceView, Group, QueryArgs, Keys)
- end;
- _ ->
- throw({not_found, Reason})
- end
- end,
- couch_stats_collector:increment({httpd, view_reads}),
- Result.
-
-handle_view_req(#httpd{method='GET',
- path_parts=[_, _, DName, _, ViewName]}=Req, Db, _DDoc) ->
- Keys = couch_httpd:qs_json_value(Req, "keys", nil),
- design_doc_view(Req, Db, DName, ViewName, Keys);
-
-handle_view_req(#httpd{method='POST',
- path_parts=[_, _, DName, _, ViewName]}=Req, Db, _DDoc) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- {Fields} = couch_httpd:json_body_obj(Req),
- case couch_util:get_value(<<"keys">>, Fields, nil) of
- nil ->
- Fmt = "POST to view ~p/~p in database ~p with no keys member.",
- ?LOG_DEBUG(Fmt, [DName, ViewName, Db]),
- design_doc_view(Req, Db, DName, ViewName, nil);
- Keys when is_list(Keys) ->
- design_doc_view(Req, Db, DName, ViewName, Keys);
- _ ->
- throw({bad_request, "`keys` member must be an array."})
- end;
-
-handle_view_req(Req, _Db, _DDoc) ->
- send_method_not_allowed(Req, "GET,POST,HEAD").
-
-handle_temp_view_req(#httpd{method='POST'}=Req, Db) ->
- couch_httpd:validate_ctype(Req, "application/json"),
- ok = couch_db:check_is_admin(Db),
- couch_stats_collector:increment({httpd, temporary_view_reads}),
- {Props} = couch_httpd:json_body_obj(Req),
- Language = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
- {DesignOptions} = couch_util:get_value(<<"options">>, Props, {[]}),
- MapSrc = couch_util:get_value(<<"map">>, Props),
- Keys = couch_util:get_value(<<"keys">>, Props, nil),
- Reduce = get_reduce_type(Req),
- case couch_util:get_value(<<"reduce">>, Props, null) of
- null ->
- QueryArgs = parse_view_params(Req, Keys, map),
- {ok, View, Group} = couch_view:get_temp_map_view(Db, Language,
- DesignOptions, MapSrc),
- output_map_view(Req, View, Group, Db, QueryArgs, Keys);
- _ when Reduce =:= false ->
- QueryArgs = parse_view_params(Req, Keys, red_map),
- {ok, View, Group} = couch_view:get_temp_map_view(Db, Language,
- DesignOptions, MapSrc),
- output_map_view(Req, View, Group, Db, QueryArgs, Keys);
- RedSrc ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
- {ok, View, Group} = couch_view:get_temp_reduce_view(Db, Language,
- DesignOptions, MapSrc, RedSrc),
- output_reduce_view(Req, Db, View, Group, QueryArgs, Keys)
- end;
-
-handle_temp_view_req(Req, _Db) ->
- send_method_not_allowed(Req, "POST").
-
-output_map_view(Req, View, Group, Db, QueryArgs, nil) ->
- #view_query_args{
- limit = Limit,
- skip = SkipCount
- } = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, RowCount} = couch_view:get_row_count(View),
- FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, Group#group.current_seq, RowCount, #view_fold_helper_funs{reduce_count=fun couch_view:reduce_to_count/1}),
- FoldAccInit = {Limit, SkipCount, undefined, []},
- {ok, LastReduce, FoldResult} = couch_view:fold(View,
- FoldlFun, FoldAccInit, make_key_options(QueryArgs)),
- finish_view_fold(Req, RowCount,
- couch_view:reduce_to_count(LastReduce), FoldResult)
- end);
-
-output_map_view(Req, View, Group, Db, QueryArgs, Keys) ->
- #view_query_args{
- limit = Limit,
- skip = SkipCount
- } = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View, Keys),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, RowCount} = couch_view:get_row_count(View),
- FoldAccInit = {Limit, SkipCount, undefined, []},
- {LastReduce, FoldResult} = lists:foldl(fun(Key, {_, FoldAcc}) ->
- FoldlFun = make_view_fold_fun(Req, QueryArgs,
- CurrentEtag, Db, Group#group.current_seq, RowCount,
- #view_fold_helper_funs{
- reduce_count = fun couch_view:reduce_to_count/1
- }),
- {ok, LastReduce, FoldResult} = couch_view:fold(View, FoldlFun,
- FoldAcc, make_key_options(
- QueryArgs#view_query_args{start_key=Key, end_key=Key})),
- {LastReduce, FoldResult}
- end, {{[],[]}, FoldAccInit}, Keys),
- finish_view_fold(Req, RowCount, couch_view:reduce_to_count(LastReduce),
- FoldResult, [{update_seq,Group#group.current_seq}])
- end).
-
-output_reduce_view(Req, Db, View, Group, QueryArgs, nil) ->
- #view_query_args{
- limit = Limit,
- skip = Skip,
- group_level = GroupLevel
- } = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
- QueryArgs, CurrentEtag, Group#group.current_seq,
- #reduce_fold_helper_funs{}),
- FoldAccInit = {Limit, Skip, undefined, []},
- {ok, {_, _, Resp, _}} = couch_view:fold_reduce(View,
- RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
- make_key_options(QueryArgs)]),
- finish_reduce_fold(Req, Resp)
- end);
-
-output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) ->
- #view_query_args{
- limit = Limit,
- skip = Skip,
- group_level = GroupLevel
- } = QueryArgs,
- CurrentEtag = view_etag(Db, Group, View, Keys),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
- QueryArgs, CurrentEtag, Group#group.current_seq,
- #reduce_fold_helper_funs{}),
- {Resp, _RedAcc3} = lists:foldl(
- fun(Key, {Resp, RedAcc}) ->
- % run the reduce once for each key in keys, with limit etc
- % reapplied for each key
- FoldAccInit = {Limit, Skip, Resp, RedAcc},
- {_, {_, _, Resp2, RedAcc2}} = couch_view:fold_reduce(View,
- RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
- make_key_options(QueryArgs#view_query_args{
- start_key=Key, end_key=Key})]),
- % Switch to comma
- {Resp2, RedAcc2}
- end,
- {undefined, []}, Keys), % Start with no comma
- finish_reduce_fold(Req, Resp, [{update_seq,Group#group.current_seq}])
- end).
-
-reverse_key_default(?MIN_STR) -> ?MAX_STR;
-reverse_key_default(?MAX_STR) -> ?MIN_STR;
-reverse_key_default(Key) -> Key.
-
-get_stale_type(Req) ->
- list_to_existing_atom(couch_httpd:qs_value(Req, "stale", "nil")).
-
-get_reduce_type(Req) ->
- list_to_existing_atom(couch_httpd:qs_value(Req, "reduce", "true")).
-
-load_view(Req, Db, {ViewDesignId, ViewName}, Keys) ->
- Stale = get_stale_type(Req),
- Reduce = get_reduce_type(Req),
- case couch_view:get_map_view(Db, ViewDesignId, ViewName, Stale) of
- {ok, View, Group} ->
- QueryArgs = parse_view_params(Req, Keys, map),
- {map, View, Group, QueryArgs};
- {not_found, _Reason} ->
- case couch_view:get_reduce_view(Db, ViewDesignId, ViewName, Stale) of
- {ok, ReduceView, Group} ->
- case Reduce of
- false ->
- QueryArgs = parse_view_params(Req, Keys, map_red),
- MapView = couch_view:extract_map_view(ReduceView),
- {map, MapView, Group, QueryArgs};
- _ ->
- QueryArgs = parse_view_params(Req, Keys, reduce),
- {reduce, ReduceView, Group, QueryArgs}
- end;
- {not_found, Reason} ->
- throw({not_found, Reason})
- end
- end.
-
-% query_parse_error could be removed
-% we wouldn't need to pass the view type, it'd just parse params.
-% I'm not sure what to do about the error handling, but
-% it might simplify things to have a parse_view_params function
-% that doesn't throw().
-parse_view_params(Req, Keys, ViewType) ->
- QueryList = couch_httpd:qs(Req),
- QueryParams =
- lists:foldl(fun({K, V}, Acc) ->
- parse_view_param(K, V) ++ Acc
- end, [], QueryList),
- IsMultiGet = (Keys =/= nil),
- Args = #view_query_args{
- view_type=ViewType,
- multi_get=IsMultiGet
- },
- QueryArgs = lists:foldl(fun({K, V}, Args2) ->
- validate_view_query(K, V, Args2)
- end, Args, lists:reverse(QueryParams)), % Reverse to match QS order.
- warn_on_empty_key_range(QueryArgs),
- GroupLevel = QueryArgs#view_query_args.group_level,
- case {ViewType, GroupLevel, IsMultiGet} of
- {reduce, exact, true} ->
- QueryArgs;
- {reduce, _, false} ->
- QueryArgs;
- {reduce, _, _} ->
- % we can simplify code if we just drop this error message.
- Msg = <<"Multi-key fetchs for reduce "
- "view must include `group=true`">>,
- throw({query_parse_error, Msg});
- _ ->
- QueryArgs
- end,
- QueryArgs.
-
-parse_view_param("", _) ->
- [];
-parse_view_param("key", Value) ->
- JsonKey = ?JSON_DECODE(Value),
- [{start_key, JsonKey}, {end_key, JsonKey}];
-% TODO: maybe deprecate startkey_docid
-parse_view_param("startkey_docid", Value) ->
- [{start_docid, ?l2b(Value)}];
-parse_view_param("start_key_doc_id", Value) ->
- [{start_docid, ?l2b(Value)}];
-% TODO: maybe deprecate endkey_docid
-parse_view_param("endkey_docid", Value) ->
- [{end_docid, ?l2b(Value)}];
-parse_view_param("end_key_doc_id", Value) ->
- [{end_docid, ?l2b(Value)}];
-% TODO: maybe deprecate startkey
-parse_view_param("startkey", Value) ->
- [{start_key, ?JSON_DECODE(Value)}];
-parse_view_param("start_key", Value) ->
- [{start_key, ?JSON_DECODE(Value)}];
-% TODO: maybe deprecate endkey
-parse_view_param("endkey", Value) ->
- [{end_key, ?JSON_DECODE(Value)}];
-parse_view_param("end_key", Value) ->
- [{end_key, ?JSON_DECODE(Value)}];
-parse_view_param("limit", Value) ->
- [{limit, parse_positive_int_param(Value)}];
-parse_view_param("count", _Value) ->
- throw({query_parse_error, <<"Query parameter 'count' is now 'limit'.">>});
-parse_view_param("stale", "ok") ->
- [{stale, ok}];
-parse_view_param("stale", "update_after") ->
- [{stale, update_after}];
-parse_view_param("stale", _Value) ->
- throw({query_parse_error,
- <<"stale only available as stale=ok or as stale=update_after">>});
-parse_view_param("update", _Value) ->
- throw({query_parse_error, <<"update=false is now stale=ok">>});
-parse_view_param("descending", Value) ->
- [{descending, parse_bool_param(Value)}];
-parse_view_param("skip", Value) ->
- [{skip, parse_int_param(Value)}];
-parse_view_param("group", Value) ->
- case parse_bool_param(Value) of
- true -> [{group_level, exact}];
- false -> [{group_level, 0}]
- end;
-parse_view_param("group_level", Value) ->
- [{group_level, parse_positive_int_param(Value)}];
-parse_view_param("inclusive_end", Value) ->
- [{inclusive_end, parse_bool_param(Value)}];
-parse_view_param("reduce", Value) ->
- [{reduce, parse_bool_param(Value)}];
-parse_view_param("include_docs", Value) ->
- [{include_docs, parse_bool_param(Value)}];
-parse_view_param("conflicts", Value) ->
- [{conflicts, parse_bool_param(Value)}];
-parse_view_param("list", Value) ->
- [{list, ?l2b(Value)}];
-parse_view_param("callback", _) ->
- []; % Verified in the JSON response functions
-parse_view_param(Key, Value) ->
- [{extra, {Key, Value}}].
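-
-% For example (hypothetical request): ?key="abc" is parsed as
-% [{start_key, <<"abc">>}, {end_key, <<"abc">>}], i.e. an exact-key
-% lookup expressed as a one-element range.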
-
-warn_on_empty_key_range(#view_query_args{start_key=undefined}) ->
- ok;
-warn_on_empty_key_range(#view_query_args{end_key=undefined}) ->
- ok;
-warn_on_empty_key_range(#view_query_args{start_key=A, end_key=A}) ->
- ok;
-warn_on_empty_key_range(#view_query_args{
- start_key=StartKey, end_key=EndKey, direction=Dir}) ->
- case {Dir, couch_view:less_json(StartKey, EndKey)} of
- {fwd, false} ->
- throw({query_parse_error,
- <<"No rows can match your key range, reverse your ",
- "start_key and end_key or set descending=true">>});
- {rev, true} ->
- throw({query_parse_error,
- <<"No rows can match your key range, reverse your ",
- "start_key and end_key or set descending=false">>});
- _ -> ok
- end.
-
-validate_view_query(start_key, Value, Args) ->
- case Args#view_query_args.multi_get of
- true ->
- Msg = <<"Query parameter `start_key` is "
- "not compatible with multi-get">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{start_key=Value}
- end;
-validate_view_query(start_docid, Value, Args) ->
- Args#view_query_args{start_docid=Value};
-validate_view_query(end_key, Value, Args) ->
- case Args#view_query_args.multi_get of
- true->
- Msg = <<"Query parameter `end_key` is "
- "not compatible with multi-get">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{end_key=Value}
- end;
-validate_view_query(end_docid, Value, Args) ->
- Args#view_query_args{end_docid=Value};
-validate_view_query(limit, Value, Args) ->
- Args#view_query_args{limit=Value};
-validate_view_query(list, Value, Args) ->
- Args#view_query_args{list=Value};
-validate_view_query(stale, ok, Args) ->
- Args#view_query_args{stale=ok};
-validate_view_query(stale, update_after, Args) ->
- Args#view_query_args{stale=update_after};
-validate_view_query(stale, _, Args) ->
- Args;
-validate_view_query(descending, true, Args) ->
- case Args#view_query_args.direction of
- rev -> Args; % Already reversed
- fwd ->
- Args#view_query_args{
- direction = rev,
- start_docid =
- reverse_key_default(Args#view_query_args.start_docid),
- end_docid =
- reverse_key_default(Args#view_query_args.end_docid)
- }
- end;
-validate_view_query(descending, false, Args) ->
- Args; % Ignore default condition
-validate_view_query(skip, Value, Args) ->
- Args#view_query_args{skip=Value};
-validate_view_query(group_level, Value, Args) ->
- case Args#view_query_args.view_type of
- reduce ->
- Args#view_query_args{group_level=Value};
- _ ->
- Msg = <<"Invalid URL parameter 'group' or "
- " 'group_level' for non-reduce view.">>,
- throw({query_parse_error, Msg})
- end;
-validate_view_query(inclusive_end, Value, Args) ->
- Args#view_query_args{inclusive_end=Value};
-validate_view_query(reduce, false, Args) ->
- Args;
-validate_view_query(reduce, _, Args) ->
- case Args#view_query_args.view_type of
- map ->
- Msg = <<"Invalid URL parameter `reduce` for map view.">>,
- throw({query_parse_error, Msg});
- _ ->
- Args
- end;
-validate_view_query(include_docs, true, Args) ->
- case Args#view_query_args.view_type of
- reduce ->
- Msg = <<"Query parameter `include_docs` "
- "is invalid for reduce views.">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{include_docs=true}
- end;
-% Use the view_query_args record's default value
-validate_view_query(include_docs, _Value, Args) ->
- Args;
-validate_view_query(conflicts, true, Args) ->
- case Args#view_query_args.view_type of
- reduce ->
- Msg = <<"Query parameter `conflicts` "
- "is invalid for reduce views.">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{conflicts = true}
- end;
-validate_view_query(extra, _Value, Args) ->
- Args.
-
-make_view_fold_fun(Req, QueryArgs, Etag, Db, UpdateSeq, TotalViewCount, HelperFuns) ->
- #view_fold_helper_funs{
- start_response = StartRespFun,
- send_row = SendRowFun,
- reduce_count = ReduceCountFun
- } = apply_default_helper_funs(HelperFuns),
-
- #view_query_args{
- include_docs = IncludeDocs,
- conflicts = Conflicts
- } = QueryArgs,
-
- fun({{Key, DocId}, Value}, OffsetReds,
- {AccLimit, AccSkip, Resp, RowFunAcc}) ->
- case {AccLimit, AccSkip, Resp} of
- {0, _, _} ->
- % we've done "limit" rows, stop foldling
- {stop, {0, 0, Resp, RowFunAcc}};
- {_, AccSkip, _} when AccSkip > 0 ->
- % just keep skipping
- {ok, {AccLimit, AccSkip - 1, Resp, RowFunAcc}};
- {_, _, undefined} ->
- % rendering the first row, first we start the response
- Offset = ReduceCountFun(OffsetReds),
- {ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag,
- TotalViewCount, Offset, RowFunAcc, UpdateSeq),
- {Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value},
- IncludeDocs, Conflicts, RowFunAcc0),
- {Go, {AccLimit - 1, 0, Resp2, RowFunAcc2}};
- {AccLimit, _, Resp} when (AccLimit > 0) ->
- % rendering all other rows
- {Go, RowFunAcc2} = SendRowFun(Resp, Db, {{Key, DocId}, Value},
- IncludeDocs, Conflicts, RowFunAcc),
- {Go, {AccLimit - 1, 0, Resp, RowFunAcc2}}
- end
- end.
-
-make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, UpdateSeq, HelperFuns) ->
- #reduce_fold_helper_funs{
- start_response = StartRespFun,
- send_row = SendRowFun
- } = apply_default_helper_funs(HelperFuns),
-
- GroupRowsFun =
- fun({_Key1,_}, {_Key2,_}) when GroupLevel == 0 ->
- true;
- ({Key1,_}, {Key2,_})
- when is_integer(GroupLevel) and is_list(Key1) and is_list(Key2) ->
- lists:sublist(Key1, GroupLevel) == lists:sublist(Key2, GroupLevel);
- ({Key1,_}, {Key2,_}) ->
- Key1 == Key2
- end,
-
- RespFun = fun
- (_Key, _Red, {AccLimit, AccSkip, Resp, RowAcc}) when AccSkip > 0 ->
- % keep skipping
- {ok, {AccLimit, AccSkip - 1, Resp, RowAcc}};
- (_Key, _Red, {0, _AccSkip, Resp, RowAcc}) ->
- % we've exhausted limit rows, stop
- {stop, {0, _AccSkip, Resp, RowAcc}};
-
- (_Key, Red, {AccLimit, 0, undefined, RowAcc0}) when GroupLevel == 0 ->
- % we haven't started responding yet and group=false
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
- {Go, RowAcc2} = SendRowFun(Resp2, {null, Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
- (_Key, Red, {AccLimit, 0, Resp, RowAcc}) when GroupLevel == 0 ->
- % group=false but we've already started the response
- {Go, RowAcc2} = SendRowFun(Resp, {null, Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp, RowAcc2}};
-
- (Key, Red, {AccLimit, 0, undefined, RowAcc0})
- when is_integer(GroupLevel), is_list(Key) ->
- % group_level and we haven't responded yet
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
- {Go, RowAcc2} = SendRowFun(Resp2,
- {lists:sublist(Key, GroupLevel), Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
- (Key, Red, {AccLimit, 0, Resp, RowAcc})
- when is_integer(GroupLevel), is_list(Key) ->
- % group_level and we've already started the response
- {Go, RowAcc2} = SendRowFun(Resp,
- {lists:sublist(Key, GroupLevel), Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp, RowAcc2}};
-
- (Key, Red, {AccLimit, 0, undefined, RowAcc0}) ->
- % group=true and we haven't responded yet
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
- {Go, RowAcc2} = SendRowFun(Resp2, {Key, Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
- (Key, Red, {AccLimit, 0, Resp, RowAcc}) ->
- % group=true and we've already started the response
- {Go, RowAcc2} = SendRowFun(Resp, {Key, Red}, RowAcc),
- {Go, {AccLimit - 1, 0, Resp, RowAcc2}}
- end,
- {ok, GroupRowsFun, RespFun}.
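-
-% For illustration (hypothetical keys): with GroupLevel = 1, rows keyed
-% ["a","x"] and ["a","y"] satisfy GroupRowsFun and are reduced into a
-% single row emitted as {["a"], Red}, while a row keyed ["b","x"] starts
-% a new group.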
-
-apply_default_helper_funs(
- #view_fold_helper_funs{
- start_response = StartResp,
- send_row = SendRow
- }=Helpers) ->
- StartResp2 = case StartResp of
- undefined -> fun json_view_start_resp/6;
- _ -> StartResp
- end,
-
- SendRow2 = case SendRow of
- undefined -> fun send_json_view_row/6;
- _ -> SendRow
- end,
-
- Helpers#view_fold_helper_funs{
- start_response = StartResp2,
- send_row = SendRow2
- };
-
-
-apply_default_helper_funs(
- #reduce_fold_helper_funs{
- start_response = StartResp,
- send_row = SendRow
- }=Helpers) ->
- StartResp2 = case StartResp of
- undefined -> fun json_reduce_start_resp/4;
- _ -> StartResp
- end,
-
- SendRow2 = case SendRow of
- undefined -> fun send_json_reduce_row/3;
- _ -> SendRow
- end,
-
- Helpers#reduce_fold_helper_funs{
- start_response = StartResp2,
- send_row = SendRow2
- }.
-
-make_key_options(#view_query_args{direction = Dir}=QueryArgs) ->
- [{dir,Dir} | make_start_key_option(QueryArgs) ++
- make_end_key_option(QueryArgs)].
-
-make_start_key_option(
- #view_query_args{
- start_key = StartKey,
- start_docid = StartDocId}) ->
- if StartKey == undefined ->
- [];
- true ->
- [{start_key, {StartKey, StartDocId}}]
- end.
-
-make_end_key_option(#view_query_args{end_key = undefined}) ->
- [];
-make_end_key_option(
- #view_query_args{end_key = EndKey,
- end_docid = EndDocId,
- inclusive_end = true}) ->
- [{end_key, {EndKey, EndDocId}}];
-make_end_key_option(
- #view_query_args{
- end_key = EndKey,
- end_docid = EndDocId,
- inclusive_end = false}) ->
- [{end_key_gt, {EndKey,reverse_key_default(EndDocId)}}].
-
-json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc, UpdateSeq) ->
- {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
- BeginBody = case couch_httpd:qs_value(Req, "update_seq") of
- "true" ->
- io_lib:format(
- "{\"total_rows\":~w,\"update_seq\":~w,"
- "\"offset\":~w,\"rows\":[\r\n",
- [TotalViewCount, UpdateSeq, Offset]);
- _Else ->
- io_lib:format(
- "{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
- [TotalViewCount, Offset])
- end,
- {ok, Resp, BeginBody}.
-
-send_json_view_row(Resp, Db, Kv, IncludeDocs, Conflicts, RowFront) ->
- JsonObj = view_row_obj(Db, Kv, IncludeDocs, Conflicts),
- send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonObj)),
- {ok, ",\r\n"}.
-
-json_reduce_start_resp(Req, Etag, _Acc0, UpdateSeq) ->
- {ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
- case couch_httpd:qs_value(Req, "update_seq") of
- "true" ->
- {ok, Resp, io_lib:format("{\"update_seq\":~w,\"rows\":[\r\n",[UpdateSeq])};
- _Else ->
- {ok, Resp, "{\"rows\":[\r\n"}
- end.
-
-send_json_reduce_row(Resp, {Key, Value}, RowFront) ->
- send_chunk(Resp, RowFront ++ ?JSON_ENCODE({[{key, Key}, {value, Value}]})),
- {ok, ",\r\n"}.
-
-view_etag(Db, Group, View) ->
- view_etag(Db, Group, View, nil).
-
-view_etag(Db, Group, {reduce, _, _, View}, Extra) ->
- view_etag(Db, Group, View, Extra);
-view_etag(Db, Group, {temp_reduce, View}, Extra) ->
- view_etag(Db, Group, View, Extra);
-view_etag(_Db, #group{sig=Sig}, #view{update_seq=UpdateSeq, purge_seq=PurgeSeq}, Extra) ->
- couch_httpd:make_etag({Sig, UpdateSeq, PurgeSeq, Extra}).
-
-% the view row has an error
-view_row_obj(_Db, {{Key, error}, Value}, _IncludeDocs, _Conflicts) ->
- {[{key, Key}, {error, Value}]};
-% include docs in the view output
-view_row_obj(Db, {{Key, DocId}, {Props}}, true, Conflicts) ->
- Rev = case couch_util:get_value(<<"_rev">>, Props) of
- undefined ->
- nil;
- Rev0 ->
- couch_doc:parse_rev(Rev0)
- end,
- IncludeId = couch_util:get_value(<<"_id">>, Props, DocId),
- view_row_with_doc(Db, {{Key, DocId}, {Props}}, {IncludeId, Rev}, Conflicts);
-view_row_obj(Db, {{Key, DocId}, Value}, true, Conflicts) ->
- view_row_with_doc(Db, {{Key, DocId}, Value}, {DocId, nil}, Conflicts);
-% the normal case for rendering a view row
-view_row_obj(_Db, {{Key, DocId}, Value}, _IncludeDocs, _Conflicts) ->
- {[{id, DocId}, {key, Key}, {value, Value}]}.
-
-view_row_with_doc(Db, {{Key, DocId}, Value}, IdRev, Conflicts) ->
- {[{id, DocId}, {key, Key}, {value, Value}] ++
- doc_member(Db, IdRev, if Conflicts -> [conflicts]; true -> [] end)}.
-
-doc_member(Db, #doc_info{id = Id, revs = [#rev_info{rev = Rev} | _]} = Info,
- Options) ->
- ?LOG_DEBUG("Include Doc: ~p ~p", [Id, Rev]),
- case couch_db:open_doc(Db, Info, [deleted | Options]) of
- {ok, Doc} ->
- [{doc, couch_doc:to_json_obj(Doc, [])}];
- _ ->
- [{doc, null}]
- end;
-doc_member(Db, {DocId, Rev}, Options) ->
- ?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]),
- case (catch couch_httpd_db:couch_doc_open(Db, DocId, Rev, Options)) of
- #doc{} = Doc ->
- JsonDoc = couch_doc:to_json_obj(Doc, []),
- [{doc, JsonDoc}];
- _Else ->
- [{doc, null}]
- end.
-
-finish_view_fold(Req, TotalRows, Offset, FoldResult) ->
- finish_view_fold(Req, TotalRows, Offset, FoldResult, []).
-
-finish_view_fold(Req, TotalRows, Offset, FoldResult, Fields) ->
- case FoldResult of
- {_, _, undefined, _} ->
- % nothing found in the view or keys, nothing has been returned
- % send empty view
- send_json(Req, 200, {[
- {total_rows, TotalRows},
- {offset, Offset},
- {rows, []}
- ] ++ Fields});
- {_, _, Resp, _} ->
- % end the view
- send_chunk(Resp, "\r\n]}"),
- end_json_response(Resp)
- end.
-
-finish_reduce_fold(Req, Resp) ->
- finish_reduce_fold(Req, Resp, []).
-
-finish_reduce_fold(Req, Resp, Fields) ->
- case Resp of
- undefined ->
- send_json(Req, 200, {[
- {rows, []}
- ] ++ Fields});
- Resp ->
- send_chunk(Resp, "\r\n]}"),
- end_json_response(Resp)
- end.
-
-parse_bool_param(Val) ->
- case string:to_lower(Val) of
- "true" -> true;
- "false" -> false;
- _ ->
- Msg = io_lib:format("Invalid boolean parameter: ~p", [Val]),
- throw({query_parse_error, ?l2b(Msg)})
- end.
-
-parse_int_param(Val) ->
- case (catch list_to_integer(Val)) of
- IntVal when is_integer(IntVal) ->
- IntVal;
- _ ->
- Msg = io_lib:format("Invalid value for integer parameter: ~p", [Val]),
- throw({query_parse_error, ?l2b(Msg)})
- end.
-
-parse_positive_int_param(Val) ->
- case parse_int_param(Val) of
- IntVal when IntVal >= 0 ->
- IntVal;
- _ ->
- Fmt = "Invalid value for positive integer parameter: ~p",
- Msg = io_lib:format(Fmt, [Val]),
- throw({query_parse_error, ?l2b(Msg)})
- end.
-
diff --git a/1.1.x/src/couchdb/couch_js_functions.hrl b/1.1.x/src/couchdb/couch_js_functions.hrl
deleted file mode 100644
index 0cc49d62..00000000
--- a/1.1.x/src/couchdb/couch_js_functions.hrl
+++ /dev/null
@@ -1,226 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(AUTH_DB_DOC_VALIDATE_FUNCTION, <<"
- function(newDoc, oldDoc, userCtx) {
- if (newDoc._deleted === true) {
- // allow deletes by admins and matching users
- // without checking the other fields
- if ((userCtx.roles.indexOf('_admin') !== -1) ||
- (userCtx.name == oldDoc.name)) {
- return;
- } else {
- throw({forbidden: 'Only admins may delete other user docs.'});
- }
- }
-
- if ((oldDoc && oldDoc.type !== 'user') || newDoc.type !== 'user') {
- throw({forbidden : 'doc.type must be user'});
- } // we only allow user docs for now
-
- if (!newDoc.name) {
- throw({forbidden: 'doc.name is required'});
- }
-
- if (newDoc.roles && !isArray(newDoc.roles)) {
- throw({forbidden: 'doc.roles must be an array'});
- }
-
- if (newDoc._id !== ('org.couchdb.user:' + newDoc.name)) {
- throw({
- forbidden: 'Doc ID must be of the form org.couchdb.user:name'
- });
- }
-
- if (oldDoc) { // validate all updates
- if (oldDoc.name !== newDoc.name) {
- throw({forbidden: 'Usernames can not be changed.'});
- }
- }
-
- if (newDoc.password_sha && !newDoc.salt) {
- throw({
- forbidden: 'Users with password_sha must have a salt. ' +
- 'See /_utils/script/couch.js for example code.'
- });
- }
-
- if (userCtx.roles.indexOf('_admin') === -1) {
- if (oldDoc) { // validate non-admin updates
- if (userCtx.name !== newDoc.name) {
- throw({
- forbidden: 'You may only update your own user document.'
- });
- }
- // validate role updates
- var oldRoles = oldDoc.roles.sort();
- var newRoles = newDoc.roles.sort();
-
- if (oldRoles.length !== newRoles.length) {
- throw({forbidden: 'Only _admin may edit roles'});
- }
-
- for (var i = 0; i < oldRoles.length; i++) {
- if (oldRoles[i] !== newRoles[i]) {
- throw({forbidden: 'Only _admin may edit roles'});
- }
- }
- } else if (newDoc.roles.length > 0) {
- throw({forbidden: 'Only _admin may set roles'});
- }
- }
-
- // no system roles in users db
- for (var i = 0; i < newDoc.roles.length; i++) {
- if (newDoc.roles[i][0] === '_') {
- throw({
- forbidden:
- 'No system roles (starting with underscore) in users db.'
- });
- }
- }
-
- // no system names as names
- if (newDoc.name[0] === '_') {
- throw({forbidden: 'Username may not start with underscore.'});
- }
- }
-">>).
-
-
--define(REP_DB_DOC_VALIDATE_FUN, <<"
- function(newDoc, oldDoc, userCtx) {
- function reportError(error_msg) {
- log('Error writing document `' + newDoc._id +
- '\\' to the replicator database: ' + error_msg);
- throw({forbidden: error_msg});
- }
-
- function validateEndpoint(endpoint, fieldName) {
- if ((typeof endpoint !== 'string') &&
- ((typeof endpoint !== 'object') || (endpoint === null))) {
-
- reportError('The `' + fieldName + '\\' property must exist' +
- ' and be either a string or an object.');
- }
-
- if (typeof endpoint === 'object') {
- if ((typeof endpoint.url !== 'string') || !endpoint.url) {
- reportError('The url property must exist in the `' +
- fieldName + '\\' field and must be a non-empty string.');
- }
-
- if ((typeof endpoint.auth !== 'undefined') &&
- ((typeof endpoint.auth !== 'object') ||
- endpoint.auth === null)) {
-
- reportError('`' + fieldName +
- '.auth\\' must be a non-null object.');
- }
-
- if ((typeof endpoint.headers !== 'undefined') &&
- ((typeof endpoint.headers !== 'object') ||
- endpoint.headers === null)) {
-
- reportError('`' + fieldName +
- '.headers\\' must be a non-null object.');
- }
- }
- }
-
- var isReplicator = (userCtx.roles.indexOf('_replicator') >= 0);
- var isAdmin = (userCtx.roles.indexOf('_admin') >= 0);
-
- if (oldDoc && !newDoc._deleted && !isReplicator) {
- reportError('Only the replicator can edit replication documents.');
- }
-
- if (!newDoc._deleted) {
- validateEndpoint(newDoc.source, 'source');
- validateEndpoint(newDoc.target, 'target');
-
- if ((typeof newDoc.create_target !== 'undefined') &&
- (typeof newDoc.create_target !== 'boolean')) {
-
- reportError('The `create_target\\' field must be a boolean.');
- }
-
- if ((typeof newDoc.continuous !== 'undefined') &&
- (typeof newDoc.continuous !== 'boolean')) {
-
- reportError('The `continuous\\' field must be a boolean.');
- }
-
- if ((typeof newDoc.doc_ids !== 'undefined') &&
- !isArray(newDoc.doc_ids)) {
-
- reportError('The `doc_ids\\' field must be an array of strings.');
- }
-
- if ((typeof newDoc.filter !== 'undefined') &&
- ((typeof newDoc.filter !== 'string') || !newDoc.filter)) {
-
- reportError('The `filter\\' field must be a non-empty string.');
- }
-
- if ((typeof newDoc.query_params !== 'undefined') &&
- ((typeof newDoc.query_params !== 'object') ||
- newDoc.query_params === null)) {
-
- reportError('The `query_params\\' field must be an object.');
- }
-
- if (newDoc.user_ctx) {
- if (!isAdmin) {
- reportError('Delegated replications (use of the ' +
- '`user_ctx\\' property) can only be triggered by ' +
- 'administrators.');
- }
-
- var user_ctx = newDoc.user_ctx;
-
- if ((typeof user_ctx !== 'object') || (user_ctx === null)) {
- reportError('The `user_ctx\\' property must be a ' +
- 'non-null object.');
- }
-
- if (!(user_ctx.name === null ||
- (typeof user_ctx.name === 'undefined') ||
- ((typeof user_ctx.name === 'string') &&
- user_ctx.name.length > 0))) {
-
- reportError('The `user_ctx.name\\' property must be a ' +
- 'non-empty string or null.');
- }
-
- if (user_ctx.roles && !isArray(user_ctx.roles)) {
- reportError('The `user_ctx.roles\\' property must be ' +
- 'an array of strings.');
- }
-
- if (user_ctx.roles) {
- for (var i = 0; i < user_ctx.roles.length; i++) {
- var role = user_ctx.roles[i];
-
- if (typeof role !== 'string' || role.length === 0) {
- reportError('Roles must be non-empty strings.');
- }
- if (role[0] === '_') {
- reportError('System roles (starting with an ' +
- 'underscore) are not allowed.');
- }
- }
- }
- }
- }
- }
-">>).
diff --git a/1.1.x/src/couchdb/couch_key_tree.erl b/1.1.x/src/couchdb/couch_key_tree.erl
deleted file mode 100644
index bc723cc2..00000000
--- a/1.1.x/src/couchdb/couch_key_tree.erl
+++ /dev/null
@@ -1,332 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_key_tree).
-
--export([merge/3, find_missing/2, get_key_leafs/2, get_full_key_paths/2, get/2]).
--export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
- get_all_leafs_full/1,stem/2,map_leafs/2]).
-
-% Tree::term() is really a tree(), but we don't want to require R13B04 yet
--type branch() :: {Key::term(), Value::term(), Tree::term()}.
--type path() :: {Start::pos_integer(), branch()}.
--type tree() :: [branch()]. % sorted by key
-
-% Partial trees are arranged by how much they have been cut off (stemmed);
-% the Start position records the depth at which each path begins.
-
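-% For illustration, with hypothetical revision keys: the linear history
-% a -> b is the single path {1, {"a", Va, [{"b", Vb, []}]}}. Merging in
-% the path {1, {"a", Va, [{"c", Vc, []}]}} yields one tree with two
-% branches, {1, {"a", Va, [{"b", Vb, []}, {"c", Vc, []}]}}, and merge/3
-% reports `conflicts'.
-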
--spec merge([path()], path(), pos_integer()) -> {[path()],
- conflicts | no_conflicts}.
-merge(Paths, Path, Depth) ->
- {Merged, Conflicts} = merge(Paths, Path),
- {stem(Merged, Depth), Conflicts}.
-
--spec merge([path()], path()) -> {[path()], conflicts | no_conflicts}.
-merge(Paths, Path) ->
- {ok, Merged, HasConflicts} = merge_one(Paths, Path, [], false),
- if HasConflicts ->
- Conflicts = conflicts;
- (length(Merged) =/= length(Paths)) and (length(Merged) =/= 1) ->
- Conflicts = conflicts;
- true ->
- Conflicts = no_conflicts
- end,
- {lists:sort(Merged), Conflicts}.
-
--spec merge_one(Original::[path()], Inserted::path(), [path()], boolean()) ->
- {ok, Merged::[path()], NewConflicts::boolean()}.
-merge_one([], Insert, OutAcc, ConflictsAcc) ->
- {ok, [Insert | OutAcc], ConflictsAcc};
-merge_one([{Start, Tree}|Rest], {StartInsert, TreeInsert}, Acc, HasConflicts) ->
- case merge_at([Tree], StartInsert - Start, [TreeInsert]) of
- {ok, [Merged], Conflicts} ->
- MergedStart = lists:min([Start, StartInsert]),
- {ok, Rest ++ [{MergedStart, Merged} | Acc], Conflicts or HasConflicts};
- no ->
- AccOut = [{Start, Tree} | Acc],
- merge_one(Rest, {StartInsert, TreeInsert}, AccOut, HasConflicts)
- end.
-
--spec merge_at(tree(), Place::integer(), tree()) ->
- {ok, Merged::tree(), HasConflicts::boolean()} | no.
-merge_at(_Ours, _Place, []) ->
- no;
-merge_at([], _Place, _Insert) ->
- no;
-merge_at([{Key, Value, SubTree}|Sibs], Place, InsertTree) when Place > 0 ->
- % inserted starts later than committed, need to drill into committed subtree
- case merge_at(SubTree, Place - 1, InsertTree) of
- {ok, Merged, Conflicts} ->
- {ok, [{Key, Value, Merged} | Sibs], Conflicts};
- no ->
- case merge_at(Sibs, Place, InsertTree) of
- {ok, Merged, Conflicts} ->
- {ok, [{Key, Value, SubTree} | Merged], Conflicts};
- no ->
- no
- end
- end;
-merge_at(OurTree, Place, [{Key, Value, SubTree}]) when Place < 0 ->
- % inserted starts earlier than committed, need to drill into insert subtree
- case merge_at(OurTree, Place + 1, SubTree) of
- {ok, Merged, Conflicts} ->
- {ok, [{Key, Value, Merged}], Conflicts};
- no ->
- no
- end;
-merge_at([{Key, Value, SubTree}|Sibs], 0, [{Key, _Value, InsertSubTree}]) ->
- {Merged, Conflicts} = merge_simple(SubTree, InsertSubTree),
- {ok, [{Key, Value, Merged} | Sibs], Conflicts};
-merge_at([{OurKey, _, _} | _], 0, [{Key, _, _}]) when OurKey > Key ->
- % siblings keys are ordered, no point in continuing
- no;
-merge_at([Tree | Sibs], 0, InsertTree) ->
- case merge_at(Sibs, 0, InsertTree) of
- {ok, Merged, Conflicts} ->
- {ok, [Tree | Merged], Conflicts};
- no ->
- no
- end.
-
-% key tree functions
-
--spec merge_simple(tree(), tree()) -> {Merged::tree(), NewConflicts::boolean()}.
-merge_simple([], B) ->
- {B, false};
-merge_simple(A, []) ->
- {A, false};
-merge_simple([{Key, Value, SubA} | NextA], [{Key, _, SubB} | NextB]) ->
- {MergedSubTree, Conflict1} = merge_simple(SubA, SubB),
- {MergedNextTree, Conflict2} = merge_simple(NextA, NextB),
- {[{Key, Value, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
-merge_simple([{A, _, _} = Tree | Next], [{B, _, _} | _] = Insert) when A < B ->
- {Merged, _} = merge_simple(Next, Insert),
- {[Tree | Merged], true};
-merge_simple(Ours, [Tree | Next]) ->
- {Merged, _} = merge_simple(Ours, Next),
- {[Tree | Merged], true}.
-
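-% find_missing/2 returns the subset of the given {Pos, Key} search pairs
-% that appear nowhere in the trees. For example (hypothetical keys),
-% searching [{1, "1-abc"}, {9, "9-xyz"}] in a tree that only contains
-% "1-abc" at position 1 returns [{9, "9-xyz"}].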
-find_missing(_Tree, []) ->
- [];
-find_missing([], SearchKeys) ->
-    SearchKeys;
-find_missing([{Start, {Key, Value, SubTree}} | RestTree], SearchKeys) ->
-    PossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SearchKeys, KeyPos >= Start],
-    ImpossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SearchKeys, KeyPos < Start],
- Missing = find_missing_simple(Start, [{Key, Value, SubTree}], PossibleKeys),
- find_missing(RestTree, ImpossibleKeys ++ Missing).
-
-find_missing_simple(_Pos, _Tree, []) ->
- [];
-find_missing_simple(_Pos, [], SearchKeys) ->
-    SearchKeys;
-find_missing_simple(Pos, [{Key, _, SubTree} | RestTree], SearchKeys) ->
-    PossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SearchKeys, KeyPos >= Pos],
-    ImpossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SearchKeys, KeyPos < Pos],
-
- SrcKeys2 = PossibleKeys -- [{Pos, Key}],
- SrcKeys3 = find_missing_simple(Pos + 1, SubTree, SrcKeys2),
- ImpossibleKeys ++ find_missing_simple(Pos, RestTree, SrcKeys3).
-
-
-filter_leafs([], _Keys, FilteredAcc, RemovedKeysAcc) ->
- {FilteredAcc, RemovedKeysAcc};
-filter_leafs([{Pos, [{LeafKey, _}|_]} = Path |Rest], Keys, FilteredAcc, RemovedKeysAcc) ->
- FilteredKeys = lists:delete({Pos, LeafKey}, Keys),
- if FilteredKeys == Keys ->
- % this leaf is not a key we are looking to remove
- filter_leafs(Rest, Keys, [Path | FilteredAcc], RemovedKeysAcc);
- true ->
- % this did match a key, remove both the node and the input key
- filter_leafs(Rest, FilteredKeys, FilteredAcc, [{Pos, LeafKey} | RemovedKeysAcc])
- end.
-
-% Removes any branches from the tree whose leaf node(s) are in the Keys
-remove_leafs(Trees, Keys) ->
- % flatten each branch in a tree into a tree path
- Paths = get_all_leafs_full(Trees),
-
- % filter out any that are in the keys list.
- {FilteredPaths, RemovedKeys} = filter_leafs(Paths, Keys, [], []),
-
- % convert paths back to trees
- NewTree = lists:foldl(
- fun({PathPos, Path},TreeAcc) ->
- [SingleTree] = lists:foldl(
- fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
- NewTrees
- end, [], FilteredPaths),
- {NewTree, RemovedKeys}.
-
-
-% Get the leafs in the tree matching the keys. The matching key nodes can
-% be leafs or inner nodes. If an inner node matches, the leafs beneath it
-% are returned.
-get_key_leafs(Tree, Keys) ->
- get_key_leafs(Tree, Keys, []).
-
-get_key_leafs(_, [], Acc) ->
- {Acc, []};
-get_key_leafs([], Keys, Acc) ->
- {Acc, Keys};
-get_key_leafs([{Pos, Tree}|Rest], Keys, Acc) ->
- {Gotten, RemainingKeys} = get_key_leafs_simple(Pos, [Tree], Keys, []),
- get_key_leafs(Rest, RemainingKeys, Gotten ++ Acc).
-
-get_key_leafs_simple(_Pos, _Tree, [], _KeyPathAcc) ->
- {[], []};
-get_key_leafs_simple(_Pos, [], KeysToGet, _KeyPathAcc) ->
- {[], KeysToGet};
-get_key_leafs_simple(Pos, [{Key, _Value, SubTree}=Tree | RestTree], KeysToGet, KeyPathAcc) ->
- case lists:delete({Pos, Key}, KeysToGet) of
- KeysToGet -> % same list, key not found
- {LeafsFound, KeysToGet2} = get_key_leafs_simple(Pos + 1, SubTree, KeysToGet, [Key | KeyPathAcc]),
- {RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet2, KeyPathAcc),
- {LeafsFound ++ RestLeafsFound, KeysRemaining};
-    KeysToGet2 ->
-        LeafsFound = get_all_leafs_simple(Pos, [Tree], KeyPathAcc),
-        % each leaf is {Value, {LeafPos, [LeafKey | _]}}; convert back to
-        % the {Pos, Key} form used in KeysToGet before subtracting
-        LeafKeysFound = [{LeafPos, LeafKey} || {_, {LeafPos, [LeafKey | _]}} <- LeafsFound],
-        KeysToGet3 = KeysToGet2 -- LeafKeysFound,
-        {RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet3, KeyPathAcc),
-        {LeafsFound ++ RestLeafsFound, KeysRemaining}
- end.
-
-get(Tree, KeysToGet) ->
- {KeyPaths, KeysNotFound} = get_full_key_paths(Tree, KeysToGet),
- FixedResults = [ {Value, {Pos, [Key0 || {Key0, _} <- Path]}} || {Pos, [{_Key, Value}|_]=Path} <- KeyPaths],
- {FixedResults, KeysNotFound}.
-
-get_full_key_paths(Tree, Keys) ->
- get_full_key_paths(Tree, Keys, []).
-
-get_full_key_paths(_, [], Acc) ->
- {Acc, []};
-get_full_key_paths([], Keys, Acc) ->
- {Acc, Keys};
-get_full_key_paths([{Pos, Tree}|Rest], Keys, Acc) ->
- {Gotten, RemainingKeys} = get_full_key_paths(Pos, [Tree], Keys, []),
- get_full_key_paths(Rest, RemainingKeys, Gotten ++ Acc).
-
-
-get_full_key_paths(_Pos, _Tree, [], _KeyPathAcc) ->
- {[], []};
-get_full_key_paths(_Pos, [], KeysToGet, _KeyPathAcc) ->
- {[], KeysToGet};
-get_full_key_paths(Pos, [{KeyId, Value, SubTree} | RestTree], KeysToGet, KeyPathAcc) ->
- KeysToGet2 = KeysToGet -- [{Pos, KeyId}],
- CurrentNodeResult =
- case length(KeysToGet2) =:= length(KeysToGet) of
- true -> % not in the key list.
- [];
-        false -> % this node is in the key list; return it
- [{Pos, [{KeyId, Value} | KeyPathAcc]}]
- end,
- {KeysGotten, KeysRemaining} = get_full_key_paths(Pos + 1, SubTree, KeysToGet2, [{KeyId, Value} | KeyPathAcc]),
- {KeysGotten2, KeysRemaining2} = get_full_key_paths(Pos, RestTree, KeysRemaining, KeyPathAcc),
- {CurrentNodeResult ++ KeysGotten ++ KeysGotten2, KeysRemaining2}.
-
-get_all_leafs_full(Tree) ->
- get_all_leafs_full(Tree, []).
-
-get_all_leafs_full([], Acc) ->
- Acc;
-get_all_leafs_full([{Pos, Tree} | Rest], Acc) ->
- get_all_leafs_full(Rest, get_all_leafs_full_simple(Pos, [Tree], []) ++ Acc).
-
-get_all_leafs_full_simple(_Pos, [], _KeyPathAcc) ->
- [];
-get_all_leafs_full_simple(Pos, [{KeyId, Value, []} | RestTree], KeyPathAcc) ->
- [{Pos, [{KeyId, Value} | KeyPathAcc]} | get_all_leafs_full_simple(Pos, RestTree, KeyPathAcc)];
-get_all_leafs_full_simple(Pos, [{KeyId, Value, SubTree} | RestTree], KeyPathAcc) ->
- get_all_leafs_full_simple(Pos + 1, SubTree, [{KeyId, Value} | KeyPathAcc]) ++ get_all_leafs_full_simple(Pos, RestTree, KeyPathAcc).
-
-get_all_leafs(Trees) ->
- get_all_leafs(Trees, []).
-
-get_all_leafs([], Acc) ->
- Acc;
-get_all_leafs([{Pos, Tree}|Rest], Acc) ->
- get_all_leafs(Rest, get_all_leafs_simple(Pos, [Tree], []) ++ Acc).
-
-get_all_leafs_simple(_Pos, [], _KeyPathAcc) ->
- [];
-get_all_leafs_simple(Pos, [{KeyId, Value, []} | RestTree], KeyPathAcc) ->
- [{Value, {Pos, [KeyId | KeyPathAcc]}} | get_all_leafs_simple(Pos, RestTree, KeyPathAcc)];
-get_all_leafs_simple(Pos, [{KeyId, _Value, SubTree} | RestTree], KeyPathAcc) ->
- get_all_leafs_simple(Pos + 1, SubTree, [KeyId | KeyPathAcc]) ++ get_all_leafs_simple(Pos, RestTree, KeyPathAcc).
-
-
-count_leafs([]) ->
- 0;
-count_leafs([{_Pos,Tree}|Rest]) ->
- count_leafs_simple([Tree]) + count_leafs(Rest).
-
-count_leafs_simple([]) ->
- 0;
-count_leafs_simple([{_Key, _Value, []} | RestTree]) ->
- 1 + count_leafs_simple(RestTree);
-count_leafs_simple([{_Key, _Value, SubTree} | RestTree]) ->
- count_leafs_simple(SubTree) + count_leafs_simple(RestTree).
-
-
-map(_Fun, []) ->
- [];
-map(Fun, [{Pos, Tree}|Rest]) ->
- case erlang:fun_info(Fun, arity) of
- {arity, 2} ->
- [NewTree] = map_simple(fun(A,B,_C) -> Fun(A,B) end, Pos, [Tree]),
- [{Pos, NewTree} | map(Fun, Rest)];
- {arity, 3} ->
- [NewTree] = map_simple(Fun, Pos, [Tree]),
- [{Pos, NewTree} | map(Fun, Rest)]
- end.
-
-map_simple(_Fun, _Pos, []) ->
- [];
-map_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
- Value2 = Fun({Pos, Key}, Value,
- if SubTree == [] -> leaf; true -> branch end),
- [{Key, Value2, map_simple(Fun, Pos + 1, SubTree)} | map_simple(Fun, Pos, RestTree)].
-
-
-map_leafs(_Fun, []) ->
- [];
-map_leafs(Fun, [{Pos, Tree}|Rest]) ->
- [NewTree] = map_leafs_simple(Fun, Pos, [Tree]),
- [{Pos, NewTree} | map_leafs(Fun, Rest)].
-
-map_leafs_simple(_Fun, _Pos, []) ->
- [];
-map_leafs_simple(Fun, Pos, [{Key, Value, []} | RestTree]) ->
- Value2 = Fun({Pos, Key}, Value),
- [{Key, Value2, []} | map_leafs_simple(Fun, Pos, RestTree)];
-map_leafs_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
- [{Key, Value, map_leafs_simple(Fun, Pos + 1, SubTree)} | map_leafs_simple(Fun, Pos, RestTree)].
-
-
-stem(Trees, Limit) ->
- % flatten each branch in a tree into a tree path
- Paths = get_all_leafs_full(Trees),
-
- Paths2 = [{Pos, lists:sublist(Path, Limit)} || {Pos, Path} <- Paths],
-
- % convert paths back to trees
- lists:foldl(
- fun({PathPos, Path},TreeAcc) ->
- [SingleTree] = lists:foldl(
- fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
- NewTrees
- end, [], Paths2).
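-
-% For example (hypothetical revisions): stemming a single path of three
-% revisions r1->r2->r3 with Limit = 2 drops the root, so
-%   [{1, {r1, V1, [{r2, V2, [{r3, V3, []}]}]}}]
-% becomes
-%   [{2, {r2, V2, [{r3, V3, []}]}}]
-% i.e. the path now starts at position 2 and keeps only the two
-% leaf-most revisions.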
-
-% Tests moved to test/etap/06?-*.t
-
diff --git a/1.1.x/src/couchdb/couch_log.erl b/1.1.x/src/couchdb/couch_log.erl
deleted file mode 100644
index b3d3297c..00000000
--- a/1.1.x/src/couchdb/couch_log.erl
+++ /dev/null
@@ -1,193 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_log).
--behaviour(gen_event).
-
--export([start_link/0,stop/0]).
--export([debug/2, info/2, error/2]).
--export([debug_on/0,info_on/0,get_level/0,get_level_integer/0, set_level/1]).
--export([init/1, handle_event/2, terminate/2, code_change/3, handle_info/2, handle_call/2]).
--export([read/2]).
-
--define(LEVEL_ERROR, 3).
--define(LEVEL_INFO, 2).
--define(LEVEL_DEBUG, 1).
--define(LEVEL_TMI, 0).
-
-debug(Format, Args) ->
- case debug_on() of
- false ->
- ok;
- true ->
- {ConsoleMsg, FileMsg} = get_log_messages(self(), debug, Format, Args),
- gen_event:sync_notify(error_logger, {couch_debug, ConsoleMsg, FileMsg})
- end.
-
-info(Format, Args) ->
- case info_on() of
- false ->
- ok;
- true ->
- {ConsoleMsg, FileMsg} = get_log_messages(self(), info, Format, Args),
- gen_event:sync_notify(error_logger, {couch_info, ConsoleMsg, FileMsg})
- end.
-
-error(Format, Args) ->
- {ConsoleMsg, FileMsg} = get_log_messages(self(), error, Format, Args),
- gen_event:sync_notify(error_logger, {couch_error, ConsoleMsg, FileMsg}).
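-
-% Typical usage goes through the ?LOG_DEBUG/?LOG_INFO/?LOG_ERROR macros in
-% couch_db.hrl, which expand to these calls, e.g. (illustrative message):
-%
-%   couch_log:info("Opened database ~s", [DbName]).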
-
-
-level_integer(error) -> ?LEVEL_ERROR;
-level_integer(info) -> ?LEVEL_INFO;
-level_integer(debug) -> ?LEVEL_DEBUG;
-level_integer(tmi) -> ?LEVEL_TMI;
-level_integer(_Else) -> ?LEVEL_ERROR. % anything else defaults to ERROR level
-
-level_atom(?LEVEL_ERROR) -> error;
-level_atom(?LEVEL_INFO) -> info;
-level_atom(?LEVEL_DEBUG) -> debug;
-level_atom(?LEVEL_TMI) -> tmi.
-
-
-start_link() ->
- couch_event_sup:start_link({local, couch_log}, error_logger, couch_log, []).
-
-stop() ->
- couch_event_sup:stop(couch_log).
-
-init([]) ->
- % read config and register for configuration changes
-
-    % just stop if one of the config settings changes. couch_server_sup
- % will restart us and then we will pick up the new settings.
- ok = couch_config:register(
- fun("log", "file") ->
- ?MODULE:stop();
- ("log", "level") ->
- ?MODULE:stop();
- ("log", "include_sasl") ->
- ?MODULE:stop()
- end),
-
- Filename = couch_config:get("log", "file", "couchdb.log"),
- Level = level_integer(list_to_atom(couch_config:get("log", "level", "info"))),
- Sasl = list_to_atom(couch_config:get("log", "include_sasl", "true")),
-
- case ets:info(?MODULE) of
- undefined -> ets:new(?MODULE, [named_table]);
- _ -> ok
- end,
- ets:insert(?MODULE, {level, Level}),
-
- case file:open(Filename, [append]) of
- {ok, Fd} ->
- {ok, {Fd, Level, Sasl}};
- {error, eacces} ->
- {stop, {file_permission_error, Filename}};
- Error ->
- {stop, Error}
- end.
-
-debug_on() ->
- get_level_integer() =< ?LEVEL_DEBUG.
-
-info_on() ->
- get_level_integer() =< ?LEVEL_INFO.
-
-set_level(LevelAtom) ->
- set_level_integer(level_integer(LevelAtom)).
-
-get_level() ->
- level_atom(get_level_integer()).
-
-get_level_integer() ->
- try
- ets:lookup_element(?MODULE, level, 2)
- catch error:badarg ->
- ?LEVEL_ERROR
- end.
-
-set_level_integer(Int) ->
- gen_event:call(error_logger, couch_log, {set_level_integer, Int}).
-
-handle_event({couch_error, ConMsg, FileMsg}, {Fd, _LogLevel, _Sasl}=State) ->
- log(Fd, ConMsg, FileMsg),
- {ok, State};
-handle_event({couch_info, ConMsg, FileMsg}, {Fd, LogLevel, _Sasl}=State)
-when LogLevel =< ?LEVEL_INFO ->
- log(Fd, ConMsg, FileMsg),
- {ok, State};
-handle_event({couch_debug, ConMsg, FileMsg}, {Fd, LogLevel, _Sasl}=State)
-when LogLevel =< ?LEVEL_DEBUG ->
- log(Fd, ConMsg, FileMsg),
- {ok, State};
-handle_event({error_report, _, {Pid, _, _}}=Event, {Fd, _LogLevel, Sasl}=State)
-when Sasl =/= false ->
- {ConMsg, FileMsg} = get_log_messages(Pid, error, "~p", [Event]),
- log(Fd, ConMsg, FileMsg),
- {ok, State};
-handle_event({error, _, {Pid, Format, Args}}, {Fd, _LogLevel, Sasl}=State)
-when Sasl =/= false ->
- {ConMsg, FileMsg} = get_log_messages(Pid, error, Format, Args),
- log(Fd, ConMsg, FileMsg),
- {ok, State};
-handle_event({_, _, {Pid, _, _}}=Event, {Fd, LogLevel, _Sasl}=State)
-when LogLevel =< ?LEVEL_TMI ->
- % log every remaining event if tmi!
- log(Fd, Pid, tmi, "~p", [Event]),
- {ok, State};
-handle_event(_Event, State) ->
- {ok, State}.
-
-handle_call({set_level_integer, NewLevel}, {Fd, _LogLevel, Sasl}) ->
- ets:insert(?MODULE, {level, NewLevel}),
- {ok, ok, {Fd, NewLevel, Sasl}}.
-
-handle_info(_Info, State) ->
- {ok, State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-terminate(_Arg, {Fd, _LoggingLevel, _Sasl}) ->
- file:close(Fd).
-
-log(Fd, Pid, Level, Format, Args) ->
- Msg = io_lib:format(Format, Args),
- ok = io:format("[~s] [~p] ~s~n", [Level, Pid, Msg]), % dump to console too
- Msg2 = re:replace(lists:flatten(Msg),"\\r\\n|\\r|\\n", "\r\n",
- [global, {return, list}]),
- ok = io:format(Fd, "[~s] [~s] [~p] ~s\r~n", [httpd_util:rfc1123_date(), Level, Pid, Msg2]).
-
-log(Fd, ConsoleMsg, FileMsg) ->
- ok = io:put_chars(ConsoleMsg),
- ok = io:put_chars(Fd, FileMsg).
-
-get_log_messages(Pid, Level, Format, Args) ->
- ConsoleMsg = unicode:characters_to_binary(io_lib:format(
- "[~s] [~p] " ++ Format ++ "~n", [Level, Pid | Args])),
- FileMsg = ["[", httpd_util:rfc1123_date(), "] ", ConsoleMsg],
- {ConsoleMsg, iolist_to_binary(FileMsg)}.
-
-read(Bytes, Offset) ->
- LogFileName = couch_config:get("log", "file"),
- LogFileSize = filelib:file_size(LogFileName),
-
- {ok, Fd} = file:open(LogFileName, [read]),
- Start = lists:max([LogFileSize - Bytes, 0]) + Offset,
-
- % TODO: truncate chopped first line
- % TODO: make streaming
-
- {ok, Chunk} = file:pread(Fd, Start, LogFileSize),
- Chunk.
diff --git a/1.1.x/src/couchdb/couch_native_process.erl b/1.1.x/src/couchdb/couch_native_process.erl
deleted file mode 100644
index b512f712..00000000
--- a/1.1.x/src/couchdb/couch_native_process.erl
+++ /dev/null
@@ -1,402 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License");
-% you may not use this file except in compliance with the License.
-%
-% You may obtain a copy of the License at
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-% either express or implied.
-%
-% See the License for the specific language governing permissions
-% and limitations under the License.
-%
-% This file drew much inspiration from erlview, which was written by and
-% copyright Michael McDaniel [http://autosys.us], and is also under APL 2.0
-%
-%
-% This module provides the smallest possible native view-server.
-% With this module in-place, you can add the following to your couch INI files:
-% [native_query_servers]
-% erlang={couch_native_process, start_link, []}
-%
-% This will then allow the following example map function to be used:
-%
-% fun({Doc}) ->
-% % Below, we emit a single record - the _id as key, null as value
-%     DocId = couch_util:get_value(<<"_id">>, Doc, null),
-% Emit(DocId, null)
-% end.
-%
-% which should be roughly the same as the javascript:
-% emit(doc._id, null);
-%
-% This module exposes enough functions such that a native erlang server can
-% act as a fully-fledged view server, but no 'helper' functions specifically
-% for simplifying your erlang view code. It is expected other third-party
-% extensions will evolve which offer useful layers on top of this view server
-% to help simplify your view code.
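-%
-% A reduce function can be written the same way (a sketch; the
-% Keys/Values/ReReduce calling convention matches how reduce/5 below
-% invokes each fun):
-%
-%  fun(_Keys, Values, false) -> length(Values);
-%     (_Keys, Values, true) -> lists:sum(Values)
-%  end.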
--module(couch_native_process).
--behaviour(gen_server).
-
--export([start_link/0,init/1,terminate/2,handle_call/3,handle_cast/2,code_change/3,
- handle_info/2]).
--export([set_timeout/2, prompt/2]).
-
--define(STATE, native_proc_state).
--record(evstate, {ddocs, funs=[], query_config=[], list_pid=nil, timeout=5000}).
-
--include("couch_db.hrl").
-
-start_link() ->
- gen_server:start_link(?MODULE, [], []).
-
-% this is a bit messy, see also couch_query_servers handle_info
-% stop(_Pid) ->
-% ok.
-
-set_timeout(Pid, TimeOut) ->
- gen_server:call(Pid, {set_timeout, TimeOut}).
-
-prompt(Pid, Data) when is_list(Data) ->
- gen_server:call(Pid, {prompt, Data}).
-
-% gen_server callbacks
-init([]) ->
- {ok, #evstate{ddocs=dict:new()}}.
-
-handle_call({set_timeout, TimeOut}, _From, State) ->
- {reply, ok, State#evstate{timeout=TimeOut}};
-
-handle_call({prompt, Data}, _From, State) ->
- ?LOG_DEBUG("Prompt native qs: ~s",[?JSON_ENCODE(Data)]),
- {NewState, Resp} = try run(State, to_binary(Data)) of
- {S, R} -> {S, R}
- catch
- throw:{error, Why} ->
- {State, [<<"error">>, Why, Why]}
- end,
-
- case Resp of
- {error, Reason} ->
- Msg = io_lib:format("couch native server error: ~p", [Reason]),
- {reply, [<<"error">>, <<"native_query_server">>, list_to_binary(Msg)], NewState};
- [<<"error">> | Rest] ->
- % Msg = io_lib:format("couch native server error: ~p", [Rest]),
- % TODO: markh? (jan)
- {reply, [<<"error">> | Rest], NewState};
- [<<"fatal">> | Rest] ->
- % Msg = io_lib:format("couch native server error: ~p", [Rest]),
- % TODO: markh? (jan)
- {stop, fatal, [<<"error">> | Rest], NewState};
- Resp ->
- {reply, Resp, NewState}
- end.
-
-handle_cast(foo, State) -> {noreply, State}.
-handle_info({'EXIT',_,normal}, State) -> {noreply, State};
-handle_info({'EXIT',_,Reason}, State) ->
- {stop, Reason, State}.
-terminate(_Reason, _State) -> ok.
-code_change(_OldVersion, State, _Extra) -> {ok, State}.
-
-run(#evstate{list_pid=Pid}=State, [<<"list_row">>, Row]) when is_pid(Pid) ->
- Pid ! {self(), list_row, Row},
- receive
- {Pid, chunks, Data} ->
- {State, [<<"chunks">>, Data]};
- {Pid, list_end, Data} ->
- receive
- {'EXIT', Pid, normal} -> ok
- after State#evstate.timeout ->
- throw({timeout, list_cleanup})
- end,
- process_flag(trap_exit, erlang:get(do_trap)),
- {State#evstate{list_pid=nil}, [<<"end">>, Data]}
- after State#evstate.timeout ->
- throw({timeout, list_row})
- end;
-run(#evstate{list_pid=Pid}=State, [<<"list_end">>]) when is_pid(Pid) ->
- Pid ! {self(), list_end},
- Resp =
- receive
- {Pid, list_end, Data} ->
- receive
- {'EXIT', Pid, normal} -> ok
- after State#evstate.timeout ->
- throw({timeout, list_cleanup})
- end,
- [<<"end">>, Data]
- after State#evstate.timeout ->
- throw({timeout, list_end})
- end,
- process_flag(trap_exit, erlang:get(do_trap)),
- {State#evstate{list_pid=nil}, Resp};
-run(#evstate{list_pid=Pid}=State, _Command) when is_pid(Pid) ->
- {State, [<<"error">>, list_error, list_error]};
-run(#evstate{ddocs=DDocs}, [<<"reset">>]) ->
- {#evstate{ddocs=DDocs}, true};
-run(#evstate{ddocs=DDocs}, [<<"reset">>, QueryConfig]) ->
- {#evstate{ddocs=DDocs, query_config=QueryConfig}, true};
-run(#evstate{funs=Funs}=State, [<<"add_fun">> , BinFunc]) ->
- FunInfo = makefun(State, BinFunc),
- {State#evstate{funs=Funs ++ [FunInfo]}, true};
-run(State, [<<"map_doc">> , Doc]) ->
- Resp = lists:map(fun({Sig, Fun}) ->
- erlang:put(Sig, []),
- Fun(Doc),
- lists:reverse(erlang:get(Sig))
- end, State#evstate.funs),
- {State, Resp};
-run(State, [<<"reduce">>, Funs, KVs]) ->
- {Keys, Vals} =
- lists:foldl(fun([K, V], {KAcc, VAcc}) ->
- {[K | KAcc], [V | VAcc]}
- end, {[], []}, KVs),
- Keys2 = lists:reverse(Keys),
- Vals2 = lists:reverse(Vals),
- {State, catch reduce(State, Funs, Keys2, Vals2, false)};
-run(State, [<<"rereduce">>, Funs, Vals]) ->
- {State, catch reduce(State, Funs, null, Vals, true)};
-run(#evstate{ddocs=DDocs}=State, [<<"ddoc">>, <<"new">>, DDocId, DDoc]) ->
- DDocs2 = store_ddoc(DDocs, DDocId, DDoc),
- {State#evstate{ddocs=DDocs2}, true};
-run(#evstate{ddocs=DDocs}=State, [<<"ddoc">>, DDocId | Rest]) ->
- DDoc = load_ddoc(DDocs, DDocId),
- ddoc(State, DDoc, Rest);
-run(_, Unknown) ->
- ?LOG_ERROR("Native Process: Unknown command: ~p~n", [Unknown]),
- throw({error, unknown_command}).
-
-ddoc(State, {DDoc}, [FunPath, Args]) ->
- % load fun from the FunPath
- BFun = lists:foldl(fun
- (Key, {Props}) when is_list(Props) ->
- couch_util:get_value(Key, Props, nil);
- (_Key, Fun) when is_binary(Fun) ->
- Fun;
- (_Key, nil) ->
- throw({error, not_found});
- (_Key, _Fun) ->
- throw({error, malformed_ddoc})
- end, {DDoc}, FunPath),
- ddoc(State, makefun(State, BFun, {DDoc}), FunPath, Args).
-
-ddoc(State, {_, Fun}, [<<"validate_doc_update">>], Args) ->
- {State, (catch apply(Fun, Args))};
-ddoc(State, {_, Fun}, [<<"filters">>|_], [Docs, Req]) ->
- Resp = lists:map(fun(Doc) -> (catch Fun(Doc, Req)) =:= true end, Docs),
- {State, [true, Resp]};
-ddoc(State, {_, Fun}, [<<"shows">>|_], Args) ->
- Resp = case (catch apply(Fun, Args)) of
- FunResp when is_list(FunResp) ->
- FunResp;
- {FunResp} ->
- [<<"resp">>, {FunResp}];
- FunResp ->
- FunResp
- end,
- {State, Resp};
-ddoc(State, {_, Fun}, [<<"updates">>|_], Args) ->
- Resp = case (catch apply(Fun, Args)) of
- [JsonDoc, JsonResp] ->
- [<<"up">>, JsonDoc, JsonResp]
- end,
- {State, Resp};
-ddoc(State, {Sig, Fun}, [<<"lists">>|_], Args) ->
- Self = self(),
- SpawnFun = fun() ->
- LastChunk = (catch apply(Fun, Args)),
- case start_list_resp(Self, Sig) of
- started ->
- receive
- {Self, list_row, _Row} -> ignore;
- {Self, list_end} -> ignore
- after State#evstate.timeout ->
- throw({timeout, list_cleanup_pid})
- end;
- _ ->
- ok
- end,
- LastChunks =
- case erlang:get(Sig) of
- undefined -> [LastChunk];
- OtherChunks -> [LastChunk | OtherChunks]
- end,
- Self ! {self(), list_end, lists:reverse(LastChunks)}
- end,
- erlang:put(do_trap, process_flag(trap_exit, true)),
- Pid = spawn_link(SpawnFun),
- Resp =
- receive
- {Pid, start, Chunks, JsonResp} ->
- [<<"start">>, Chunks, JsonResp]
- after State#evstate.timeout ->
- throw({timeout, list_start})
- end,
- {State#evstate{list_pid=Pid}, Resp}.
-
-store_ddoc(DDocs, DDocId, DDoc) ->
- dict:store(DDocId, DDoc, DDocs).
-load_ddoc(DDocs, DDocId) ->
- try dict:fetch(DDocId, DDocs) of
- {DDoc} -> {DDoc}
- catch
- _:_Else -> throw({error, ?l2b(io_lib:format("Native Query Server missing DDoc with Id: ~s",[DDocId]))})
- end.
-
-bindings(State, Sig) ->
- bindings(State, Sig, nil).
-bindings(State, Sig, DDoc) ->
- Self = self(),
-
- Log = fun(Msg) ->
- ?LOG_INFO(Msg, [])
- end,
-
- Emit = fun(Id, Value) ->
- Curr = erlang:get(Sig),
- erlang:put(Sig, [[Id, Value] | Curr])
- end,
-
- Start = fun(Headers) ->
- erlang:put(list_headers, Headers)
- end,
-
- Send = fun(Chunk) ->
- Curr =
- case erlang:get(Sig) of
- undefined -> [];
- Else -> Else
- end,
- erlang:put(Sig, [Chunk | Curr])
- end,
-
- GetRow = fun() ->
- case start_list_resp(Self, Sig) of
- started ->
- ok;
- _ ->
- Chunks =
- case erlang:get(Sig) of
- undefined -> [];
- CurrChunks -> CurrChunks
- end,
- Self ! {self(), chunks, lists:reverse(Chunks)}
- end,
- erlang:put(Sig, []),
- receive
- {Self, list_row, Row} -> Row;
- {Self, list_end} -> nil
- after State#evstate.timeout ->
- throw({timeout, list_pid_getrow})
- end
- end,
-
- FoldRows = fun(Fun, Acc) -> foldrows(GetRow, Fun, Acc) end,
-
- Bindings = [
- {'Log', Log},
- {'Emit', Emit},
- {'Start', Start},
- {'Send', Send},
- {'GetRow', GetRow},
- {'FoldRows', FoldRows}
- ],
- case DDoc of
- {_Props} ->
- Bindings ++ [{'DDoc', DDoc}];
- _Else -> Bindings
- end.
-
-% thanks to erlview, via:
-% http://erlang.org/pipermail/erlang-questions/2003-November/010544.html
-makefun(State, Source) ->
- Sig = couch_util:md5(Source),
- BindFuns = bindings(State, Sig),
- {Sig, makefun(State, Source, BindFuns)}.
-makefun(State, Source, {DDoc}) ->
- Sig = couch_util:md5(lists:flatten([Source, term_to_binary(DDoc)])),
- BindFuns = bindings(State, Sig, {DDoc}),
- {Sig, makefun(State, Source, BindFuns)};
-makefun(_State, Source, BindFuns) when is_list(BindFuns) ->
- FunStr = binary_to_list(Source),
- {ok, Tokens, _} = erl_scan:string(FunStr),
- Form = case (catch erl_parse:parse_exprs(Tokens)) of
- {ok, [ParsedForm]} ->
- ParsedForm;
- {error, {LineNum, _Mod, [Mesg, Params]}}=Error ->
- io:format(standard_error, "Syntax error on line: ~p~n", [LineNum]),
- io:format(standard_error, "~s~p~n", [Mesg, Params]),
- throw(Error)
- end,
- Bindings = lists:foldl(fun({Name, Fun}, Acc) ->
- erl_eval:add_binding(Name, Fun, Acc)
- end, erl_eval:new_bindings(), BindFuns),
- {value, Fun, _} = erl_eval:expr(Form, Bindings),
- Fun.
-
-reduce(State, BinFuns, Keys, Vals, ReReduce) ->
- Funs = case is_list(BinFuns) of
- true ->
- lists:map(fun(BF) -> makefun(State, BF) end, BinFuns);
- _ ->
- [makefun(State, BinFuns)]
- end,
- Reds = lists:map(fun({_Sig, Fun}) ->
- Fun(Keys, Vals, ReReduce)
- end, Funs),
- [true, Reds].
-
-foldrows(GetRow, ProcRow, Acc) ->
- case GetRow() of
- nil ->
- {ok, Acc};
- Row ->
- case (catch ProcRow(Row, Acc)) of
- {ok, Acc2} ->
- foldrows(GetRow, ProcRow, Acc2);
- {stop, Acc2} ->
- {ok, Acc2}
- end
- end.
-
-start_list_resp(Self, Sig) ->
- case erlang:get(list_started) of
- undefined ->
- Headers =
- case erlang:get(list_headers) of
- undefined -> {[{<<"headers">>, {[]}}]};
- CurrHdrs -> CurrHdrs
- end,
- Chunks =
- case erlang:get(Sig) of
- undefined -> [];
- CurrChunks -> CurrChunks
- end,
- Self ! {self(), start, lists:reverse(Chunks), Headers},
- erlang:put(list_started, true),
- erlang:put(Sig, []),
- started;
- _ ->
- ok
- end.
-
-to_binary({Data}) ->
- Pred = fun({Key, Value}) ->
- {to_binary(Key), to_binary(Value)}
- end,
- {lists:map(Pred, Data)};
-to_binary(Data) when is_list(Data) ->
- [to_binary(D) || D <- Data];
-to_binary(null) ->
- null;
-to_binary(true) ->
- true;
-to_binary(false) ->
- false;
-to_binary(Data) when is_atom(Data) ->
- list_to_binary(atom_to_list(Data));
-to_binary(Data) ->
- Data.
diff --git a/1.1.x/src/couchdb/couch_os_daemons.erl b/1.1.x/src/couchdb/couch_os_daemons.erl
deleted file mode 100644
index d03f550c..00000000
--- a/1.1.x/src/couchdb/couch_os_daemons.erl
+++ /dev/null
@@ -1,364 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
--module(couch_os_daemons).
--behaviour(gen_server).
-
--export([start_link/0, info/0, info/1, config_change/2]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--include("couch_db.hrl").
-
--record(daemon, {
- port,
- name,
- cmd,
- kill,
- status=running,
- cfg_patterns=[],
- errors=[],
- buf=[]
-}).
-
--define(PORT_OPTIONS, [stream, {line, 1024}, binary, exit_status, hide]).
--define(TIMEOUT, 5000).
-
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-info() ->
- info([]).
-
-info(Options) ->
- gen_server:call(?MODULE, {daemon_info, Options}).
-
-config_change(Section, Key) ->
- gen_server:cast(?MODULE, {config_change, Section, Key}).
-
-init(_) ->
- process_flag(trap_exit, true),
- ok = couch_config:register(fun couch_os_daemons:config_change/2),
- Table = ets:new(?MODULE, [protected, set, {keypos, #daemon.port}]),
- reload_daemons(Table),
- {ok, Table}.
-
-terminate(_Reason, Table) ->
- [stop_port(D) || D <- ets:tab2list(Table)],
- ok.
-
-handle_call({daemon_info, Options}, _From, Table) when is_list(Options) ->
- case lists:member(table, Options) of
- true ->
- {reply, {ok, ets:tab2list(Table)}, Table};
- _ ->
- {reply, {ok, Table}, Table}
- end;
-handle_call(Msg, From, Table) ->
- ?LOG_ERROR("Unknown call message to ~p from ~p: ~p", [?MODULE, From, Msg]),
- {stop, error, Table}.
-
-handle_cast({config_change, Sect, Key}, Table) ->
- restart_daemons(Table, Sect, Key),
- case Sect of
- "os_daemons" -> reload_daemons(Table);
- _ -> ok
- end,
- {noreply, Table};
-handle_cast(stop, Table) ->
- {stop, normal, Table};
-handle_cast(Msg, Table) ->
- ?LOG_ERROR("Unknown cast message to ~p: ~p", [?MODULE, Msg]),
- {stop, error, Table}.
-
-handle_info({'EXIT', Port, Reason}, Table) ->
- case ets:lookup(Table, Port) of
- [] ->
- ?LOG_INFO("Port ~p exited after stopping: ~p~n", [Port, Reason]);
- [#daemon{status=stopping}] ->
- true = ets:delete(Table, Port);
- [#daemon{name=Name, status=restarting}=D] ->
- ?LOG_INFO("Daemon ~P restarting after config change.", [Name]),
- true = ets:delete(Table, Port),
- {ok, Port2} = start_port(D#daemon.cmd),
- true = ets:insert(Table, D#daemon{
- port=Port2, status=running, kill=undefined, buf=[]
- });
- [#daemon{name=Name, status=halted}] ->
- ?LOG_ERROR("Halted daemon process: ~p", [Name]);
- [D] ->
- ?LOG_ERROR("Invalid port state at exit: ~p", [D])
- end,
- {noreply, Table};
-handle_info({Port, closed}, Table) ->
- handle_info({Port, {exit_status, closed}}, Table);
-handle_info({Port, {exit_status, Status}}, Table) ->
- case ets:lookup(Table, Port) of
- [] ->
- ?LOG_ERROR("Unknown port ~p exiting ~p", [Port, Status]),
- {stop, {error, unknown_port_died, Status}, Table};
- [#daemon{name=Name, status=restarting}=D] ->
- ?LOG_INFO("Daemon ~P restarting after config change.", [Name]),
- true = ets:delete(Table, Port),
- {ok, Port2} = start_port(D#daemon.cmd),
- true = ets:insert(Table, D#daemon{
- port=Port2, status=running, kill=undefined, buf=[]
- }),
- {noreply, Table};
- [#daemon{status=stopping}=D] ->
- % The configuration changed and this daemon is no
- % longer needed.
- ?LOG_DEBUG("Port ~p shut down.", [D#daemon.name]),
- true = ets:delete(Table, Port),
- {noreply, Table};
- [D] ->
- % Port died for unknown reason. Check to see if it's
- % died too many times or if we should boot it back up.
- case should_halt([now() | D#daemon.errors]) of
- {true, _} ->
- % Halting the process. We won't try and reboot
- % until the configuration changes.
- Fmt = "Daemon ~p halted with exit_status ~p",
- ?LOG_ERROR(Fmt, [D#daemon.name, Status]),
- D2 = D#daemon{status=halted, errors=nil, buf=nil},
- true = ets:insert(Table, D2),
- {noreply, Table};
- {false, Errors} ->
-                    % We're guessing it was a random error; this daemon
-                    % has behaved well so far, so we'll give it another chance.
- Fmt = "Daemon ~p is being rebooted after exit_status ~p",
- ?LOG_INFO(Fmt, [D#daemon.name, Status]),
- true = ets:delete(Table, Port),
- {ok, Port2} = start_port(D#daemon.cmd),
- true = ets:insert(Table, D#daemon{
- port=Port2, status=running, kill=undefined,
- errors=Errors, buf=[]
- }),
- {noreply, Table}
- end;
- _Else ->
- throw(error)
- end;
-handle_info({Port, {data, {noeol, Data}}}, Table) ->
- [#daemon{buf=Buf}=D] = ets:lookup(Table, Port),
- true = ets:insert(Table, D#daemon{buf=[Data | Buf]}),
- {noreply, Table};
-handle_info({Port, {data, {eol, Data}}}, Table) ->
- [#daemon{buf=Buf}=D] = ets:lookup(Table, Port),
- Line = lists:reverse(Buf, Data),
-    % The first line echoed back is the kill command to run when we
-    % shut the port down. Every line after that is considered part of
-    % the stdio API.
- case D#daemon.kill of
- undefined ->
- true = ets:insert(Table, D#daemon{kill=?b2l(Line), buf=[]});
- _Else ->
- D2 = case (catch ?JSON_DECODE(Line)) of
- {invalid_json, Rejected} ->
- ?LOG_ERROR("Ignoring OS daemon request: ~p", [Rejected]),
- D;
- JSON ->
- {ok, D3} = handle_port_message(D, JSON),
- D3
- end,
- true = ets:insert(Table, D2#daemon{buf=[]})
- end,
- {noreply, Table};
-handle_info({Port, Error}, Table) ->
- ?LOG_ERROR("Unexpectd message from port ~p: ~p", [Port, Error]),
- stop_port(Port),
- [D] = ets:lookup(Table, Port),
- true = ets:insert(Table, D#daemon{status=restarting, buf=nil}),
- {noreply, Table};
-handle_info(Msg, Table) ->
- ?LOG_ERROR("Unexpected info message to ~p: ~p", [?MODULE, Msg]),
- {stop, error, Table}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-% Internal API
-
-%
-% Port management helpers
-%
-
-start_port(Command) ->
- PrivDir = couch_util:priv_dir(),
- Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
- Port = open_port({spawn, Spawnkiller ++ " " ++ Command}, ?PORT_OPTIONS),
- {ok, Port}.
-
-
-stop_port(#daemon{port=Port, kill=undefined}=D) ->
- ?LOG_ERROR("Stopping daemon without a kill command: ~p", [D#daemon.name]),
- catch port_close(Port);
-stop_port(#daemon{port=Port}=D) ->
- ?LOG_DEBUG("Stopping daemon: ~p", [D#daemon.name]),
- os:cmd(D#daemon.kill),
- catch port_close(Port).
-
-
-handle_port_message(#daemon{port=Port}=Daemon, [<<"get">>, Section]) ->
- KVs = couch_config:get(Section),
- Data = lists:map(fun({K, V}) -> {?l2b(K), ?l2b(V)} end, KVs),
- Json = iolist_to_binary(?JSON_ENCODE({Data})),
- port_command(Port, <<Json/binary, "\n">>),
- {ok, Daemon};
-handle_port_message(#daemon{port=Port}=Daemon, [<<"get">>, Section, Key]) ->
- Value = case couch_config:get(Section, Key, null) of
- null -> null;
- String -> ?l2b(String)
- end,
- Json = iolist_to_binary(?JSON_ENCODE(Value)),
- port_command(Port, <<Json/binary, "\n">>),
- {ok, Daemon};
-handle_port_message(Daemon, [<<"register">>, Sec]) when is_binary(Sec) ->
- Patterns = lists:usort(Daemon#daemon.cfg_patterns ++ [{?b2l(Sec)}]),
- {ok, Daemon#daemon{cfg_patterns=Patterns}};
-handle_port_message(Daemon, [<<"register">>, Sec, Key])
- when is_binary(Sec) andalso is_binary(Key) ->
- Pattern = {?b2l(Sec), ?b2l(Key)},
- Patterns = lists:usort(Daemon#daemon.cfg_patterns ++ [Pattern]),
- {ok, Daemon#daemon{cfg_patterns=Patterns}};
-handle_port_message(#daemon{name=Name}=Daemon, [<<"log">>, Msg]) ->
- handle_log_message(Name, Msg, <<"info">>),
- {ok, Daemon};
-handle_port_message(#daemon{name=Name}=Daemon, [<<"log">>, Msg, {Opts}]) ->
- Level = couch_util:get_value(<<"level">>, Opts, <<"info">>),
- handle_log_message(Name, Msg, Level),
- {ok, Daemon};
-handle_port_message(#daemon{name=Name}=Daemon, Else) ->
- ?LOG_ERROR("Daemon ~p made invalid request: ~p", [Name, Else]),
- {ok, Daemon}.
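-
-% For illustration, a daemon speaks newline-delimited JSON over stdio; a
-% hypothetical exchange (section and key names invented) might look like:
-%
-%   daemon -> couch:  ["get", "my_daemon", "port"]
-%   couch  -> daemon: "8888"
-%   daemon -> couch:  ["register", "my_daemon"]
-%   daemon -> couch:  ["log", "started ok", {"level": "debug"}]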
-
-
-handle_log_message(Name, Msg, _Level) when not is_binary(Msg) ->
- ?LOG_ERROR("Invalid log message from daemon ~p: ~p", [Name, Msg]);
-handle_log_message(Name, Msg, <<"debug">>) ->
- ?LOG_DEBUG("Daemon ~p :: ~s", [Name, ?b2l(Msg)]);
-handle_log_message(Name, Msg, <<"info">>) ->
- ?LOG_INFO("Daemon ~p :: ~s", [Name, ?b2l(Msg)]);
-handle_log_message(Name, Msg, <<"error">>) ->
- ?LOG_ERROR("Daemon: ~p :: ~s", [Name, ?b2l(Msg)]);
-handle_log_message(Name, Msg, Level) ->
- ?LOG_ERROR("Invalid log level from daemon: ~p", [Level]),
- ?LOG_INFO("Daemon: ~p :: ~s", [Name, ?b2l(Msg)]).
-
-%
-% Daemon management helpers
-%
-
-reload_daemons(Table) ->
- % List of daemons we want to have running.
- Configured = lists:sort(couch_config:get("os_daemons")),
-
- % Remove records for daemons that were halted.
- MSpecHalted = #daemon{name='$1', cmd='$2', status=halted, _='_'},
- Halted = lists:sort([{N, C} || [N, C] <- ets:match(Table, MSpecHalted)]),
- ok = stop_os_daemons(Table, find_to_stop(Configured, Halted, [])),
-
- % Stop daemons that are running
- % Start newly configured daemons
- MSpecRunning = #daemon{name='$1', cmd='$2', status=running, _='_'},
- Running = lists:sort([{N, C} || [N, C] <- ets:match(Table, MSpecRunning)]),
- ok = stop_os_daemons(Table, find_to_stop(Configured, Running, [])),
- ok = boot_os_daemons(Table, find_to_boot(Configured, Running, [])),
- ok.
-
-
-restart_daemons(Table, Sect, Key) ->
- restart_daemons(Table, Sect, Key, ets:first(Table)).
-
-restart_daemons(_, _, _, '$end_of_table') ->
- ok;
-restart_daemons(Table, Sect, Key, Port) ->
- [D] = ets:lookup(Table, Port),
- HasSect = lists:member({Sect}, D#daemon.cfg_patterns),
- HasKey = lists:member({Sect, Key}, D#daemon.cfg_patterns),
- case HasSect or HasKey of
- true ->
- stop_port(D),
- D2 = D#daemon{status=restarting, buf=nil},
- true = ets:insert(Table, D2);
- _ ->
- ok
- end,
- restart_daemons(Table, Sect, Key, ets:next(Table, Port)).
-
-
-stop_os_daemons(_Table, []) ->
- ok;
-stop_os_daemons(Table, [{Name, Cmd} | Rest]) ->
- [[Port]] = ets:match(Table, #daemon{port='$1', name=Name, cmd=Cmd, _='_'}),
- [D] = ets:lookup(Table, Port),
- case D#daemon.status of
- halted ->
- ets:delete(Table, Port);
- _ ->
- stop_port(D),
- D2 = D#daemon{status=stopping, errors=nil, buf=nil},
- true = ets:insert(Table, D2)
- end,
- stop_os_daemons(Table, Rest).
-
-boot_os_daemons(_Table, []) ->
- ok;
-boot_os_daemons(Table, [{Name, Cmd} | Rest]) ->
- {ok, Port} = start_port(Cmd),
- true = ets:insert(Table, #daemon{port=Port, name=Name, cmd=Cmd}),
- boot_os_daemons(Table, Rest).
-
-% Elements unique to the configured set need to be booted.
-find_to_boot([], _Rest, Acc) ->
- % Nothing else configured.
- Acc;
-find_to_boot([D | R1], [D | R2], Acc) ->
- % Elements are equal, daemon already running.
- find_to_boot(R1, R2, Acc);
-find_to_boot([D1 | R1], [D2 | _]=A2, Acc) when D1 < D2 ->
- find_to_boot(R1, A2, [D1 | Acc]);
-find_to_boot(A1, [_ | R2], Acc) ->
- find_to_boot(A1, R2, Acc);
-find_to_boot(Rest, [], Acc) ->
- % No more candidates for already running. Boot all.
- Rest ++ Acc.
-
-% Elements unique to the running set need to be killed.
-find_to_stop([], Rest, Acc) ->
- % The rest haven't been found, so they must all
- % be ready to die.
- Rest ++ Acc;
-find_to_stop([D | R1], [D | R2], Acc) ->
- % Elements are equal, daemon already running.
- find_to_stop(R1, R2, Acc);
-find_to_stop([D1 | R1], [D2 | _]=A2, Acc) when D1 < D2 ->
- find_to_stop(R1, A2, Acc);
-find_to_stop(A1, [D2 | R2], Acc) ->
- find_to_stop(A1, R2, [D2 | Acc]);
-find_to_stop(_, [], Acc) ->
- % No more running daemons to worry about.
- Acc.
-
-should_halt(Errors) ->
- RetryTimeCfg = couch_config:get("os_daemon_settings", "retry_time", "5"),
- RetryTime = list_to_integer(RetryTimeCfg),
-
- Now = now(),
- RecentErrors = lists:filter(fun(Time) ->
- timer:now_diff(Now, Time) =< RetryTime * 1000000
- end, Errors),
-
- RetryCfg = couch_config:get("os_daemon_settings", "max_retries", "3"),
- Retries = list_to_integer(RetryCfg),
-
- {length(RecentErrors) >= Retries, RecentErrors}.
diff --git a/1.1.x/src/couchdb/couch_os_process.erl b/1.1.x/src/couchdb/couch_os_process.erl
deleted file mode 100644
index 5776776b..00000000
--- a/1.1.x/src/couchdb/couch_os_process.erl
+++ /dev/null
@@ -1,185 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_os_process).
--behaviour(gen_server).
-
--export([start_link/1, start_link/2, start_link/3, stop/1]).
--export([set_timeout/2, prompt/2]).
--export([send/2, writeline/2, readline/1, writejson/2, readjson/1]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, code_change/3]).
-
--include("couch_db.hrl").
-
--define(PORT_OPTIONS, [stream, {line, 1024}, binary, exit_status, hide]).
-
--record(os_proc,
- {command,
- port,
- writer,
- reader,
- timeout=5000
- }).
-
-start_link(Command) ->
- start_link(Command, []).
-start_link(Command, Options) ->
- start_link(Command, Options, ?PORT_OPTIONS).
-start_link(Command, Options, PortOptions) ->
- gen_server:start_link(couch_os_process, [Command, Options, PortOptions], []).
-
-stop(Pid) ->
- gen_server:cast(Pid, stop).
-
-% Read/Write API
-set_timeout(Pid, TimeOut) when is_integer(TimeOut) ->
- ok = gen_server:call(Pid, {set_timeout, TimeOut}).
-
-% Used by couch_db_update_notifier.erl
-send(Pid, Data) ->
- gen_server:cast(Pid, {send, Data}).
-
-prompt(Pid, Data) ->
- case gen_server:call(Pid, {prompt, Data}, infinity) of
- {ok, Result} ->
- Result;
- Error ->
- ?LOG_ERROR("OS Process Error ~p :: ~p",[Pid,Error]),
- throw(Error)
- end.
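-
-% A minimal usage sketch (the command line is hypothetical; real callers
-% such as couch_query_servers pass the configured query server command):
-%
-%   {ok, Pid} = couch_os_process:start_link("couchjs main.js"),
-%   ok = couch_os_process:set_timeout(Pid, 10000),
-%   true = couch_os_process:prompt(Pid, [<<"reset">>]).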
-
-% Utility functions for reading and writing
-% in custom functions
-writeline(OsProc, Data) when is_record(OsProc, os_proc) ->
- port_command(OsProc#os_proc.port, Data ++ "\n").
-
-readline(#os_proc{} = OsProc) ->
- readline(OsProc, []).
-readline(#os_proc{port = Port} = OsProc, Acc) ->
- receive
- {Port, {data, {noeol, Data}}} ->
- readline(OsProc, [Data|Acc]);
- {Port, {data, {eol, Data}}} ->
- lists:reverse(Acc, Data);
- {Port, Err} ->
- catch port_close(Port),
- throw({os_process_error, Err})
- after OsProc#os_proc.timeout ->
- catch port_close(Port),
- throw({os_process_error, "OS process timed out."})
- end.
-
-% Standard JSON functions
-writejson(OsProc, Data) when is_record(OsProc, os_proc) ->
- JsonData = ?JSON_ENCODE(Data),
- ?LOG_DEBUG("OS Process ~p Input :: ~s", [OsProc#os_proc.port, JsonData]),
- true = writeline(OsProc, JsonData).
-
-readjson(OsProc) when is_record(OsProc, os_proc) ->
- Line = readline(OsProc),
- ?LOG_DEBUG("OS Process ~p Output :: ~s", [OsProc#os_proc.port, Line]),
- case ?JSON_DECODE(Line) of
- [<<"log">>, Msg] when is_binary(Msg) ->
- % we got a message to log. Log it and continue
- ?LOG_INFO("OS Process ~p Log :: ~s", [OsProc#os_proc.port, Msg]),
- readjson(OsProc);
- [<<"error">>, Id, Reason] ->
- throw({couch_util:to_existing_atom(Id),Reason});
- [<<"fatal">>, Id, Reason] ->
- ?LOG_INFO("OS Process ~p Fatal Error :: ~s ~p",[OsProc#os_proc.port, Id, Reason]),
- throw({couch_util:to_existing_atom(Id),Reason});
- Result ->
- Result
- end.
-
-
-% gen_server API
-init([Command, Options, PortOptions]) ->
- process_flag(trap_exit, true),
- PrivDir = couch_util:priv_dir(),
- Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
- BaseProc = #os_proc{
- command=Command,
- port=open_port({spawn, Spawnkiller ++ " " ++ Command}, PortOptions),
- writer=fun writejson/2,
- reader=fun readjson/1
- },
- KillCmd = readline(BaseProc),
- Pid = self(),
- ?LOG_DEBUG("OS Process Start :: ~p", [BaseProc#os_proc.port]),
- spawn(fun() ->
-        % this ensures the real OS process is killed when this process dies.
- erlang:monitor(process, Pid),
- receive _ -> ok end,
- os:cmd(?b2l(KillCmd))
- end),
- OsProc =
- lists:foldl(fun(Opt, Proc) ->
- case Opt of
- {writer, Writer} when is_function(Writer) ->
- Proc#os_proc{writer=Writer};
- {reader, Reader} when is_function(Reader) ->
- Proc#os_proc{reader=Reader};
- {timeout, TimeOut} when is_integer(TimeOut) ->
- Proc#os_proc{timeout=TimeOut}
- end
- end, BaseProc, Options),
- {ok, OsProc}.
-
-terminate(_Reason, #os_proc{port=Port}) ->
- catch port_close(Port),
- ok.
-
-handle_call({set_timeout, TimeOut}, _From, OsProc) ->
- {reply, ok, OsProc#os_proc{timeout=TimeOut}};
-handle_call({prompt, Data}, _From, OsProc) ->
- #os_proc{writer=Writer, reader=Reader} = OsProc,
- try
- Writer(OsProc, Data),
- {reply, {ok, Reader(OsProc)}, OsProc}
- catch
- throw:{error, OsError} ->
- {reply, OsError, OsProc};
- throw:{fatal, OsError} ->
- {stop, normal, OsError, OsProc};
- throw:OtherError ->
- {stop, normal, OtherError, OsProc}
- end.
-
-handle_cast({send, Data}, #os_proc{writer=Writer}=OsProc) ->
- try
- Writer(OsProc, Data),
- {noreply, OsProc}
- catch
- throw:OsError ->
- ?LOG_ERROR("Failed sending data: ~p -> ~p", [Data, OsError]),
- {stop, normal, OsProc}
- end;
-handle_cast(stop, OsProc) ->
- {stop, normal, OsProc};
-handle_cast(Msg, OsProc) ->
- ?LOG_DEBUG("OS Proc: Unknown cast: ~p", [Msg]),
- {noreply, OsProc}.
-
-handle_info({Port, {exit_status, 0}}, #os_proc{port=Port}=OsProc) ->
- ?LOG_INFO("OS Process terminated normally", []),
- {stop, normal, OsProc};
-handle_info({Port, {exit_status, Status}}, #os_proc{port=Port}=OsProc) ->
- ?LOG_ERROR("OS Process died with status: ~p", [Status]),
- {stop, {exit_status, Status}, OsProc};
-handle_info(Msg, OsProc) ->
- ?LOG_DEBUG("OS Proc: Unknown info: ~p", [Msg]),
- {noreply, OsProc}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
diff --git a/1.1.x/src/couchdb/couch_query_servers.erl b/1.1.x/src/couchdb/couch_query_servers.erl
deleted file mode 100644
index b0e46937..00000000
--- a/1.1.x/src/couchdb/couch_query_servers.erl
+++ /dev/null
@@ -1,589 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_query_servers).
--behaviour(gen_server).
-
--export([start_link/0]).
-
--export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,code_change/3]).
--export([start_doc_map/3, map_docs/2, stop_doc_map/1]).
--export([reduce/3, rereduce/3,validate_doc_update/5]).
--export([filter_docs/5]).
-
--export([with_ddoc_proc/2, proc_prompt/2, ddoc_prompt/3, ddoc_proc_prompt/3, json_doc/1]).
-
-% -export([test/0]).
-
--include("couch_db.hrl").
-
--record(proc, {
- pid,
- lang,
- ddoc_keys = [],
- prompt_fun,
- set_timeout_fun,
- stop_fun
-}).
-
--record(qserver, {
- langs, % Keyed by language name, value is {Mod,Func,Arg}
-    pid_procs, % Keyed by PID, value is a #proc record.
- lang_procs, % Keyed by language name, value is a #proc record
- lang_limits, % Keyed by language name, value is {Lang, Limit, Current}
- waitlist = [],
- config
-}).
-
-start_link() ->
- gen_server:start_link({local, couch_query_servers}, couch_query_servers, [], []).
-
-start_doc_map(Lang, Functions, Lib) ->
- Proc = get_os_process(Lang),
- case Lib of
- {[]} -> ok;
- Lib ->
- true = proc_prompt(Proc, [<<"add_lib">>, Lib])
- end,
- lists:foreach(fun(FunctionSource) ->
- true = proc_prompt(Proc, [<<"add_fun">>, FunctionSource])
- end, Functions),
- {ok, Proc}.
-
-map_docs(Proc, Docs) ->
- % send the documents
- Results = lists:map(
- fun(Doc) ->
- Json = couch_doc:to_json_obj(Doc, []),
-
- FunsResults = proc_prompt(Proc, [<<"map_doc">>, Json]),
- % the results are a json array of function map yields like this:
- % [FunResults1, FunResults2 ...]
-            % where each FunResults is a json array of key value pairs:
- % [[Key1, Value1], [Key2, Value2]]
- % Convert the key, value pairs to tuples like
- % [{Key1, Value1}, {Key2, Value2}]
- lists:map(
- fun(FunRs) ->
- [list_to_tuple(FunResult) || FunResult <- FunRs]
- end,
- FunsResults)
- end,
- Docs),
- {ok, Results}.
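-
-% For example (illustrative values): mapping one doc against two map funs,
-% where only the first fun emits a row, might return
-%
-%   {ok, [ [ [{<<"key">>, 1}], [] ] ]}
-%
-% one inner list per map fun, one {Key, Value} tuple per emit.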
-
-
-stop_doc_map(nil) ->
- ok;
-stop_doc_map(Proc) ->
- ok = ret_os_process(Proc).
-
-group_reductions_results([]) ->
- [];
-group_reductions_results(List) ->
- {Heads, Tails} = lists:foldl(
- fun([H|T], {HAcc,TAcc}) ->
- {[H|HAcc], [T|TAcc]}
- end, {[], []}, List),
- case Tails of
- [[]|_] -> % no tails left
- [Heads];
- _ ->
- [Heads | group_reductions_results(Tails)]
- end.
-
-rereduce(_Lang, [], _ReducedValues) ->
- {ok, []};
-rereduce(Lang, RedSrcs, ReducedValues) ->
- Grouped = group_reductions_results(ReducedValues),
- Results = lists:zipwith(
- fun
- (<<"_", _/binary>> = FunSrc, Values) ->
- {ok, [Result]} = builtin_reduce(rereduce, [FunSrc], [[[], V] || V <- Values], []),
- Result;
- (FunSrc, Values) ->
- os_rereduce(Lang, [FunSrc], Values)
- end, RedSrcs, Grouped),
- {ok, Results}.
-
-reduce(_Lang, [], _KVs) ->
- {ok, []};
-reduce(Lang, RedSrcs, KVs) ->
- {OsRedSrcs, BuiltinReds} = lists:partition(fun
- (<<"_", _/binary>>) -> false;
- (_OsFun) -> true
- end, RedSrcs),
- {ok, OsResults} = os_reduce(Lang, OsRedSrcs, KVs),
- {ok, BuiltinResults} = builtin_reduce(reduce, BuiltinReds, KVs, []),
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, []).
-
-recombine_reduce_results([], [], [], Acc) ->
- {ok, lists:reverse(Acc)};
-recombine_reduce_results([<<"_", _/binary>>|RedSrcs], OsResults, [BRes|BuiltinResults], Acc) ->
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, [BRes|Acc]);
-recombine_reduce_results([_OsFun|RedSrcs], [OsR|OsResults], BuiltinResults, Acc) ->
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, [OsR|Acc]).
-
-os_reduce(_Lang, [], _KVs) ->
- {ok, []};
-os_reduce(Lang, OsRedSrcs, KVs) ->
- Proc = get_os_process(Lang),
- OsResults = try proc_prompt(Proc, [<<"reduce">>, OsRedSrcs, KVs]) of
- [true, Reductions] -> Reductions
- after
- ok = ret_os_process(Proc)
- end,
- {ok, OsResults}.
-
-os_rereduce(Lang, OsRedSrcs, KVs) ->
- Proc = get_os_process(Lang),
- try proc_prompt(Proc, [<<"rereduce">>, OsRedSrcs, KVs]) of
- [true, [Reduction]] -> Reduction
- after
- ok = ret_os_process(Proc)
- end.
-
-
-builtin_reduce(_Re, [], _KVs, Acc) ->
- {ok, lists:reverse(Acc)};
-builtin_reduce(Re, [<<"_sum",_/binary>>|BuiltinReds], KVs, Acc) ->
- Sum = builtin_sum_rows(KVs),
- builtin_reduce(Re, BuiltinReds, KVs, [Sum|Acc]);
-builtin_reduce(reduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
- Count = length(KVs),
- builtin_reduce(reduce, BuiltinReds, KVs, [Count|Acc]);
-builtin_reduce(rereduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
- Count = builtin_sum_rows(KVs),
- builtin_reduce(rereduce, BuiltinReds, KVs, [Count|Acc]);
-builtin_reduce(Re, [<<"_stats",_/binary>>|BuiltinReds], KVs, Acc) ->
- Stats = builtin_stats(Re, KVs),
- builtin_reduce(Re, BuiltinReds, KVs, [Stats|Acc]).
-
-builtin_sum_rows(KVs) ->
- lists:foldl(fun
- ([_Key, Value], Acc) when is_number(Value), is_number(Acc) ->
- Acc + Value;
- ([_Key, Value], Acc) when is_list(Value), is_list(Acc) ->
- sum_terms(Acc, Value);
- ([_Key, Value], Acc) when is_number(Value), is_list(Acc) ->
- sum_terms(Acc, [Value]);
- ([_Key, Value], Acc) when is_list(Value), is_number(Acc) ->
- sum_terms([Acc], Value);
- (_Else, _Acc) ->
- throw({invalid_value, <<"builtin _sum function requires map values to be numbers or lists of numbers">>})
- end, 0, KVs).
-
-sum_terms([], []) ->
- [];
-sum_terms([_|_]=Xs, []) ->
- Xs;
-sum_terms([], [_|_]=Ys) ->
- Ys;
-sum_terms([X|Xs], [Y|Ys]) when is_number(X), is_number(Y) ->
- [X+Y | sum_terms(Xs,Ys)];
-sum_terms(_, _) ->
- throw({invalid_value, <<"builtin _sum function requires map values to be numbers or lists of numbers">>}).
-
-builtin_stats(reduce, [[_,First]|Rest]) when is_number(First) ->
- Stats = lists:foldl(fun([_K,V], {S,C,Mi,Ma,Sq}) when is_number(V) ->
- {S+V, C+1, lists:min([Mi, V]), lists:max([Ma, V]), Sq+(V*V)};
- (_, _) ->
- throw({invalid_value,
- <<"builtin _stats function requires map values to be numbers">>})
- end, {First,1,First,First,First*First}, Rest),
- {Sum, Cnt, Min, Max, Sqr} = Stats,
- {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]};
-
-builtin_stats(rereduce, [[_,First]|Rest]) ->
- {[{sum,Sum0}, {count,Cnt0}, {min,Min0}, {max,Max0}, {sumsqr,Sqr0}]} = First,
- Stats = lists:foldl(fun([_K,Red], {S,C,Mi,Ma,Sq}) ->
- {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]} = Red,
- {Sum+S, Cnt+C, lists:min([Min, Mi]), lists:max([Max, Ma]), Sqr+Sq}
- end, {Sum0,Cnt0,Min0,Max0,Sqr0}, Rest),
- {Sum, Cnt, Min, Max, Sqr} = Stats,
- {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]}.
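-
-% A worked example (illustrative numbers): reducing the rows
-% [[k1,1], [k2,3], [k3,2]] with _stats yields
-%   {[{sum,6}, {count,3}, {min,1}, {max,3}, {sumsqr,14}]}
-% since 1*1 + 3*3 + 2*2 = 14.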
-
-% use the function stored in ddoc.validate_doc_update to test an update.
-validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx, SecObj) ->
- JsonEditDoc = couch_doc:to_json_obj(EditDoc, [revs]),
- JsonDiskDoc = json_doc(DiskDoc),
- case ddoc_prompt(DDoc, [<<"validate_doc_update">>], [JsonEditDoc, JsonDiskDoc, Ctx, SecObj]) of
- 1 ->
- ok;
- {[{<<"forbidden">>, Message}]} ->
- throw({forbidden, Message});
- {[{<<"unauthorized">>, Message}]} ->
- throw({unauthorized, Message})
- end.
-
-json_doc(nil) -> null;
-json_doc(Doc) ->
- couch_doc:to_json_obj(Doc, [revs]).
-
-filter_docs(Req, Db, DDoc, FName, Docs) ->
- JsonReq = case Req of
- {json_req, JsonObj} ->
- JsonObj;
- #httpd{} = HttpReq ->
- couch_httpd_external:json_req_obj(HttpReq, Db)
- end,
- JsonDocs = [couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs],
- [true, Passes] = ddoc_prompt(DDoc, [<<"filters">>, FName], [JsonDocs, JsonReq]),
- {ok, Passes}.
-
-ddoc_proc_prompt({Proc, DDocId}, FunPath, Args) ->
- proc_prompt(Proc, [<<"ddoc">>, DDocId, FunPath, Args]).
-
-ddoc_prompt(DDoc, FunPath, Args) ->
- with_ddoc_proc(DDoc, fun({Proc, DDocId}) ->
- proc_prompt(Proc, [<<"ddoc">>, DDocId, FunPath, Args])
- end).
-
-with_ddoc_proc(#doc{id=DDocId,revs={Start, [DiskRev|_]}}=DDoc, Fun) ->
- Rev = couch_doc:rev_to_str({Start, DiskRev}),
- DDocKey = {DDocId, Rev},
- Proc = get_ddoc_process(DDoc, DDocKey),
- try Fun({Proc, DDocId})
- after
- ok = ret_os_process(Proc)
- end.
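-
-% For example (a sketch; FName and Args stand for whatever the caller is
-% rendering):
-%
-%   with_ddoc_proc(DDoc, fun({Proc, DDocId}) ->
-%       proc_prompt(Proc, [<<"ddoc">>, DDocId, [<<"shows">>, FName], Args])
-%   end).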
-
-init([]) ->
- % read config and register for configuration changes
-
-    % just stop if one of the config settings changes. couch_server_sup
- % will restart us and then we will pick up the new settings.
-
- ok = couch_config:register(
- fun("query_servers" ++ _, _) ->
- supervisor:terminate_child(couch_secondary_services, query_servers),
- supervisor:restart_child(couch_secondary_services, query_servers)
- end),
- ok = couch_config:register(
- fun("native_query_servers" ++ _, _) ->
- supervisor:terminate_child(couch_secondary_services, query_servers),
-        supervisor:restart_child(couch_secondary_services, query_servers)
- end),
- ok = couch_config:register(
- fun("query_server_config" ++ _, _) ->
- supervisor:terminate_child(couch_secondary_services, query_servers),
- supervisor:restart_child(couch_secondary_services, query_servers)
- end),
-
- Langs = ets:new(couch_query_server_langs, [set, private]),
- LangLimits = ets:new(couch_query_server_lang_limits, [set, private]),
- PidProcs = ets:new(couch_query_server_pid_langs, [set, private]),
- LangProcs = ets:new(couch_query_server_procs, [set, private]),
-
- ProcTimeout = list_to_integer(couch_config:get(
- "couchdb", "os_process_timeout", "5000")),
- ReduceLimit = list_to_atom(
- couch_config:get("query_server_config","reduce_limit","true")),
- OsProcLimit = list_to_integer(
- couch_config:get("query_server_config","os_process_limit","10")),
-
- % 'query_servers' specifies an OS command-line to execute.
- lists:foreach(fun({Lang, Command}) ->
- true = ets:insert(LangLimits, {?l2b(Lang), OsProcLimit, 0}),
- true = ets:insert(Langs, {?l2b(Lang),
- couch_os_process, start_link, [Command]})
- end, couch_config:get("query_servers")),
- % 'native_query_servers' specifies a {Module, Func, Arg} tuple.
- lists:foreach(fun({Lang, SpecStr}) ->
- {ok, {Mod, Fun, SpecArg}} = couch_util:parse_term(SpecStr),
- true = ets:insert(LangLimits, {?l2b(Lang), 0, 0}), % 0 means no limit
- true = ets:insert(Langs, {?l2b(Lang),
- Mod, Fun, SpecArg})
- end, couch_config:get("native_query_servers")),
-
-
- process_flag(trap_exit, true),
- {ok, #qserver{
- langs = Langs, % Keyed by language name, value is {Mod,Func,Arg}
- pid_procs = PidProcs, % Keyed by PID, value is a #proc record.
- lang_procs = LangProcs, % Keyed by language name, value is a list of #proc records
- lang_limits = LangLimits, % Keyed by language name, value is {Lang, Limit, Current}
- config = {[{<<"reduce_limit">>, ReduceLimit},{<<"timeout">>, ProcTimeout}]}
- }}.
-
-terminate(_Reason, #qserver{pid_procs=PidProcs}) ->
- [couch_util:shutdown_sync(P) || {P,_} <- ets:tab2list(PidProcs)],
- ok.
-
-handle_call({get_proc, #doc{body={Props}}=DDoc, DDocKey}, From, Server) ->
- Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
- case lang_proc(Lang, Server, fun(Procs) ->
- % find a proc in the set that has the DDoc
- proc_with_ddoc(DDoc, DDocKey, Procs)
- end) of
- {ok, Proc} ->
- {reply, {ok, Proc, Server#qserver.config}, Server};
- wait ->
- {noreply, add_to_waitlist({DDoc, DDocKey}, From, Server)};
- Error ->
- {reply, Error, Server}
- end;
-handle_call({get_proc, Lang}, From, Server) ->
- case lang_proc(Lang, Server, fun([P|_Procs]) ->
- {ok, P}
- end) of
- {ok, Proc} ->
- {reply, {ok, Proc, Server#qserver.config}, Server};
- wait ->
- {noreply, add_to_waitlist({Lang}, From, Server)};
- Error ->
- {reply, Error, Server}
- end;
-handle_call({unlink_proc, Pid}, _From, #qserver{pid_procs=PidProcs}=Server) ->
- rem_value(PidProcs, Pid),
- unlink(Pid),
- {reply, ok, Server};
-handle_call({ret_proc, Proc}, _From, #qserver{
- pid_procs=PidProcs,
- lang_procs=LangProcs}=Server) ->
- % Along with the max process limit, we should also check here
- % whether we're over the limit and discard the proc when we are.
- add_value(PidProcs, Proc#proc.pid, Proc),
- add_to_list(LangProcs, Proc#proc.lang, Proc),
- link(Proc#proc.pid),
- {reply, true, service_waitlist(Server)}.
-
-handle_cast(_Whatever, Server) ->
- {noreply, Server}.
-
-handle_info({'EXIT', Pid, Status}, #qserver{
- pid_procs=PidProcs,
- lang_procs=LangProcs,
- lang_limits=LangLimits}=Server) ->
- case ets:lookup(PidProcs, Pid) of
- [{Pid, Proc}] ->
- case Status of
- normal -> ok;
- _ -> ?LOG_DEBUG("Linked process died abnormally: ~p (reason: ~p)", [Pid, Status])
- end,
- rem_value(PidProcs, Pid),
- catch rem_from_list(LangProcs, Proc#proc.lang, Proc),
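- % free the slot this proc occupied for its language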
- [{Lang, Lim, Current}] = ets:lookup(LangLimits, Proc#proc.lang),
- true = ets:insert(LangLimits, {Lang, Lim, Current-1}),
- {noreply, service_waitlist(Server)};
- [] ->
- case Status of
- normal ->
- {noreply, Server};
- _ ->
- {stop, Status, Server}
- end
- end.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-% Private API
-
-add_to_waitlist(Info, From, #qserver{waitlist=Waitlist}=Server) ->
- Server#qserver{waitlist=[{Info, From}|Waitlist]}.
-
-service_waitlist(#qserver{waitlist=[]}=Server) ->
- Server;
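- % waiters are prepended on arrival, so the oldest one sits at the tail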
-service_waitlist(#qserver{waitlist=Waitlist}=Server) ->
- [Oldest|RevWList] = lists:reverse(Waitlist),
- case service_waiting(Oldest, Server) of
- ok ->
- Server#qserver{waitlist=lists:reverse(RevWList)};
- wait ->
- Server#qserver{waitlist=Waitlist}
- end.
-
- % TODO: get rid of the duplication between these two clauses
-service_waiting({{#doc{body={Props}}=DDoc, DDocKey}, From}, Server) ->
- Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
- case lang_proc(Lang, Server, fun(Procs) ->
- % find a proc in the set that has the DDoc
- proc_with_ddoc(DDoc, DDocKey, Procs)
- end) of
- {ok, Proc} ->
- gen_server:reply(From, {ok, Proc, Server#qserver.config}),
- ok;
- wait -> % this should never happen
- wait;
- Error ->
- gen_server:reply(From, Error),
- ok
- end;
-service_waiting({{Lang}, From}, Server) ->
- case lang_proc(Lang, Server, fun([P|_Procs]) ->
- {ok, P}
- end) of
- {ok, Proc} ->
- gen_server:reply(From, {ok, Proc, Server#qserver.config}),
- ok;
- wait -> % this should never happen
- wait;
- Error ->
- gen_server:reply(From, Error),
- ok
- end.
-
-lang_proc(Lang, #qserver{
- langs=Langs,
- pid_procs=PidProcs,
- lang_procs=LangProcs,
- lang_limits=LangLimits}, PickFun) ->
- % Note to future self. Add max process limit.
- case ets:lookup(LangProcs, Lang) of
- [{Lang, [P|Procs]}] ->
- {ok, Proc} = PickFun([P|Procs]),
- rem_from_list(LangProcs, Lang, Proc),
- {ok, Proc};
- _ ->
- case (catch new_process(Langs, LangLimits, Lang)) of
- {ok, Proc} ->
- add_value(PidProcs, Proc#proc.pid, Proc),
- PickFun([Proc]);
- ErrorOrWait ->
- ErrorOrWait
- end
- end.
-
-new_process(Langs, LangLimits, Lang) ->
- [{Lang, Lim, Current}] = ets:lookup(LangLimits, Lang),
- if (Lim == 0) or (Current < Lim) -> % Lim == 0 means no limit
- % we are below the limit for our language, make a new one
- case ets:lookup(Langs, Lang) of
- [{Lang, Mod, Func, Arg}] ->
- {ok, Pid} = apply(Mod, Func, Arg),
- true = ets:insert(LangLimits, {Lang, Lim, Current+1}),
- {ok, #proc{lang=Lang,
- pid=Pid,
- % Called via proc_prompt, proc_set_timeout, and proc_stop
- prompt_fun={Mod, prompt},
- set_timeout_fun={Mod, set_timeout},
- stop_fun={Mod, stop}}};
- _ ->
- {unknown_query_language, Lang}
- end;
- true ->
- wait
- end.
-
-proc_with_ddoc(DDoc, DDocKey, LangProcs) ->
- DDocProcs = lists:filter(fun(#proc{ddoc_keys=Keys}) ->
- lists:any(fun(Key) ->
- Key == DDocKey
- end, Keys)
- end, LangProcs),
- case DDocProcs of
- [DDocProc|_] ->
- ?LOG_DEBUG("DDocProc found for DDocKey: ~p",[DDocKey]),
- {ok, DDocProc};
- [] ->
- [TeachProc|_] = LangProcs,
- ?LOG_DEBUG("Teach ddoc to new proc ~p with DDocKey: ~p",[TeachProc, DDocKey]),
- {ok, SmartProc} = teach_ddoc(DDoc, DDocKey, TeachProc),
- {ok, SmartProc}
- end.
-
-proc_prompt(Proc, Args) ->
- {Mod, Func} = Proc#proc.prompt_fun,
- apply(Mod, Func, [Proc#proc.pid, Args]).
-
-proc_stop(Proc) ->
- {Mod, Func} = Proc#proc.stop_fun,
- apply(Mod, Func, [Proc#proc.pid]).
-
-proc_set_timeout(Proc, Timeout) ->
- {Mod, Func} = Proc#proc.set_timeout_fun,
- apply(Mod, Func, [Proc#proc.pid, Timeout]).
-
-teach_ddoc(DDoc, {DDocId, _Rev}=DDocKey, #proc{ddoc_keys=Keys}=Proc) ->
- % send the ddoc over the wire
- % we share the rev so the client knows when to update its code,
- % but the client only keeps the latest copy of each ddoc around.
- true = proc_prompt(Proc, [<<"ddoc">>, <<"new">>, DDocId, couch_doc:to_json_obj(DDoc, [])]),
- % remove any other ddoc keys for this docid, because
- % the query server overwrites without checking the rev
- Keys2 = [{D,R} || {D,R} <- Keys, D /= DDocId],
- % add ddoc to the proc
- {ok, Proc#proc{ddoc_keys=[DDocKey|Keys2]}}.
-
-get_ddoc_process(#doc{} = DDoc, DDocKey) ->
- % TODO: remove this case statement
- case gen_server:call(couch_query_servers, {get_proc, DDoc, DDocKey}) of
- {ok, Proc, {QueryConfig}} ->
- % process knows the ddoc
- case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
- true ->
- proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
- link(Proc#proc.pid),
- gen_server:call(couch_query_servers, {unlink_proc, Proc#proc.pid}),
- Proc;
- _ ->
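- % the reset failed, so the proc is stale; stop it and fetch a fresh one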
- catch proc_stop(Proc),
- get_ddoc_process(DDoc, DDocKey)
- end;
- Error ->
- throw(Error)
- end.
-
-get_os_process(Lang) ->
- case gen_server:call(couch_query_servers, {get_proc, Lang}) of
- {ok, Proc, {QueryConfig}} ->
- case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
- true ->
- proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
- link(Proc#proc.pid),
- gen_server:call(couch_query_servers, {unlink_proc, Proc#proc.pid}),
- Proc;
- _ ->
- catch proc_stop(Proc),
- get_os_process(Lang)
- end;
- Error ->
- throw(Error)
- end.
-
-ret_os_process(Proc) ->
- true = gen_server:call(couch_query_servers, {ret_proc, Proc}),
- catch unlink(Proc#proc.pid),
- ok.
-
-add_value(Tid, Key, Value) ->
- true = ets:insert(Tid, {Key, Value}).
-
-rem_value(Tid, Key) ->
- true = ets:delete(Tid, Key).
-
-add_to_list(Tid, Key, Value) ->
- case ets:lookup(Tid, Key) of
- [{Key, Vals}] ->
- true = ets:insert(Tid, {Key, [Value|Vals]});
- [] ->
- true = ets:insert(Tid, {Key, [Value]})
- end.
-
-rem_from_list(Tid, Key, Value) when is_record(Value, proc)->
- Pid = Value#proc.pid,
- case ets:lookup(Tid, Key) of
- [{Key, Vals}] ->
- % make a new values list that doesn't include the Value arg
- NewValues = [Val || #proc{pid=P}=Val <- Vals, P /= Pid],
- ets:insert(Tid, {Key, NewValues});
- [] -> ok
- end;
-rem_from_list(Tid, Key, Value) ->
- case ets:lookup(Tid, Key) of
- [{Key, Vals}] ->
- % make a new values list that doesn't include the Value arg
- NewValues = [Val || Val <- Vals, Val /= Value],
- ets:insert(Tid, {Key, NewValues});
- [] -> ok
- end.
diff --git a/1.1.x/src/couchdb/couch_ref_counter.erl b/1.1.x/src/couchdb/couch_ref_counter.erl
deleted file mode 100644
index 5a111ab6..00000000
--- a/1.1.x/src/couchdb/couch_ref_counter.erl
+++ /dev/null
@@ -1,111 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_ref_counter).
--behaviour(gen_server).
-
--export([start/1, init/1, terminate/2, handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
--export([drop/1,drop/2,add/1,add/2,count/1]).
-
-start(ChildProcs) ->
- gen_server:start(couch_ref_counter, {self(), ChildProcs}, []).
-
-
-drop(RefCounterPid) ->
- drop(RefCounterPid, self()).
-
-drop(RefCounterPid, Pid) ->
- gen_server:call(RefCounterPid, {drop, Pid}).
-
-
-add(RefCounterPid) ->
- add(RefCounterPid, self()).
-
-add(RefCounterPid, Pid) ->
- gen_server:call(RefCounterPid, {add, Pid}).
-
-count(RefCounterPid) ->
- gen_server:call(RefCounterPid, count).
-
-% server functions
-
--record(srv,
- {
- referrers=dict:new(), % maps each referring pid to {MonitorRef, RefCount}
- child_procs=[]
- }).
-
-init({Pid, ChildProcs}) ->
- [link(ChildProc) || ChildProc <- ChildProcs],
- Referrers = dict:from_list([{Pid, {erlang:monitor(process, Pid), 1}}]),
- {ok, #srv{referrers=Referrers, child_procs=ChildProcs}}.
-
-
-terminate(_Reason, #srv{child_procs=ChildProcs}) ->
- [couch_util:shutdown_sync(Pid) || Pid <- ChildProcs],
- ok.
-
-
-handle_call({add, Pid},_From, #srv{referrers=Referrers}=Srv) ->
- Referrers2 =
- case dict:find(Pid, Referrers) of
- error ->
- dict:store(Pid, {erlang:monitor(process, Pid), 1}, Referrers);
- {ok, {MonRef, RefCnt}} ->
- dict:store(Pid, {MonRef, RefCnt + 1}, Referrers)
- end,
- {reply, ok, Srv#srv{referrers=Referrers2}};
-handle_call(count, _From, Srv) ->
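- % every distinct referrer holds exactly one monitor, so counting monitors counts referrers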
- {monitors, Monitors} = process_info(self(), monitors),
- {reply, length(Monitors), Srv};
-handle_call({drop, Pid}, _From, #srv{referrers=Referrers}=Srv) ->
- Referrers2 =
- case dict:find(Pid, Referrers) of
- {ok, {MonRef, 1}} ->
- erlang:demonitor(MonRef, [flush]),
- dict:erase(Pid, Referrers);
- {ok, {MonRef, Num}} ->
- dict:store(Pid, {MonRef, Num-1}, Referrers);
- error ->
- Referrers
- end,
- Srv2 = Srv#srv{referrers=Referrers2},
- case should_close() of
- true ->
- {stop,normal,ok,Srv2};
- false ->
- {reply, ok, Srv2}
- end.
-
-handle_cast(Msg, _Srv) ->
- exit({unknown_msg,Msg}).
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info({'DOWN', MonRef, _, Pid, _}, #srv{referrers=Referrers}=Srv) ->
- {ok, {MonRef, _RefCount}} = dict:find(Pid, Referrers),
- Srv2 = Srv#srv{referrers=dict:erase(Pid, Referrers)},
- case should_close() of
- true ->
- {stop,normal,Srv2};
- false ->
- {noreply,Srv2}
- end.
-
-
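- % close once no monitors remain, i.e. the last referrer is gone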
-should_close() ->
- case process_info(self(), monitors) of
- {monitors, []} -> true;
- _ -> false
- end.
diff --git a/1.1.x/src/couchdb/couch_rep.erl b/1.1.x/src/couchdb/couch_rep.erl
deleted file mode 100644
index 5c9fbce6..00000000
--- a/1.1.x/src/couchdb/couch_rep.erl
+++ /dev/null
@@ -1,972 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep).
--behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
- code_change/3]).
-
--export([replicate/2, checkpoint/1]).
--export([ensure_rep_db_exists/0, make_replication_id/2]).
--export([start_replication/3, end_replication/1, get_result/4]).
--export([update_rep_doc/2]).
-
--include("couch_db.hrl").
--include("couch_js_functions.hrl").
--include("../ibrowse/ibrowse.hrl").
-
--define(REP_ID_VERSION, 2).
-
--record(state, {
- changes_feed,
- missing_revs,
- reader,
- writer,
-
- source,
- target,
- continuous,
- create_target,
- init_args,
- checkpoint_scheduled = nil,
-
- start_seq,
- history,
- session_id,
- source_log,
- target_log,
- rep_starttime,
- src_starttime,
- tgt_starttime,
- checkpoint_history = nil,
-
- listeners = [],
- complete = false,
- committed_seq = 0,
-
- stats = nil,
- rep_doc = nil,
- source_db_update_notifier = nil,
- target_db_update_notifier = nil
-}).
-
-%% convenience function to do a simple replication from the shell
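- %% e.g. (illustrative names): couch_rep:replicate("mydb", "http://example.org:5984/mydb/").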
-replicate(Source, Target) when is_list(Source) ->
- replicate(?l2b(Source), Target);
-replicate(Source, Target) when is_binary(Source), is_list(Target) ->
- replicate(Source, ?l2b(Target));
-replicate(Source, Target) when is_binary(Source), is_binary(Target) ->
- replicate({[{<<"source">>, Source}, {<<"target">>, Target}]}, #user_ctx{});
-
-%% function handling POST to _replicate
-replicate({Props}=PostBody, UserCtx) ->
- RepId = make_replication_id(PostBody, UserCtx),
- case couch_util:get_value(<<"cancel">>, Props, false) of
- true ->
- end_replication(RepId);
- false ->
- Server = start_replication(PostBody, RepId, UserCtx),
- get_result(Server, RepId, PostBody, UserCtx)
- end.
-
-end_replication({BaseId, Extension}) ->
- RepId = BaseId ++ Extension,
- case supervisor:terminate_child(couch_rep_sup, RepId) of
- {error, not_found} = R ->
- R;
- ok ->
- ok = supervisor:delete_child(couch_rep_sup, RepId),
- {ok, {cancelled, ?l2b(BaseId)}}
- end.
-
-start_replication(RepDoc, {BaseId, Extension}, UserCtx) ->
- Replicator = {
- BaseId ++ Extension,
- {gen_server, start_link,
- [?MODULE, [BaseId, RepDoc, UserCtx], []]},
- temporary,
- 1,
- worker,
- [?MODULE]
- },
- start_replication_server(Replicator).
-
-checkpoint(Server) ->
- gen_server:cast(Server, do_checkpoint).
-
-get_result(Server, {BaseId, _Extension}, {Props} = PostBody, UserCtx) ->
- case couch_util:get_value(<<"continuous">>, Props, false) of
- true ->
- {ok, {continuous, ?l2b(BaseId)}};
- false ->
- try gen_server:call(Server, get_result, infinity) of
- retry -> replicate(PostBody, UserCtx);
- Else -> Else
- catch
- exit:{noproc, {gen_server, call, [Server, get_result, infinity]}} ->
- %% oops, this replication just finished -- restart it.
- replicate(PostBody, UserCtx);
- exit:{normal, {gen_server, call, [Server, get_result, infinity]}} ->
- %% we made the call during terminate
- replicate(PostBody, UserCtx)
- end
- end.
-
-init(InitArgs) ->
- try
- do_init(InitArgs)
- catch
- throw:Error ->
- {stop, Error}
- end.
-
-do_init([RepId, {PostProps} = RepDoc, UserCtx] = InitArgs) ->
- process_flag(trap_exit, true),
-
- SourceProps = couch_util:get_value(<<"source">>, PostProps),
- TargetProps = couch_util:get_value(<<"target">>, PostProps),
-
- Continuous = couch_util:get_value(<<"continuous">>, PostProps, false),
- CreateTarget = couch_util:get_value(<<"create_target">>, PostProps, false),
-
- ProxyParams = parse_proxy_params(
- couch_util:get_value(<<"proxy">>, PostProps, [])),
- Source = open_db(SourceProps, UserCtx, ProxyParams),
- Target = open_db(TargetProps, UserCtx, ProxyParams, CreateTarget),
-
- SourceInfo = dbinfo(Source),
- TargetInfo = dbinfo(Target),
-
- maybe_set_triggered(RepDoc, RepId),
-
- [SourceLog, TargetLog] = find_replication_logs(
- [Source, Target], RepId, {PostProps}, UserCtx),
- {StartSeq, History} = compare_replication_logs(SourceLog, TargetLog),
-
- {ok, ChangesFeed} =
- couch_rep_changes_feed:start_link(self(), Source, StartSeq, PostProps),
- {ok, MissingRevs} =
- couch_rep_missing_revs:start_link(self(), Target, ChangesFeed, PostProps),
- {ok, Reader} =
- couch_rep_reader:start_link(self(), Source, MissingRevs, PostProps),
- {ok, Writer} =
- couch_rep_writer:start_link(self(), Target, Reader, PostProps),
-
- Stats = ets:new(replication_stats, [set, private]),
- ets:insert(Stats, {total_revs,0}),
- ets:insert(Stats, {missing_revs, 0}),
- ets:insert(Stats, {docs_read, 0}),
- ets:insert(Stats, {docs_written, 0}),
- ets:insert(Stats, {doc_write_failures, 0}),
-
- {ShortId, _} = lists:split(6, RepId),
- couch_task_status:add_task("Replication", io_lib:format("~s: ~s -> ~s",
- [ShortId, dbname(Source), dbname(Target)]), "Starting"),
-
- State = #state{
- changes_feed = ChangesFeed,
- missing_revs = MissingRevs,
- reader = Reader,
- writer = Writer,
-
- source = Source,
- target = Target,
- continuous = Continuous,
- create_target = CreateTarget,
- init_args = InitArgs,
- stats = Stats,
- checkpoint_scheduled = nil,
-
- start_seq = StartSeq,
- history = History,
- session_id = couch_uuids:random(),
- source_log = SourceLog,
- target_log = TargetLog,
- rep_starttime = httpd_util:rfc1123_date(),
- src_starttime = couch_util:get_value(instance_start_time, SourceInfo),
- tgt_starttime = couch_util:get_value(instance_start_time, TargetInfo),
- rep_doc = RepDoc,
- source_db_update_notifier = source_db_update_notifier(Source),
- target_db_update_notifier = target_db_update_notifier(Target)
- },
- {ok, State}.
-
-handle_call(get_result, From, #state{complete=true, listeners=[]} = State) ->
- {stop, normal, State#state{listeners=[From]}};
-handle_call(get_result, From, State) ->
- Listeners = State#state.listeners,
- {noreply, State#state{listeners=[From|Listeners]}};
-
-handle_call(get_source_db, _From, #state{source = Source} = State) ->
- {reply, {ok, Source}, State};
-
-handle_call(get_target_db, _From, #state{target = Target} = State) ->
- {reply, {ok, Target}, State}.
-
-handle_cast(reopen_source_db, #state{source = Source} = State) ->
- {ok, NewSource} = couch_db:reopen(Source),
- {noreply, State#state{source = NewSource}};
-
-handle_cast(reopen_target_db, #state{target = Target} = State) ->
- {ok, NewTarget} = couch_db:reopen(Target),
- {noreply, State#state{target = NewTarget}};
-
-handle_cast(do_checkpoint, State) ->
- {noreply, do_checkpoint(State)};
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info({missing_revs_checkpoint, SourceSeq}, State) ->
- couch_task_status:update("MR Processed source update #~p", [SourceSeq]),
- {noreply, schedule_checkpoint(State#state{committed_seq = SourceSeq})};
-
-handle_info({writer_checkpoint, SourceSeq}, #state{committed_seq=N} = State)
- when SourceSeq > N ->
- MissingRevs = State#state.missing_revs,
- ok = gen_server:cast(MissingRevs, {update_committed_seq, SourceSeq}),
- couch_task_status:update("W Processed source update #~p", [SourceSeq]),
- {noreply, schedule_checkpoint(State#state{committed_seq = SourceSeq})};
-handle_info({writer_checkpoint, _}, State) ->
- {noreply, State};
-
-handle_info({update_stats, Key, N}, State) ->
- ets:update_counter(State#state.stats, Key, N),
- {noreply, State};
-
-handle_info({'DOWN', _, _, _, _}, State) ->
- ?LOG_INFO("replication terminating because local DB is shutting down", []),
- timer:cancel(State#state.checkpoint_scheduled),
- {stop, shutdown, State};
-
-handle_info({'EXIT', Writer, normal}, #state{writer=Writer} = State) ->
- case State#state.listeners of
- [] ->
- {noreply, State#state{complete = true}};
- _Else ->
- {stop, normal, State}
- end;
-
-handle_info({'EXIT', _, normal}, State) ->
- {noreply, State};
-handle_info({'EXIT', _Pid, {Err, Reason}}, State) when Err == source_error;
- Err == target_error ->
- ?LOG_INFO("replication terminating due to ~p: ~p", [Err, Reason]),
- timer:cancel(State#state.checkpoint_scheduled),
- {stop, shutdown, State};
-handle_info({'EXIT', _Pid, Reason}, State) ->
- {stop, Reason, State}.
-
-terminate(normal, #state{checkpoint_scheduled=nil} = State) ->
- do_terminate(State),
- update_rep_doc(
- State#state.rep_doc, [{<<"_replication_state">>, <<"completed">>}]);
-
-terminate(normal, State) ->
- timer:cancel(State#state.checkpoint_scheduled),
- do_terminate(do_checkpoint(State)),
- update_rep_doc(
- State#state.rep_doc, [{<<"_replication_state">>, <<"completed">>}]);
-
-terminate(shutdown, #state{listeners = Listeners} = State) ->
- % continuous replication stopped
- [gen_server:reply(L, {ok, stopped}) || L <- Listeners],
- terminate_cleanup(State);
-
-terminate(Reason, #state{listeners = Listeners} = State) ->
- [gen_server:reply(L, {error, Reason}) || L <- Listeners],
- terminate_cleanup(State),
- update_rep_doc(
- State#state.rep_doc, [{<<"_replication_state">>, <<"error">>}]).
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-% internal funs
-
-start_replication_server(Replicator) ->
- RepId = element(1, Replicator),
- case supervisor:start_child(couch_rep_sup, Replicator) of
- {ok, Pid} ->
- ?LOG_INFO("starting new replication ~p at ~p", [RepId, Pid]),
- Pid;
- {error, already_present} ->
- case supervisor:restart_child(couch_rep_sup, RepId) of
- {ok, Pid} ->
- ?LOG_INFO("starting replication ~p at ~p", [RepId, Pid]),
- Pid;
- {error, running} ->
- %% this error occurs if multiple replicators are racing
- %% each other to start and somebody else won. Just grab
- %% the Pid by calling start_child again.
- {error, {already_started, Pid}} =
- supervisor:start_child(couch_rep_sup, Replicator),
- ?LOG_DEBUG("replication ~p already running at ~p", [RepId, Pid]),
- Pid;
- {error, {db_not_found, DbUrl}} ->
- throw({db_not_found, <<"could not open ", DbUrl/binary>>});
- {error, {unauthorized, DbUrl}} ->
- throw({unauthorized,
- <<"unauthorized to access database ", DbUrl/binary>>});
- {error, {'EXIT', {badarg,
- [{erlang, apply, [gen_server, start_link, undefined]} | _]}}} ->
- % Clause to deal with a change in the supervisor module introduced
- % in R14B02. For more details consult the thread at:
- % http://erlang.org/pipermail/erlang-bugs/2011-March/002273.html
- _ = supervisor:delete_child(couch_rep_sup, RepId),
- start_replication_server(Replicator)
- end;
- {error, {already_started, Pid}} ->
- ?LOG_DEBUG("replication ~p already running at ~p", [RepId, Pid]),
- Pid;
- {error, {{db_not_found, DbUrl}, _}} ->
- throw({db_not_found, <<"could not open ", DbUrl/binary>>});
- {error, {{unauthorized, DbUrl}, _}} ->
- throw({unauthorized,
- <<"unauthorized to access database ", DbUrl/binary>>})
- end.
-
-compare_replication_logs(SrcDoc, TgtDoc) ->
- #doc{body={RepRecProps}} = SrcDoc,
- #doc{body={RepRecPropsTgt}} = TgtDoc,
- case couch_util:get_value(<<"session_id">>, RepRecProps) ==
- couch_util:get_value(<<"session_id">>, RepRecPropsTgt) of
- true ->
- % if the records have the same session id,
- % then we have a valid replication history
- OldSeqNum = couch_util:get_value(<<"source_last_seq">>, RepRecProps, 0),
- OldHistory = couch_util:get_value(<<"history">>, RepRecProps, []),
- {OldSeqNum, OldHistory};
- false ->
- SourceHistory = couch_util:get_value(<<"history">>, RepRecProps, []),
- TargetHistory = couch_util:get_value(<<"history">>, RepRecPropsTgt, []),
- ?LOG_INFO("Replication records differ. "
- "Scanning histories to find a common ancestor.", []),
- ?LOG_DEBUG("Record on source:~p~nRecord on target:~p~n",
- [RepRecProps, RepRecPropsTgt]),
- compare_rep_history(SourceHistory, TargetHistory)
- end.
-
-compare_rep_history(S, T) when S =:= [] orelse T =:= [] ->
- ?LOG_INFO("no common ancestry -- performing full replication", []),
- {0, []};
-compare_rep_history([{S}|SourceRest], [{T}|TargetRest]=Target) ->
- SourceId = couch_util:get_value(<<"session_id">>, S),
- case has_session_id(SourceId, Target) of
- true ->
- RecordSeqNum = couch_util:get_value(<<"recorded_seq">>, S, 0),
- ?LOG_INFO("found a common replication record with source_seq ~p",
- [RecordSeqNum]),
- {RecordSeqNum, SourceRest};
- false ->
- TargetId = couch_util:get_value(<<"session_id">>, T),
- case has_session_id(TargetId, SourceRest) of
- true ->
- RecordSeqNum = couch_util:get_value(<<"recorded_seq">>, T, 0),
- ?LOG_INFO("found a common replication record with source_seq ~p",
- [RecordSeqNum]),
- {RecordSeqNum, TargetRest};
- false ->
- compare_rep_history(SourceRest, TargetRest)
- end
- end.
-
-close_db(#http_db{}) ->
- ok;
-close_db(Db) ->
- couch_db:close(Db).
-
-dbname(#http_db{url = Url}) ->
- couch_util:url_strip_password(Url);
-dbname(#db{name = Name}) ->
- Name.
-
-dbinfo(#http_db{} = Db) ->
- {DbProps} = couch_rep_httpc:request(Db),
- [{couch_util:to_existing_atom(K), V} || {K,V} <- DbProps];
-dbinfo(Db) ->
- {ok, Info} = couch_db:get_db_info(Db),
- Info.
-
-do_terminate(State) ->
- #state{
- checkpoint_history = CheckpointHistory,
- committed_seq = NewSeq,
- listeners = Listeners,
- source = Source,
- continuous = Continuous,
- source_log = #doc{body={OldHistory}}
- } = State,
-
- NewRepHistory = case CheckpointHistory of
- nil ->
- {[{<<"no_changes">>, true} | OldHistory]};
- _Else ->
- CheckpointHistory
- end,
-
- %% reply to original requester
- OtherListeners = case Continuous of
- true ->
- []; % continuous replications have no listeners
- _ ->
- [Original|Rest] = lists:reverse(Listeners),
- gen_server:reply(Original, {ok, NewRepHistory}),
- Rest
- end,
-
- %% maybe trigger another replication. If this replicator uses a local
- %% source Db, changes to that Db since we started will not be included in
- %% this pass.
- case up_to_date(Source, NewSeq) of
- true ->
- [gen_server:reply(R, {ok, NewRepHistory}) || R <- OtherListeners];
- false ->
- [gen_server:reply(R, retry) || R <- OtherListeners]
- end,
- couch_task_status:update("Finishing"),
- terminate_cleanup(State).
-
-terminate_cleanup(State) ->
- close_db(State#state.source),
- close_db(State#state.target),
- stop_db_update_notifier(State#state.source_db_update_notifier),
- stop_db_update_notifier(State#state.target_db_update_notifier),
- ets:delete(State#state.stats).
-
-stop_db_update_notifier(nil) ->
- ok;
-stop_db_update_notifier(Notifier) ->
- couch_db_update_notifier:stop(Notifier).
-
-has_session_id(_SessionId, []) ->
- false;
-has_session_id(SessionId, [{Props} | Rest]) ->
- case couch_util:get_value(<<"session_id">>, Props, nil) of
- SessionId ->
- true;
- _Else ->
- has_session_id(SessionId, Rest)
- end.
-
-maybe_append_options(Options, {Props}) ->
- lists:foldl(fun(Option, Acc) ->
- Acc ++
- case couch_util:get_value(Option, Props, false) of
- true ->
- "+" ++ ?b2l(Option);
- false ->
- ""
- end
- end, [], Options).
-
-make_replication_id(RepProps, UserCtx) ->
- BaseId = make_replication_id(RepProps, UserCtx, ?REP_ID_VERSION),
- Extension = maybe_append_options(
- [<<"continuous">>, <<"create_target">>], RepProps),
- {BaseId, Extension}.
-
-% Versioned clauses for generating replication ids
- % If a change is made to how replications are identified,
- % add a new clause and increase ?REP_ID_VERSION at the top.
-make_replication_id({Props}, UserCtx, 2) ->
- {ok, HostName} = inet:gethostname(),
- Port = mochiweb_socket_server:get(couch_httpd, port),
- Src = get_rep_endpoint(UserCtx, couch_util:get_value(<<"source">>, Props)),
- Tgt = get_rep_endpoint(UserCtx, couch_util:get_value(<<"target">>, Props)),
- maybe_append_filters({Props}, [HostName, Port, Src, Tgt], UserCtx);
-make_replication_id({Props}, UserCtx, 1) ->
- {ok, HostName} = inet:gethostname(),
- Src = get_rep_endpoint(UserCtx, couch_util:get_value(<<"source">>, Props)),
- Tgt = get_rep_endpoint(UserCtx, couch_util:get_value(<<"target">>, Props)),
- maybe_append_filters({Props}, [HostName, Src, Tgt], UserCtx).
-
-maybe_append_filters({Props}, Base, UserCtx) ->
- Base2 = Base ++
- case couch_util:get_value(<<"filter">>, Props) of
- undefined ->
- case couch_util:get_value(<<"doc_ids">>, Props) of
- undefined ->
- [];
- DocIds ->
- [DocIds]
- end;
- Filter ->
- [filter_code(Filter, Props, UserCtx),
- couch_util:get_value(<<"query_params">>, Props, {[]})]
- end,
- couch_util:to_hex(couch_util:md5(term_to_binary(Base2))).
-
-filter_code(Filter, Props, UserCtx) ->
- {match, [DDocName, FilterName]} =
- re:run(Filter, "(.*?)/(.*)", [{capture, [1, 2], binary}]),
- ProxyParams = parse_proxy_params(
- couch_util:get_value(<<"proxy">>, Props, [])),
- Source = open_db(
- couch_util:get_value(<<"source">>, Props), UserCtx, ProxyParams),
- try
- {ok, DDoc} = open_doc(Source, <<"_design/", DDocName/binary>>),
- Code = couch_util:get_nested_json_value(
- DDoc#doc.body, [<<"filters">>, FilterName]),
- re:replace(Code, "^\s*(.*?)\s*$", "\\1", [{return, binary}])
- after
- close_db(Source)
- end.
-
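- % e.g. "http://example.org/db" becomes "http://example.org/db/"; urls already ending in / pass through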
-maybe_add_trailing_slash(Url) ->
- re:replace(Url, "[^/]$", "&/", [{return, list}]).
-
-get_rep_endpoint(_UserCtx, {Props}) ->
- Url = maybe_add_trailing_slash(couch_util:get_value(<<"url">>, Props)),
- {BinHeaders} = couch_util:get_value(<<"headers">>, Props, {[]}),
- {Auth} = couch_util:get_value(<<"auth">>, Props, {[]}),
- case couch_util:get_value(<<"oauth">>, Auth) of
- undefined ->
- {remote, Url, [{?b2l(K),?b2l(V)} || {K,V} <- BinHeaders]};
- {OAuth} ->
- {remote, Url, [{?b2l(K),?b2l(V)} || {K,V} <- BinHeaders], OAuth}
- end;
-get_rep_endpoint(_UserCtx, <<"http://",_/binary>>=Url) ->
- {remote, maybe_add_trailing_slash(Url), []};
-get_rep_endpoint(_UserCtx, <<"https://",_/binary>>=Url) ->
- {remote, maybe_add_trailing_slash(Url), []};
-get_rep_endpoint(UserCtx, <<DbName/binary>>) ->
- {local, DbName, UserCtx}.
-
-find_replication_logs(DbList, RepId, RepProps, UserCtx) ->
- LogId = ?l2b(?LOCAL_DOC_PREFIX ++ RepId),
- fold_replication_logs(DbList, ?REP_ID_VERSION,
- LogId, LogId, RepProps, UserCtx, []).
-
-% Accumulate the replication logs
-% Falls back to older log document ids and migrates them
-fold_replication_logs([], _Vsn, _LogId, _NewId, _RepProps, _UserCtx, Acc) ->
- lists:reverse(Acc);
-fold_replication_logs([Db|Rest]=Dbs, Vsn, LogId, NewId,
- RepProps, UserCtx, Acc) ->
- case open_replication_log(Db, LogId) of
- {error, not_found} when Vsn > 1 ->
- OldRepId = make_replication_id(RepProps, UserCtx, Vsn - 1),
- fold_replication_logs(Dbs, Vsn - 1,
- ?l2b(?LOCAL_DOC_PREFIX ++ OldRepId), NewId, RepProps, UserCtx, Acc);
- {error, not_found} ->
- fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
- RepProps, UserCtx, [#doc{id=NewId}|Acc]);
- {ok, Doc} when LogId =:= NewId ->
- fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
- RepProps, UserCtx, [Doc|Acc]);
- {ok, Doc} ->
- MigratedLog = #doc{id=NewId,body=Doc#doc.body},
- fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
- RepProps, UserCtx, [MigratedLog|Acc])
- end.
-
-open_replication_log(Db, DocId) ->
- case open_doc(Db, DocId) of
- {ok, Doc} ->
- ?LOG_DEBUG("found a replication log for ~s", [dbname(Db)]),
- {ok, Doc};
- _ ->
- ?LOG_DEBUG("didn't find a replication log for ~s", [dbname(Db)]),
- {error, not_found}
- end.
-
-open_doc(#http_db{} = Db, DocId) ->
- Req = Db#http_db{resource = couch_util:encode_doc_id(DocId)},
- case couch_rep_httpc:request(Req) of
- {[{<<"error">>, _}, {<<"reason">>, _}]} ->
- {error, not_found};
- Doc ->
- {ok, couch_doc:from_json_obj(Doc)}
- end;
-open_doc(Db, DocId) ->
- couch_db:open_doc(Db, DocId).
-
-open_db(Props, UserCtx, ProxyParams) ->
- open_db(Props, UserCtx, ProxyParams, false).
-
-open_db({Props}, _UserCtx, ProxyParams, CreateTarget) ->
- Url = maybe_add_trailing_slash(couch_util:get_value(<<"url">>, Props)),
- {AuthProps} = couch_util:get_value(<<"auth">>, Props, {[]}),
- {BinHeaders} = couch_util:get_value(<<"headers">>, Props, {[]}),
- Headers = [{?b2l(K),?b2l(V)} || {K,V} <- BinHeaders],
- DefaultHeaders = (#http_db{})#http_db.headers,
- Db1 = #http_db{
- url = Url,
- auth = AuthProps,
- headers = lists:ukeymerge(1, Headers, DefaultHeaders)
- },
- Db = Db1#http_db{
- options = Db1#http_db.options ++ ProxyParams ++
- couch_rep_httpc:ssl_options(Db1)
- },
- couch_rep_httpc:db_exists(Db, CreateTarget);
-open_db(<<"http://",_/binary>>=Url, _, ProxyParams, CreateTarget) ->
- open_db({[{<<"url">>,Url}]}, [], ProxyParams, CreateTarget);
-open_db(<<"https://",_/binary>>=Url, _, ProxyParams, CreateTarget) ->
- open_db({[{<<"url">>,Url}]}, [], ProxyParams, CreateTarget);
-open_db(<<DbName/binary>>, UserCtx, _ProxyParams, CreateTarget) ->
- try
- case CreateTarget of
- true ->
- ok = couch_httpd:verify_is_server_admin(UserCtx),
- couch_server:create(DbName, [{user_ctx, UserCtx}]);
- false ->
- ok
- end,
-
- case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- couch_db:monitor(Db),
- Db;
- {not_found, no_db_file} ->
- throw({db_not_found, DbName})
- end
- catch throw:{unauthorized, _} ->
- throw({unauthorized, DbName})
- end.
-
-schedule_checkpoint(#state{checkpoint_scheduled = nil} = State) ->
- Server = self(),
- case timer:apply_after(5000, couch_rep, checkpoint, [Server]) of
- {ok, TRef} ->
- State#state{checkpoint_scheduled = TRef};
- Error ->
- ?LOG_ERROR("tried to schedule a checkpoint but got ~p", [Error]),
- State
- end;
-schedule_checkpoint(State) ->
- State.
-
-do_checkpoint(State) ->
- #state{
- source = Source,
- target = Target,
- committed_seq = NewSeqNum,
- start_seq = StartSeqNum,
- history = OldHistory,
- session_id = SessionId,
- source_log = SourceLog,
- target_log = TargetLog,
- rep_starttime = ReplicationStartTime,
- src_starttime = SrcInstanceStartTime,
- tgt_starttime = TgtInstanceStartTime,
- stats = Stats,
- rep_doc = {RepDoc}
- } = State,
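- % only record the checkpoint if neither db restarted since we captured its instance start time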
- case commit_to_both(Source, Target, NewSeqNum) of
- {SrcInstanceStartTime, TgtInstanceStartTime} ->
- ?LOG_INFO("recording a checkpoint for ~s -> ~s at source update_seq ~p",
- [dbname(Source), dbname(Target), NewSeqNum]),
- EndTime = ?l2b(httpd_util:rfc1123_date()),
- StartTime = ?l2b(ReplicationStartTime),
- DocsRead = ets:lookup_element(Stats, docs_read, 2),
- DocsWritten = ets:lookup_element(Stats, docs_written, 2),
- DocWriteFailures = ets:lookup_element(Stats, doc_write_failures, 2),
- NewHistoryEntry = {[
- {<<"session_id">>, SessionId},
- {<<"start_time">>, StartTime},
- {<<"end_time">>, EndTime},
- {<<"start_last_seq">>, StartSeqNum},
- {<<"end_last_seq">>, NewSeqNum},
- {<<"recorded_seq">>, NewSeqNum},
- {<<"missing_checked">>, ets:lookup_element(Stats, total_revs, 2)},
- {<<"missing_found">>, ets:lookup_element(Stats, missing_revs, 2)},
- {<<"docs_read">>, DocsRead},
- {<<"docs_written">>, DocsWritten},
- {<<"doc_write_failures">>, DocWriteFailures}
- ]},
- BaseHistory = [
- {<<"session_id">>, SessionId},
- {<<"source_last_seq">>, NewSeqNum},
- {<<"replication_id_version">>, ?REP_ID_VERSION}
- ] ++ case couch_util:get_value(<<"doc_ids">>, RepDoc) of
- undefined ->
- [];
- DocIds when is_list(DocIds) ->
- % backwards compatibility with the result of a replication by
- % doc IDs in versions 0.11.x and 1.0.x
- [
- {<<"start_time">>, StartTime},
- {<<"end_time">>, EndTime},
- {<<"docs_read">>, DocsRead},
- {<<"docs_written">>, DocsWritten},
- {<<"doc_write_failures">>, DocWriteFailures}
- ]
- end,
- % limit history to 50 entries
- NewRepHistory = {
- BaseHistory ++
- [{<<"history">>, lists:sublist([NewHistoryEntry | OldHistory], 50)}]
- },
-
- try
- {SrcRevPos,SrcRevId} =
- update_local_doc(Source, SourceLog#doc{body=NewRepHistory}),
- {TgtRevPos,TgtRevId} =
- update_local_doc(Target, TargetLog#doc{body=NewRepHistory}),
- State#state{
- checkpoint_scheduled = nil,
- checkpoint_history = NewRepHistory,
- source_log = SourceLog#doc{revs={SrcRevPos, [SrcRevId]}},
- target_log = TargetLog#doc{revs={TgtRevPos, [TgtRevId]}}
- }
- catch throw:conflict ->
- ?LOG_ERROR("checkpoint failure: conflict (are you replicating to "
- "yourself?)", []),
- State
- end;
- _Else ->
- ?LOG_INFO("rebooting ~s -> ~s from last known replication checkpoint",
- [dbname(Source), dbname(Target)]),
- #state{
- changes_feed = CF,
- missing_revs = MR,
- reader = Reader,
- writer = Writer
- } = State,
- Pids = [Writer, Reader, MR, CF],
- [unlink(Pid) || Pid <- Pids],
- [exit(Pid, shutdown) || Pid <- Pids],
- close_db(Target),
- close_db(Source),
- {ok, NewState} = init(State#state.init_args),
- NewState#state{listeners=State#state.listeners}
- end.
-
-commit_to_both(Source, Target, RequiredSeq) ->
- % commit the src async
- ParentPid = self(),
- SrcCommitPid = spawn_link(fun() ->
- ParentPid ! {self(), ensure_full_commit(Source, RequiredSeq)} end),
-
- % commit tgt sync
- TargetStartTime = ensure_full_commit(Target),
-
- SourceStartTime =
- receive
- {SrcCommitPid, Timestamp} ->
- Timestamp;
- {'EXIT', SrcCommitPid, {http_request_failed, _}} ->
- exit(replication_link_failure)
- end,
- {SourceStartTime, TargetStartTime}.
-
-ensure_full_commit(#http_db{headers = Headers} = Target) ->
- Headers1 = [
- {"Content-Length", 0} |
- couch_util:proplist_apply_field(
- {"Content-Type", "application/json"}, Headers)
- ],
- Req = Target#http_db{
- resource = "_ensure_full_commit",
- method = post,
- headers = Headers1
- },
- {ResultProps} = couch_rep_httpc:request(Req),
- true = couch_util:get_value(<<"ok">>, ResultProps),
- couch_util:get_value(<<"instance_start_time">>, ResultProps);
-ensure_full_commit(Target) ->
- {ok, NewDb} = couch_db:open_int(Target#db.name, []),
- UpdateSeq = couch_db:get_update_seq(Target),
- CommitSeq = couch_db:get_committed_update_seq(NewDb),
- InstanceStartTime = NewDb#db.instance_start_time,
- couch_db:close(NewDb),
- if UpdateSeq > CommitSeq ->
- ?LOG_DEBUG("target needs a full commit: update ~p commit ~p",
- [UpdateSeq, CommitSeq]),
- {ok, DbStartTime} = couch_db:ensure_full_commit(Target),
- DbStartTime;
- true ->
- ?LOG_DEBUG("target doesn't need a full commit", []),
- InstanceStartTime
- end.
-
-ensure_full_commit(#http_db{headers = Headers} = Source, RequiredSeq) ->
- Headers1 = [
- {"Content-Length", 0} |
- couch_util:proplist_apply_field(
- {"Content-Type", "application/json"}, Headers)
- ],
- Req = Source#http_db{
- resource = "_ensure_full_commit",
- method = post,
- qs = [{seq, RequiredSeq}],
- headers = Headers1
- },
- {ResultProps} = couch_rep_httpc:request(Req),
- case couch_util:get_value(<<"ok">>, ResultProps) of
- true ->
- couch_util:get_value(<<"instance_start_time">>, ResultProps);
- undefined ->
- nil
- end;
-ensure_full_commit(Source, RequiredSeq) ->
- {ok, NewDb} = couch_db:open_int(Source#db.name, []),
- CommitSeq = couch_db:get_committed_update_seq(NewDb),
- InstanceStartTime = NewDb#db.instance_start_time,
- couch_db:close(NewDb),
- if RequiredSeq > CommitSeq ->
- ?LOG_DEBUG("source needs a full commit: required ~p committed ~p",
- [RequiredSeq, CommitSeq]),
- {ok, DbStartTime} = couch_db:ensure_full_commit(Source),
- DbStartTime;
- true ->
- ?LOG_DEBUG("source doesn't need a full commit", []),
- InstanceStartTime
- end.
-
-update_local_doc(#http_db{} = Db, Doc) ->
- Req = Db#http_db{
- resource = couch_util:encode_doc_id(Doc),
- method = put,
- body = couch_doc:to_json_obj(Doc, [attachments]),
- headers = [{"x-couch-full-commit", "false"} | Db#http_db.headers]
- },
- {ResponseMembers} = couch_rep_httpc:request(Req),
- Rev = couch_util:get_value(<<"rev">>, ResponseMembers),
- couch_doc:parse_rev(Rev);
-update_local_doc(Db, Doc) ->
- {ok, Result} = couch_db:update_doc(Db, Doc, [delay_commit]),
- Result.
-
-up_to_date(#http_db{}, _Seq) ->
- true;
-up_to_date(Source, Seq) ->
- {ok, NewDb} = couch_db:open_int(Source#db.name, []),
- T = NewDb#db.update_seq == Seq,
- couch_db:close(NewDb),
- T.
-
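- % e.g. "http://user:pass@proxy.local:8080" yields proxy_host/proxy_port plus the optional proxy_user/proxy_password options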
-parse_proxy_params(ProxyUrl) when is_binary(ProxyUrl) ->
- parse_proxy_params(?b2l(ProxyUrl));
-parse_proxy_params([]) ->
- [];
-parse_proxy_params(ProxyUrl) ->
- #url{
- host = Host,
- port = Port,
- username = User,
- password = Passwd
- } = ibrowse_lib:parse_url(ProxyUrl),
- [{proxy_host, Host}, {proxy_port, Port}] ++
- case is_list(User) andalso is_list(Passwd) of
- false ->
- [];
- true ->
- [{proxy_user, User}, {proxy_password, Passwd}]
- end.
-
-update_rep_doc({Props} = _RepDoc, KVs) ->
- case couch_util:get_value(<<"_id">>, Props) of
- undefined ->
- % replication triggered by POSTing to _replicate
- ok;
- RepDocId ->
- % replication triggered by adding a Rep Doc to the replicator DB
- {ok, RepDb} = ensure_rep_db_exists(),
- case couch_db:open_doc(RepDb, RepDocId, []) of
- {ok, LatestRepDoc} ->
- update_rep_doc(RepDb, LatestRepDoc, KVs);
- _ ->
- ok
- end,
- couch_db:close(RepDb)
- end.
-
-update_rep_doc(RepDb, #doc{body = {RepDocBody}} = RepDoc, KVs) ->
- NewRepDocBody = lists:foldl(
- fun({<<"_replication_state">> = K, _V} = KV, Body) ->
- Body1 = lists:keystore(K, 1, Body, KV),
- {Mega, Secs, _} = erlang:now(),
- UnixTime = Mega * 1000000 + Secs,
- lists:keystore(
- <<"_replication_state_time">>, 1,
- Body1, {<<"_replication_state_time">>, UnixTime});
- ({K, _V} = KV, Body) ->
- lists:keystore(K, 1, Body, KV)
- end,
- RepDocBody,
- KVs
- ),
- % this may not succeed if the replication doc was deleted just
- % before this update (which is not an error)
- couch_db:update_doc(
- RepDb,
- RepDoc#doc{body = {NewRepDocBody}},
- []
- ).
-
-maybe_set_triggered({RepProps} = RepDoc, RepId) ->
- case couch_util:get_value(<<"_replication_state">>, RepProps) of
- <<"triggered">> ->
- ok;
- _ ->
- update_rep_doc(
- RepDoc,
- [
- {<<"_replication_state">>, <<"triggered">>},
- {<<"_replication_id">>, ?l2b(RepId)}
- ]
- )
- end.
-
-ensure_rep_db_exists() ->
- DbName = ?l2b(couch_config:get("replicator", "db", "_replicator")),
- Opts = [
- {user_ctx, #user_ctx{roles=[<<"_admin">>, <<"_replicator">>]}},
- sys_db
- ],
- case couch_db:open(DbName, Opts) of
- {ok, Db} ->
- Db;
- _Error ->
- {ok, Db} = couch_db:create(DbName, Opts)
- end,
- ok = ensure_rep_ddoc_exists(Db, <<"_design/_replicator">>),
- {ok, Db}.
-
-ensure_rep_ddoc_exists(RepDb, DDocID) ->
- case couch_db:open_doc(RepDb, DDocID, []) of
- {ok, _Doc} ->
- ok;
- _ ->
- DDoc = couch_doc:from_json_obj({[
- {<<"_id">>, DDocID},
- {<<"language">>, <<"javascript">>},
- {<<"validate_doc_update">>, ?REP_DB_DOC_VALIDATE_FUN}
- ]}),
- {ok, _Rev} = couch_db:update_doc(RepDb, DDoc, [])
- end,
- ok.
-
-source_db_update_notifier(#db{name = DbName}) ->
- Server = self(),
- {ok, Notifier} = couch_db_update_notifier:start_link(
- fun({compacted, DbName1}) when DbName1 =:= DbName ->
- ok = gen_server:cast(Server, reopen_source_db);
- (_) ->
- ok
- end),
- Notifier;
-source_db_update_notifier(_) ->
- nil.
-
-target_db_update_notifier(#db{name = DbName}) ->
- Server = self(),
- {ok, Notifier} = couch_db_update_notifier:start_link(
- fun({compacted, DbName1}) when DbName1 =:= DbName ->
- ok = gen_server:cast(Server, reopen_target_db);
- (_) ->
- ok
- end),
- Notifier;
-target_db_update_notifier(_) ->
- nil.
diff --git a/1.1.x/src/couchdb/couch_rep_att.erl b/1.1.x/src/couchdb/couch_rep_att.erl
deleted file mode 100644
index 6bb993a8..00000000
--- a/1.1.x/src/couchdb/couch_rep_att.erl
+++ /dev/null
@@ -1,119 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_att).
-
--export([convert_stub/2, cleanup/0]).
-
--include("couch_db.hrl").
-
-convert_stub(#att{data=stub, name=Name} = Attachment,
- {#http_db{} = Db, Id, Rev}) ->
- {Pos, [RevId|_]} = Rev,
- Request = Db#http_db{
- resource = lists:flatten([couch_util:url_encode(Id), "/",
- couch_util:url_encode(Name)]),
- qs = [{rev, couch_doc:rev_to_str({Pos,RevId})}]
- },
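- % fetch lazily: the fun below streams the attachment body only when called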
- Ref = make_ref(),
- RcvFun = fun() -> attachment_receiver(Ref, Request) end,
- Attachment#att{data=RcvFun}.
-
-cleanup() ->
- receive
- {ibrowse_async_response, _, _} ->
- %% TODO: maybe log; we didn't expect to have data here
- cleanup();
- {ibrowse_async_response_end, _} ->
- cleanup();
- {ibrowse_async_headers, _, _, _} ->
- cleanup()
- after 0 ->
- erase(),
- ok
- end.
-
-% internal funs
-
-attachment_receiver(Ref, Request) ->
- try case get(Ref) of
- undefined ->
- {ReqId, ContentEncoding} = start_http_request(Request),
- put(Ref, {ReqId, ContentEncoding}),
- receive_data(Ref, ReqId, ContentEncoding);
- {ReqId, ContentEncoding} ->
- receive_data(Ref, ReqId, ContentEncoding)
- end
- catch
- throw:{attachment_request_failed, _} ->
- case {Request#http_db.retries, Request#http_db.pause} of
- {0, _} ->
- ?LOG_INFO("request for ~p failed", [Request#http_db.resource]),
- throw({attachment_request_failed, max_retries_reached});
- {N, Pause} when N > 0 ->
- ?LOG_INFO("request for ~p timed out, retrying in ~p seconds",
- [Request#http_db.resource, Pause/1000]),
- timer:sleep(Pause),
- cleanup(),
- attachment_receiver(Ref, Request#http_db{retries = N-1})
- end
- end.
-
-receive_data(Ref, ReqId, ContentEncoding) ->
- receive
- {ibrowse_async_response, ReqId, {chunk_start,_}} ->
- receive_data(Ref, ReqId, ContentEncoding);
- {ibrowse_async_response, ReqId, chunk_end} ->
- receive_data(Ref, ReqId, ContentEncoding);
- {ibrowse_async_response, ReqId, {error, Err}} ->
- ?LOG_ERROR("streaming attachment ~p failed with ~p", [ReqId, Err]),
- throw({attachment_request_failed, Err});
- {ibrowse_async_response, ReqId, Data} ->
- % ?LOG_DEBUG("got ~p bytes for ~p", [size(Data), ReqId]),
- Data;
- {ibrowse_async_response_end, ReqId} ->
- ?LOG_ERROR("streaming att. ended but more data requested ~p", [ReqId]),
- throw({attachment_request_failed, premature_end})
- after 31000 ->
- throw({attachment_request_failed, timeout})
- end.
-
-start_http_request(Req) ->
- %% set stream_to here because self() has changed
- Req2 = Req#http_db{options = [{stream_to,self()} | Req#http_db.options]},
- {ibrowse_req_id, ReqId} = couch_rep_httpc:request(Req2),
- receive {ibrowse_async_headers, ReqId, Code, Headers} ->
- case validate_headers(Req2, list_to_integer(Code), Headers) of
- {ok, ContentEncoding} ->
- {ReqId, ContentEncoding};
- {ok, ContentEncoding, NewReqId} ->
- {NewReqId, ContentEncoding}
- end
- after 10000 ->
- throw({attachment_request_failed, timeout})
- end.
-
-validate_headers(_Req, 200, Headers) ->
- MochiHeaders = mochiweb_headers:make(Headers),
- {ok, mochiweb_headers:get_value("Content-Encoding", MochiHeaders)};
-validate_headers(Req, Code, Headers) when Code > 299, Code < 400 ->
- NewReq = couch_rep_httpc:redirected_request(Code, Headers, Req),
- {ibrowse_req_id, ReqId} = couch_rep_httpc:request(NewReq),
- receive {ibrowse_async_headers, ReqId, NewCode, NewHeaders} ->
- {ok, Encoding} = validate_headers(NewReq, list_to_integer(NewCode),
- NewHeaders)
- end,
- {ok, Encoding, ReqId};
-validate_headers(Req, Code, _Headers) ->
- #http_db{url=Url, resource=Resource} = Req,
- ?LOG_ERROR("got ~p for ~s~s", [Code, Url, Resource]),
- throw({attachment_request_failed, {bad_code, Code}}).
diff --git a/1.1.x/src/couchdb/couch_rep_changes_feed.erl b/1.1.x/src/couchdb/couch_rep_changes_feed.erl
deleted file mode 100644
index 1c298937..00000000
--- a/1.1.x/src/couchdb/couch_rep_changes_feed.erl
+++ /dev/null
@@ -1,503 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_changes_feed).
--behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
- code_change/3]).
-
--export([start_link/4, next/1, stop/1]).
-
--define(BUFFER_SIZE, 1000).
--define(DOC_IDS_FILTER_NAME, "_doc_ids").
-
--include("couch_db.hrl").
--include("../ibrowse/ibrowse.hrl").
-
--record (state, {
- changes_from = nil,
- changes_loop = nil,
- init_args,
- last_seq,
- conn = nil,
- reqid = nil,
- complete = false,
- count = 0,
- partial_chunk = <<>>,
- reply_to = nil,
- rows = queue:new(),
- doc_ids = nil
-}).
-
--import(couch_util, [
- get_value/2,
- get_value/3
-]).
-
-start_link(Parent, Source, StartSeq, PostProps) ->
- gen_server:start_link(?MODULE, [Parent, Source, StartSeq, PostProps], []).
-
-next(Server) ->
- gen_server:call(Server, next_changes, infinity).
-
-stop(Server) ->
- catch gen_server:call(Server, stop),
- ok.
-
-init([Parent, #http_db{headers = Headers0} = Source, Since, PostProps]) ->
- process_flag(trap_exit, true),
- Feed = case get_value(<<"continuous">>, PostProps, false) of
- false ->
- normal;
- true ->
- continuous
- end,
- BaseQS = [
- {"style", all_docs},
- {"heartbeat", 10000},
- {"since", Since},
- {"feed", Feed}
- ],
- {QS, Method, Body, Headers} = case get_value(<<"doc_ids">>, PostProps) of
- undefined ->
- {maybe_add_filter_qs_params(PostProps, BaseQS), get, nil, Headers0};
- DocIds when is_list(DocIds) ->
- Headers1 = [{"Content-Type", "application/json"} | Headers0],
- QS1 = [{"filter", ?l2b(?DOC_IDS_FILTER_NAME)} | BaseQS],
- {QS1, post, {[{<<"doc_ids">>, DocIds}]}, Headers1}
- end,
- Pid = couch_rep_httpc:spawn_link_worker_process(Source),
- Req = Source#http_db{
- method = Method,
- body = Body,
- resource = "_changes",
- qs = QS,
- conn = Pid,
- options = [{stream_to, {self(), once}}] ++
- lists:keydelete(inactivity_timeout, 1, Source#http_db.options),
- headers = Headers -- [{"Accept-Encoding", "gzip"}]
- },
- {ibrowse_req_id, ReqId} = couch_rep_httpc:request(Req),
- Args = [Parent, Req, Since, PostProps],
- State = #state{
- conn = Pid,
- last_seq = Since,
- reqid = ReqId,
- init_args = Args,
- doc_ids = get_value(<<"doc_ids">>, PostProps, nil)
- },
-
- receive
- {ibrowse_async_headers, ReqId, "200", _} ->
- ibrowse:stream_next(ReqId),
- {ok, State};
- {ibrowse_async_headers, ReqId, Code, Hdrs}
- when Code =:= "301"; Code =:= "302"; Code =:= "303" ->
- {ReqId2, Req2} = redirect_req(Req, Code, Hdrs),
- receive
- {ibrowse_async_headers, ReqId2, "200", _} ->
- {ok, State#state{
- conn = Req2#http_db.conn,
- reqid = ReqId2,
- init_args = [Parent, Req2, Since, PostProps]}};
- {ibrowse_async_headers, ReqId2, "405", _} when Method =:= post ->
- {ReqId3, Req3} = req_no_builtin_doc_ids(Req2, ReqId2),
- receive
- {ibrowse_async_headers, ReqId3, "200", _} ->
- {ok, State#state{
- conn = Req3#http_db.conn,
- reqid = ReqId3,
- init_args = [Parent, Req3, Since, PostProps]}}
- after 30000 ->
- {stop, changes_timeout}
- end
- after 30000 ->
- {stop, changes_timeout}
- end;
- {ibrowse_async_headers, ReqId, "404", _} ->
- stop_link_worker(Pid),
- ?LOG_INFO("source doesn't have _changes, trying _all_docs_by_seq", []),
- Self = self(),
- BySeqPid = spawn_link(fun() -> by_seq_loop(Self, Source, Since) end),
- {ok, State#state{changes_loop = BySeqPid}};
- {ibrowse_async_headers, ReqId, "405", _} when Method =:= post ->
- {ReqId2, Req2} = req_no_builtin_doc_ids(Req, ReqId),
- receive
- {ibrowse_async_headers, ReqId2, "200", _} ->
- {ok, State#state{
- conn = Req2#http_db.conn,
- reqid = ReqId2,
- init_args = [Parent, Req2, Since, PostProps]}};
- {ibrowse_async_headers, ReqId, Code, Hdrs}
- when Code =:= "301"; Code =:= "302"; Code =:= "303" ->
- {ReqId3, Req3} = redirect_req(Req2, Code, Hdrs),
- receive
- {ibrowse_async_headers, ReqId3, "200", _} ->
- {ok, State#state{
- conn = Req3#http_db.conn,
- reqid = ReqId3,
- init_args = [Parent, Req3, Since, PostProps]}}
- after 30000 ->
- {stop, changes_timeout}
- end
- after 30000 ->
- {stop, changes_timeout}
- end;
- {ibrowse_async_headers, ReqId, Code, _} ->
- {stop, {changes_error_code, list_to_integer(Code)}}
- after 10000 ->
- {stop, changes_timeout}
- end;
-
-init([_Parent, Source, Since, PostProps] = InitArgs) ->
- process_flag(trap_exit, true),
- Server = self(),
- Filter = case get_value(<<"doc_ids">>, PostProps) of
- undefined ->
- ?b2l(get_value(<<"filter">>, PostProps, <<>>));
- DocIds when is_list(DocIds) ->
- ?DOC_IDS_FILTER_NAME
- end,
- ChangesArgs = #changes_args{
- style = all_docs,
- since = Since,
- filter = Filter,
- feed = case get_value(<<"continuous">>, PostProps, false) of
- true ->
- "continuous";
- false ->
- "normal"
- end,
- timeout = infinity
- },
- ChangesPid = spawn_link(fun() ->
- ChangesFeedFun = couch_changes:handle_changes(
- ChangesArgs,
- {json_req, filter_json_req(Filter, Source, PostProps)},
- Source
- ),
- ChangesFeedFun(fun({change, Change, _}, _) ->
- gen_server:call(Server, {add_change, Change}, infinity);
- (_, _) ->
- ok
- end)
- end),
- {ok, #state{changes_loop=ChangesPid, init_args=InitArgs}}.
-
-maybe_add_filter_qs_params(PostProps, BaseQS) ->
- case get_value(<<"filter">>, PostProps) of
- undefined ->
- BaseQS;
- FilterName ->
- {Params} = get_value(<<"query_params">>, PostProps, {[]}),
- lists:foldr(
- fun({K, V}, QSAcc) ->
- Ks = couch_util:to_list(K),
- case proplists:is_defined(Ks, QSAcc) of
- true ->
- QSAcc;
- false ->
- [{Ks, V} | QSAcc]
- end
- end,
- [{"filter", FilterName} | BaseQS],
- Params
- )
- end.
-
-filter_json_req([], _Db, _PostProps) ->
- {[]};
-filter_json_req(?DOC_IDS_FILTER_NAME, _Db, PostProps) ->
- {[{<<"doc_ids">>, get_value(<<"doc_ids">>, PostProps)}]};
-filter_json_req(FilterName, Db, PostProps) ->
- {Query} = get_value(<<"query_params">>, PostProps, {[]}),
- {ok, Info} = couch_db:get_db_info(Db),
- % simulate a request to db_name/_changes
- {[
- {<<"info">>, {Info}},
- {<<"id">>, null},
- {<<"method">>, 'GET'},
- {<<"path">>, [couch_db:name(Db), <<"_changes">>]},
- {<<"query">>, {[{<<"filter">>, FilterName} | Query]}},
- {<<"headers">>, []},
- {<<"body">>, []},
- {<<"peer">>, <<"replicator">>},
- {<<"form">>, []},
- {<<"cookie">>, []},
- {<<"userCtx">>, couch_util:json_user_ctx(Db)}
- ]}.
-
-handle_call({add_change, Row}, From, State) ->
- handle_add_change(Row, From, State);
-
-handle_call(next_changes, From, State) ->
- handle_next_changes(From, State);
-
-handle_call(stop, _From, State) ->
- {stop, normal, ok, State}.
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info({ibrowse_async_headers, Id, Code, Hdrs}, #state{reqid=Id}=State) ->
- handle_headers(list_to_integer(Code), Hdrs, State);
-
-handle_info({ibrowse_async_response, Id, {error, sel_conn_closed}},
- #state{reqid=Id}=State) ->
- handle_retry(State);
-
-handle_info({ibrowse_async_response, Id, {error, connection_closed}},
- #state{reqid=Id}=State) ->
- handle_retry(State);
-
-handle_info({ibrowse_async_response, Id, {error,E}}, #state{reqid=Id}=State) ->
- {stop, {error, E}, State};
-
-handle_info({ibrowse_async_response, Id, Chunk}, #state{reqid=Id}=State) ->
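- % the changes feed is newline-delimited JSON; split the chunk into lines, dropping separating commas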
- Messages = [M || M <- re:split(Chunk, ",?\n", [trim]), M =/= <<>>],
- handle_messages(Messages, State);
-
-handle_info({ibrowse_async_response_end, Id}, #state{reqid=Id} = State) ->
- handle_feed_completion(State);
-
-handle_info({'EXIT', From, normal}, #state{changes_loop=From} = State) ->
- handle_feed_completion(State);
-
-handle_info({'EXIT', From, normal}, #state{conn=From, complete=true} = State) ->
- {noreply, State};
-
-handle_info({'EXIT', From, Reason}, #state{changes_loop=From} = State) ->
- ?LOG_ERROR("changes_loop died with reason ~p", [Reason]),
- {stop, changes_loop_died, State};
-
-handle_info({'EXIT', From, Reason}, State) ->
- ?LOG_ERROR("changes loop, process ~p died with reason ~p", [From, Reason]),
- {stop, {From, Reason}, State};
-
-handle_info(Msg, #state{init_args = InitArgs} = State) ->
- case Msg of
- changes_timeout ->
- [_, #http_db{url = Url} | _] = InitArgs,
- ?LOG_ERROR("changes loop timeout, no data received from ~s",
- [couch_util:url_strip_password(Url)]);
- _ ->
- ?LOG_ERROR("changes loop received unexpected message ~p", [Msg])
- end,
- {stop, Msg, State}.
-
-terminate(_Reason, State) ->
- #state{
- changes_loop = ChangesPid,
- conn = Conn
- } = State,
- if is_pid(ChangesPid) -> exit(ChangesPid, stop); true -> ok end,
- stop_link_worker(Conn).
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-% internal funs
-
-handle_add_change(Row, From, #state{reply_to=nil} = State) ->
- {Rows2, Count2} = queue_changes_row(Row, State),
- NewState = State#state{count = Count2, rows = Rows2},
- if Count2 =< ?BUFFER_SIZE ->
- {reply, ok, NewState};
- true ->
- {noreply, NewState#state{changes_from=From}}
- end;
-handle_add_change(Row, _From, #state{count=0} = State) ->
- gen_server:reply(State#state.reply_to, [Row]),
- {reply, ok, State#state{reply_to=nil}}.
-
-handle_next_changes(From, #state{count=0}=State) ->
- if State#state.complete ->
- {stop, normal, complete, State};
- true ->
- {noreply, State#state{reply_to=From}}
- end;
-handle_next_changes(_From, State) ->
- #state{
- changes_from = ChangesFrom,
- rows = Rows
- } = State,
- NewState = State#state{count=0, changes_from=nil, rows=queue:new()},
- maybe_stream_next(NewState),
- if ChangesFrom =/= nil -> gen_server:reply(ChangesFrom, ok); true -> ok end,
- {reply, queue:to_list(Rows), NewState}.
-
-handle_headers(200, _, State) ->
- maybe_stream_next(State),
- {noreply, State};
-handle_headers(Code, Hdrs, #state{init_args = InitArgs} = State)
- when Code =:= 301 ; Code =:= 302 ; Code =:= 303 ->
- stop_link_worker(State#state.conn),
- [Parent, Source, Since, PostProps] = InitArgs,
- Source2 = couch_rep_httpc:redirected_request(Code, Hdrs, Source),
- Pid2 = couch_rep_httpc:spawn_link_worker_process(Source2),
- Source3 = Source2#http_db{conn = Pid2},
- {ibrowse_req_id, ReqId} = couch_rep_httpc:request(Source3),
- InitArgs2 = [Parent, Source3, Since, PostProps],
- {noreply, State#state{conn=Pid2, reqid=ReqId, init_args=InitArgs2}};
-handle_headers(Code, Hdrs, State) ->
-    % Code is an integer here (see handle_info above), so format it with ~p
-    ?LOG_ERROR("replicator changes feed failed with code ~p and headers~n~p",
-        [Code, Hdrs]),
- {stop, {error, Code}, State}.
-
-handle_messages([], State) ->
- maybe_stream_next(State),
- {noreply, State};
-handle_messages([<<"{\"results\":[">>|Rest], State) ->
- handle_messages(Rest, State);
-handle_messages([<<"]">>, <<"\"last_seq\":", _/binary>>], State) ->
- handle_feed_completion(State);
-handle_messages([<<"{\"last_seq\":", _/binary>>], State) ->
- handle_feed_completion(State);
-handle_messages([Chunk|Rest], #state{partial_chunk = Partial} = State) ->
- NewState = try
- Row = {Props} = decode_row(<<Partial/binary, Chunk/binary>>),
- case State of
- #state{reply_to=nil} ->
- {Rows2, Count2} = queue_changes_row(Row, State),
- State#state{
- last_seq = couch_util:get_value(<<"seq">>, Props),
- partial_chunk = <<>>,
- rows = Rows2,
- count = Count2
- };
- #state{count=0, reply_to=From}->
- gen_server:reply(From, [Row]),
- State#state{reply_to = nil, partial_chunk = <<>>}
- end
- catch
- throw:{invalid_json, Bad} ->
- State#state{partial_chunk = Bad}
- end,
- handle_messages(Rest, NewState).
-
-handle_feed_completion(#state{reply_to=nil} = State)->
- {noreply, State#state{complete=true}};
-handle_feed_completion(#state{count=0} = State) ->
- gen_server:reply(State#state.reply_to, complete),
- {stop, normal, State}.
-
-handle_retry(State) ->
- ?LOG_DEBUG("retrying changes feed because our connection closed", []),
- #state{
- count = Count,
- init_args = [_, Source, _, PostProps],
- last_seq = Since,
- reply_to = ReplyTo,
- rows = Rows
- } = State,
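-    % the feed restarts from the last seq we decoded; rows already buffered
-    % are carried over into the merged state below, so already-delivered
-    % rows are not requested again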
- case init([nil, Source, Since, PostProps]) of
- {ok, State1} ->
- MergedState = State1#state{
- count = Count,
- reply_to = ReplyTo,
- rows = Rows
- },
- {noreply, MergedState};
- _ ->
- {stop, {error, connection_closed}, State}
- end.
-
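-% by_seq_loop pages through _all_docs_by_seq (the predecessor of the
-% _changes API on older CouchDB servers) 1000 rows at a time, exiting
-% normally once the source returns no more rows.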
-by_seq_loop(Server, Source, StartSeq) ->
- Req = Source#http_db{
- resource = "_all_docs_by_seq",
- qs = [{limit, 1000}, {startkey, StartSeq}]
- },
- {Results} = couch_rep_httpc:request(Req),
- Rows = couch_util:get_value(<<"rows">>, Results),
- if Rows =:= [] -> exit(normal); true -> ok end,
- EndSeq = lists:foldl(fun({RowInfoList}, _) ->
- Id = couch_util:get_value(<<"id">>, RowInfoList),
- Seq = couch_util:get_value(<<"key">>, RowInfoList),
- {RowProps} = couch_util:get_value(<<"value">>, RowInfoList),
- RawRevs = [
- couch_util:get_value(<<"rev">>, RowProps),
- couch_util:get_value(<<"conflicts">>, RowProps, []),
- couch_util:get_value(<<"deleted_conflicts">>, RowProps, [])
- ],
- ParsedRevs = couch_doc:parse_revs(lists:flatten(RawRevs)),
- Change = {[
- {<<"seq">>, Seq},
- {<<"id">>, Id},
- {<<"changes">>, [{[{<<"rev">>,R}]} || R <- ParsedRevs]}
- ]},
- gen_server:call(Server, {add_change, Change}, infinity),
- Seq
- end, 0, Rows),
- by_seq_loop(Server, Source, EndSeq).
-
-decode_row(<<",", Rest/binary>>) ->
- decode_row(Rest);
-decode_row(Row) ->
- ?JSON_DECODE(Row).
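-% decode_row tolerates a leading comma left over from the row separator, so
-% e.g. <<",{\"seq\":2}">> and <<"{\"seq\":2}">> decode to the same term.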
-
-maybe_stream_next(#state{reqid=nil}) ->
- ok;
-maybe_stream_next(#state{complete=false, count=N} = S) when N < ?BUFFER_SIZE ->
- timer:cancel(get(timeout)),
- {ok, Timeout} = timer:send_after(31000, changes_timeout),
- put(timeout, Timeout),
- ibrowse:stream_next(S#state.reqid);
-maybe_stream_next(_) ->
- timer:cancel(get(timeout)).
-
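-% Unlink before stopping the worker, then flush any 'EXIT' message that may
-% already be in the mailbox so callers never see a stray exit signal.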
-stop_link_worker(Conn) when is_pid(Conn) ->
- unlink(Conn),
- receive {'EXIT', Conn, _} -> ok after 0 -> ok end,
- catch ibrowse:stop_worker_process(Conn);
-stop_link_worker(_) ->
- ok.
-
-redirect_req(#http_db{conn = WorkerPid} = Req, Code, Headers) ->
- stop_link_worker(WorkerPid),
- Req2 = couch_rep_httpc:redirected_request(Code, Headers, Req),
- WorkerPid2 = couch_rep_httpc:spawn_link_worker_process(Req2),
- Req3 = Req2#http_db{conn = WorkerPid2},
- {ibrowse_req_id, ReqId} = couch_rep_httpc:request(Req3),
- {ReqId, Req3}.
-
-req_no_builtin_doc_ids(#http_db{conn = WorkerPid, qs = QS} = Req, ReqId) ->
-    % CouchDB versions prior to 1.1.0 lack the built-in filter _doc_ids and
-    % don't allow POSTing to /database/_changes
- purge_req_messages(ReqId),
- stop_link_worker(WorkerPid),
- Req2 = Req#http_db{method = get, qs = lists:keydelete("filter", 1, QS)},
- WorkerPid2 = couch_rep_httpc:spawn_link_worker_process(Req2),
- Req3 = Req2#http_db{conn = WorkerPid2},
- {ibrowse_req_id, ReqId2} = couch_rep_httpc:request(Req3),
- {ReqId2, Req3}.
-
-purge_req_messages(ReqId) ->
- ibrowse:stream_next(ReqId),
- receive
- {ibrowse_async_response, ReqId, {error, _}} ->
- ok;
- {ibrowse_async_response, ReqId, _Data} ->
- purge_req_messages(ReqId);
- {ibrowse_async_response_end, ReqId} ->
- ok
- end.
-
-queue_changes_row(Row, #state{doc_ids = nil, count = Count, rows = Rows}) ->
- {queue:in(Row, Rows), Count + 1};
-queue_changes_row({RowProps} = Row,
- #state{doc_ids = Ids, count = Count, rows = Rows}) ->
- case lists:member(get_value(<<"id">>, RowProps), Ids) of
- true ->
- {queue:in(Row, Rows), Count + 1};
- false ->
- {Rows, Count}
- end.
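-% For illustration: with doc_ids = [<<"a">>], a changes row for id <<"b">>
-% is dropped and the {Rows, Count} accumulator is returned unchanged.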
diff --git a/1.1.x/src/couchdb/couch_rep_httpc.erl b/1.1.x/src/couchdb/couch_rep_httpc.erl
deleted file mode 100644
index bbe390a9..00000000
--- a/1.1.x/src/couchdb/couch_rep_httpc.erl
+++ /dev/null
@@ -1,317 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_httpc).
--include("couch_db.hrl").
--include("../ibrowse/ibrowse.hrl").
-
--export([db_exists/1, db_exists/2]).
--export([full_url/1, request/1, redirected_request/3]).
--export([spawn_worker_process/1, spawn_link_worker_process/1]).
--export([ssl_options/1]).
-
-request(#http_db{} = Req) ->
- do_request(Req).
-
-do_request(#http_db{url=Url} = Req) when is_binary(Url) ->
- do_request(Req#http_db{url = ?b2l(Url)});
-
-do_request(Req) ->
- #http_db{
- auth = Auth,
- body = B,
- conn = Conn,
- headers = Headers0,
- method = Method,
- options = Opts,
- qs = QS
- } = Req,
- Url = full_url(Req),
- Headers = case couch_util:get_value(<<"oauth">>, Auth) of
- undefined ->
- Headers0;
- {OAuthProps} ->
- [oauth_header(Url, QS, Method, OAuthProps) | Headers0]
- end,
- Body = case B of
- {Fun, InitialState} when is_function(Fun) ->
- {Fun, InitialState};
- nil ->
- [];
- _Else ->
- iolist_to_binary(?JSON_ENCODE(B))
- end,
- Resp = case Conn of
- nil ->
- ibrowse:send_req(Url, Headers, Method, Body, Opts, infinity);
- _ ->
- ibrowse:send_req_direct(Conn, Url, Headers, Method, Body, Opts, infinity)
- end,
- process_response(Resp, Req).
-
-db_exists(Req) ->
- db_exists(Req, Req#http_db.url).
-
-db_exists(Req, true) ->
- db_exists(Req, Req#http_db.url, true);
-
-db_exists(Req, false) ->
- db_exists(Req, Req#http_db.url, false);
-
-db_exists(Req, CanonicalUrl) ->
- db_exists(Req, CanonicalUrl, false).
-
-db_exists(Req, CanonicalUrl, CreateDB) ->
- #http_db{
- auth = Auth,
- headers = Headers0,
- options = Options,
- url = Url
- } = Req,
- HeadersFun = fun(Method) ->
- case couch_util:get_value(<<"oauth">>, Auth) of
- undefined ->
- Headers0;
- {OAuthProps} ->
- [oauth_header(Url, [], Method, OAuthProps) | Headers0]
- end
- end,
- case CreateDB of
- true ->
- Headers = [{"Content-Length", 0} | HeadersFun(put)],
- catch ibrowse:send_req(Url, Headers, put, [], Options);
- _Else -> ok
- end,
- case catch ibrowse:send_req(Url, HeadersFun(head), head, [], Options) of
- {ok, "200", _, _} ->
- config_http(CanonicalUrl),
- Req#http_db{url = CanonicalUrl};
- {ok, "301", RespHeaders, _} ->
- RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
- db_exists(Req#http_db{url = RedirectUrl}, RedirectUrl);
- {ok, "302", RespHeaders, _} ->
- RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
- db_exists(Req#http_db{url = RedirectUrl}, CanonicalUrl);
- {ok, "303", RespHeaders, _} ->
- RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
- db_exists(Req#http_db{method = get, url = RedirectUrl}, CanonicalUrl);
- {ok, "401", _, _} ->
- throw({unauthorized, ?l2b(Url)});
- Error ->
- ?LOG_DEBUG("DB at ~s could not be found because ~p", [Url, Error]),
- throw({db_not_found, ?l2b(Url)})
- end.
-
-config_http(Url) ->
- #url{host = Host, port = Port} = ibrowse_lib:parse_url(Url),
- ok = ibrowse:set_max_sessions(Host, Port, list_to_integer(
- couch_config:get("replicator", "max_http_sessions", "20"))),
- ok = ibrowse:set_max_pipeline_size(Host, Port, list_to_integer(
- couch_config:get("replicator", "max_http_pipeline_size", "50"))),
- ok = couch_config:register(
- fun("replicator", "max_http_sessions", MaxSessions) ->
- ibrowse:set_max_sessions(Host, Port, list_to_integer(MaxSessions));
- ("replicator", "max_http_pipeline_size", PipeSize) ->
- ibrowse:set_max_pipeline_size(Host, Port, list_to_integer(PipeSize))
- end).
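-% These limits correspond to local.ini entries such as (defaults shown):
-%   [replicator]
-%   max_http_sessions = 20
-%   max_http_pipeline_size = 50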
-
-redirect_url(RespHeaders, OrigUrl) ->
- MochiHeaders = mochiweb_headers:make(RespHeaders),
- RedUrl = mochiweb_headers:get_value("Location", MochiHeaders),
- #url{
- host = Host, host_type = HostType, port = Port,
- path = Path, protocol = Proto
- } = ibrowse_lib:parse_url(RedUrl),
- #url{username = User, password = Passwd} = ibrowse_lib:parse_url(OrigUrl),
- Creds = case is_list(User) andalso is_list(Passwd) of
- true ->
- User ++ ":" ++ Passwd ++ "@";
- false ->
- []
- end,
- HostPart = case HostType of
- ipv6_address ->
- "[" ++ Host ++ "]";
- _ ->
- Host
- end,
- atom_to_list(Proto) ++ "://" ++ Creds ++ HostPart ++ ":" ++
- integer_to_list(Port) ++ Path.
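-% A sketch of the rewrite, assuming ibrowse fills in the default port 80:
-%   redirect_url([{"Location", "http://new.example.com/db"}],
-%                "http://user:pass@old.example.com/db")
-%   => "http://user:pass@new.example.com:80/db"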
-
-full_url(#http_db{url=Url} = Req) when is_binary(Url) ->
- full_url(Req#http_db{url = ?b2l(Url)});
-
-full_url(#http_db{qs=[]} = Req) ->
- Req#http_db.url ++ Req#http_db.resource;
-
-full_url(Req) ->
- #http_db{
- url = Url,
- resource = Resource,
- qs = QS
- } = Req,
- QStr = lists:map(fun({K,V}) -> io_lib:format("~s=~s",
- [couch_util:to_list(K), couch_util:to_list(V)]) end, QS),
- lists:flatten([Url, Resource, "?", string:join(QStr, "&")]).
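-% e.g. full_url(#http_db{url = "http://host:5984/db/", resource = "_changes",
-%                        qs = [{"feed", "continuous"}]})
-%      => "http://host:5984/db/_changes?feed=continuous"
-% Note that keys and values are interpolated verbatim; they are assumed to
-% be URL-safe already.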
-
-process_response({ok, Status, Headers, Body}, Req) ->
- Code = list_to_integer(Status),
- if Code =:= 200; Code =:= 201 ->
- ?JSON_DECODE(maybe_decompress(Headers, Body));
- Code =:= 301; Code =:= 302 ; Code =:= 303 ->
- do_request(redirected_request(Code, Headers, Req));
- Code =:= 409 ->
- throw(conflict);
- Code >= 400, Code < 500 ->
- ?JSON_DECODE(maybe_decompress(Headers, Body));
- Code =:= 500; Code =:= 502; Code =:= 503 ->
- #http_db{pause = Pause, retries = Retries} = Req,
-        ?LOG_INFO("retrying couch_rep_httpc request in ~p seconds " ++
-            "due to remote server error: ~p Body ~s", [Pause/1000, Code,
-            Body]),
- timer:sleep(Pause),
- do_request(Req#http_db{retries = Retries-1, pause = 2*Pause});
- true ->
- exit({http_request_failed, ?l2b(["unhandled response code ", Status])})
- end;
-
-process_response({ibrowse_req_id, Id}, _Req) ->
- {ibrowse_req_id, Id};
-
-process_response({error, _Reason}, #http_db{url=Url, retries=0}) ->
-    ?LOG_ERROR("couch_rep_httpc request failed, retries exhausted: ~s", [Url]),
- exit({http_request_failed, ?l2b(["failed to replicate ", Url])});
-process_response({error, Reason}, Req) ->
- #http_db{
- method = Method,
- retries = Retries,
- pause = Pause
- } = Req,
- ShortReason = case Reason of
- sel_conn_closed ->
- connection_closed;
- {'EXIT', {noproc, _}} ->
- noproc;
- {'EXIT', {normal, _}} ->
- normal;
- Else ->
- Else
- end,
- ?LOG_DEBUG("retrying couch_rep_httpc ~p request in ~p seconds due to " ++
- "{error, ~p}", [Method, Pause/1000, ShortReason]),
- timer:sleep(Pause),
- if Reason == worker_is_dead ->
- C = spawn_link_worker_process(Req),
- do_request(Req#http_db{retries = Retries-1, pause = 2*Pause, conn=C});
- true ->
- do_request(Req#http_db{retries = Retries-1, pause = 2*Pause})
- end.
-
-redirected_request(Code, Headers, Req) ->
- RedirectUrl = redirect_url(Headers, Req#http_db.url),
- {Base, QStr, _} = mochiweb_util:urlsplit_path(RedirectUrl),
- QS = mochiweb_util:parse_qs(QStr),
- ReqHeaders = case couch_util:get_value(<<"oauth">>, Req#http_db.auth) of
- undefined ->
- Req#http_db.headers;
- _Else ->
- lists:keydelete("Authorization", 1, Req#http_db.headers)
- end,
- Req#http_db{
- method = case couch_util:to_integer(Code) of
- 303 -> get;
- _ -> Req#http_db.method
- end,
- url = Base,
- resource = "",
- qs = QS,
- headers = ReqHeaders
- }.
-
-spawn_worker_process(Req) ->
- Url = ibrowse_lib:parse_url(Req#http_db.url),
- {ok, Pid} = ibrowse_http_client:start(Url),
- Pid.
-
-spawn_link_worker_process(Req) ->
- {ok, Pid} = ibrowse:spawn_link_worker_process(Req#http_db.url),
- Pid.
-
-maybe_decompress(Headers, Body) ->
- MochiHeaders = mochiweb_headers:make(Headers),
- case mochiweb_headers:get_value("Content-Encoding", MochiHeaders) of
- "gzip" ->
- zlib:gunzip(Body);
- _ ->
- Body
- end.
-
-oauth_header(Url, QS, Action, Props) ->
- % erlang-oauth doesn't like iolists
- QSL = [{couch_util:to_list(K), ?b2l(?l2b(couch_util:to_list(V)))} ||
- {K,V} <- QS],
- ConsumerKey = ?b2l(couch_util:get_value(<<"consumer_key">>, Props)),
- Token = ?b2l(couch_util:get_value(<<"token">>, Props)),
- TokenSecret = ?b2l(couch_util:get_value(<<"token_secret">>, Props)),
- ConsumerSecret = ?b2l(couch_util:get_value(<<"consumer_secret">>, Props)),
-    SignatureMethodStr = ?b2l(couch_util:get_value(
-        <<"signature_method">>, Props, <<"HMAC-SHA1">>)),
- SignatureMethodAtom = case SignatureMethodStr of
- "PLAINTEXT" ->
- plaintext;
- "HMAC-SHA1" ->
- hmac_sha1;
- "RSA-SHA1" ->
- rsa_sha1
- end,
- Consumer = {ConsumerKey, ConsumerSecret, SignatureMethodAtom},
- Method = case Action of
- get -> "GET";
- post -> "POST";
- put -> "PUT";
- head -> "HEAD"
- end,
- Params = oauth:signed_params(Method, Url, QSL, Consumer, Token, TokenSecret)
- -- QSL,
- {"Authorization", "OAuth " ++ oauth_uri:params_to_header_string(Params)}.
-
-ssl_options(#http_db{url = Url}) ->
- case ibrowse_lib:parse_url(Url) of
- #url{protocol = https} ->
- Depth = list_to_integer(
- couch_config:get("replicator", "ssl_certificate_max_depth", "3")
- ),
- SslOpts = [{depth, Depth} |
- case couch_config:get("replicator", "verify_ssl_certificates") of
- "true" ->
- ssl_verify_options(true);
- _ ->
- ssl_verify_options(false)
- end],
- [{is_ssl, true}, {ssl_options, SslOpts}];
- #url{protocol = http} ->
- []
- end.
-
-ssl_verify_options(Value) ->
- ssl_verify_options(Value, erlang:system_info(otp_release)).
-
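-% Note: OTP release strings are compared lexicographically below, which is
-% sufficient to tell apart the R13 and R14 releases this code targets.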
-ssl_verify_options(true, OTPVersion) when OTPVersion >= "R14" ->
- CAFile = couch_config:get("replicator", "ssl_trusted_certificates_file"),
- [{verify, verify_peer}, {cacertfile, CAFile}];
-ssl_verify_options(false, OTPVersion) when OTPVersion >= "R14" ->
- [{verify, verify_none}];
-ssl_verify_options(true, _OTPVersion) ->
- CAFile = couch_config:get("replicator", "ssl_trusted_certificates_file"),
- [{verify, 2}, {cacertfile, CAFile}];
-ssl_verify_options(false, _OTPVersion) ->
- [{verify, 0}].
diff --git a/1.1.x/src/couchdb/couch_rep_missing_revs.erl b/1.1.x/src/couchdb/couch_rep_missing_revs.erl
deleted file mode 100644
index 9809ca5e..00000000
--- a/1.1.x/src/couchdb/couch_rep_missing_revs.erl
+++ /dev/null
@@ -1,198 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_missing_revs).
--behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
- code_change/3]).
-
--export([start_link/4, next/1, stop/1]).
-
--define(BUFFER_SIZE, 1000).
-
--include("couch_db.hrl").
-
--record (state, {
- changes_loop,
- changes_from = nil,
- parent,
- complete = false,
- count = 0,
- reply_to = nil,
- rows = queue:new(),
- high_source_seq = 0,
- high_missing_seq = 0,
- high_committed_seq = 0
-}).
-
-start_link(Parent, Target, ChangesFeed, PostProps) ->
- gen_server:start_link(?MODULE, [Parent, Target, ChangesFeed, PostProps], []).
-
-next(Server) ->
- gen_server:call(Server, next_missing_revs, infinity).
-
-stop(Server) ->
- gen_server:call(Server, stop).
-
-init([Parent, _Target, ChangesFeed, _PostProps]) ->
- process_flag(trap_exit, true),
- Self = self(),
- Pid = spawn_link(fun() -> changes_loop(Self, ChangesFeed, Parent) end),
- {ok, #state{changes_loop=Pid, parent=Parent}}.
-
-handle_call({add_missing_revs, {HighSeq, Revs}}, From, State) ->
- State#state.parent ! {update_stats, missing_revs, length(Revs)},
- handle_add_missing_revs(HighSeq, Revs, From, State);
-
-handle_call(next_missing_revs, From, State) ->
- handle_next_missing_revs(From, State).
-
-handle_cast({update_committed_seq, N}, State) ->
- if State#state.high_committed_seq < N ->
- ?LOG_DEBUG("missing_revs updating committed seq to ~p", [N]);
- true -> ok end,
- {noreply, State#state{high_committed_seq=N}}.
-
-handle_info({'EXIT', Pid, Reason}, #state{changes_loop=Pid} = State) ->
- handle_changes_loop_exit(Reason, State);
-
-handle_info(Msg, State) ->
- ?LOG_INFO("unexpected message ~p", [Msg]),
- {noreply, State}.
-
-terminate(_Reason, #state{changes_loop=Pid}) when is_pid(Pid) ->
- exit(Pid, shutdown),
- ok;
-terminate(_Reason, _State) ->
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%internal funs
-
-handle_add_missing_revs(HighSeq, [], _From, State) ->
- NewState = State#state{high_source_seq=HighSeq},
- maybe_checkpoint(NewState),
- {reply, ok, NewState};
-handle_add_missing_revs(HighSeq, Revs, From, #state{reply_to=nil} = State) ->
- #state{rows=Rows, count=Count} = State,
- NewState = State#state{
- rows = queue:join(Rows, queue:from_list(Revs)),
- count = Count + length(Revs),
- high_source_seq = HighSeq,
- high_missing_seq = HighSeq
- },
- if NewState#state.count < ?BUFFER_SIZE ->
- {reply, ok, NewState};
- true ->
- {noreply, NewState#state{changes_from=From}}
- end;
-handle_add_missing_revs(HighSeq, Revs, _From, #state{count=0} = State) ->
- gen_server:reply(State#state.reply_to, {HighSeq, Revs}),
- NewState = State#state{
- high_source_seq = HighSeq,
- high_missing_seq = HighSeq,
- reply_to = nil
- },
- {reply, ok, NewState}.
-
-handle_next_missing_revs(From, #state{count=0} = State) ->
- if State#state.complete ->
- {stop, normal, complete, State};
- true ->
- {noreply, State#state{reply_to=From}}
- end;
-handle_next_missing_revs(_From, State) ->
- #state{
- changes_from = ChangesFrom,
- high_missing_seq = HighSeq,
- rows = Rows
- } = State,
- if ChangesFrom =/= nil -> gen_server:reply(ChangesFrom, ok); true -> ok end,
- NewState = State#state{count=0, changes_from=nil, rows=queue:new()},
- {reply, {HighSeq, queue:to_list(Rows)}, NewState}.
-
-handle_changes_loop_exit(normal, State) ->
- if State#state.reply_to =/= nil ->
- gen_server:reply(State#state.reply_to, complete),
- {stop, normal, State};
- true ->
- {noreply, State#state{complete=true, changes_loop=nil}}
- end;
-handle_changes_loop_exit(Reason, State) ->
- {stop, Reason, State#state{changes_loop=nil}}.
-
-changes_loop(OurServer, SourceChangesServer, Parent) ->
- case couch_rep_changes_feed:next(SourceChangesServer) of
- complete ->
- exit(normal);
- Changes ->
- {ok, Target} = gen_server:call(Parent, get_target_db, infinity),
- MissingRevs = get_missing_revs(Target, Changes),
- gen_server:call(OurServer, {add_missing_revs, MissingRevs}, infinity)
- end,
- changes_loop(OurServer, SourceChangesServer, Parent).
-
-get_missing_revs(#http_db{}=Target, Changes) ->
- Transform = fun({Props}) ->
- C = couch_util:get_value(<<"changes">>, Props),
- Id = couch_util:get_value(<<"id">>, Props),
- {Id, [R || {[{<<"rev">>, R}]} <- C]}
- end,
- IdRevsList = [Transform(Change) || Change <- Changes],
- SeqDict = changes_dictionary(Changes),
- {LastProps} = lists:last(Changes),
- HighSeq = couch_util:get_value(<<"seq">>, LastProps),
- Request = Target#http_db{
- resource = "_missing_revs",
- method = post,
- body = {IdRevsList}
- },
- {Resp} = couch_rep_httpc:request(Request),
- case couch_util:get_value(<<"missing_revs">>, Resp) of
- {MissingRevs} ->
- X = [{Id, dict:fetch(Id, SeqDict), couch_doc:parse_revs(RevStrs)} ||
- {Id,RevStrs} <- MissingRevs],
- {HighSeq, X};
- _ ->
- exit({target_error, couch_util:get_value(<<"error">>, Resp)})
- end;
-
-get_missing_revs(Target, Changes) ->
- Transform = fun({Props}) ->
- C = couch_util:get_value(<<"changes">>, Props),
- Id = couch_util:get_value(<<"id">>, Props),
- {Id, [couch_doc:parse_rev(R) || {[{<<"rev">>, R}]} <- C]}
- end,
- IdRevsList = [Transform(Change) || Change <- Changes],
- SeqDict = changes_dictionary(Changes),
- {LastProps} = lists:last(Changes),
- HighSeq = couch_util:get_value(<<"seq">>, LastProps),
- {ok, Results} = couch_db:get_missing_revs(Target, IdRevsList),
- {HighSeq, [{Id, dict:fetch(Id, SeqDict), Revs} || {Id, Revs, _} <- Results]}.
-
-changes_dictionary(ChangeList) ->
- KVs = [{couch_util:get_value(<<"id">>,C), couch_util:get_value(<<"seq">>,C)}
- || {C} <- ChangeList],
- dict:from_list(KVs).
-
-%% save a checkpoint if no revs are missing on target so we don't
-%% rescan metadata unnecessarily
-maybe_checkpoint(#state{high_missing_seq=N, high_committed_seq=N} = State) ->
- #state{
- parent = Parent,
- high_source_seq = SourceSeq
- } = State,
- Parent ! {missing_revs_checkpoint, SourceSeq};
-maybe_checkpoint(_State) ->
- ok.
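-% e.g. once high_missing_seq and high_committed_seq are both 42, the parent
-% receives {missing_revs_checkpoint, SourceSeq} and can checkpoint without
-% rescanning source metadata.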
diff --git a/1.1.x/src/couchdb/couch_rep_reader.erl b/1.1.x/src/couchdb/couch_rep_reader.erl
deleted file mode 100644
index 0d344e5c..00000000
--- a/1.1.x/src/couchdb/couch_rep_reader.erl
+++ /dev/null
@@ -1,283 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_reader).
--behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
- code_change/3]).
-
--export([start_link/4, next/1]).
-
--import(couch_util, [encode_doc_id/1]).
-
--define (BUFFER_SIZE, 1000).
--define (MAX_CONCURRENT_REQUESTS, 100).
-
--include("couch_db.hrl").
-
--record (state, {
- parent,
- source,
- missing_revs,
- reader_loop,
- reader_from = [],
- count = 0,
- docs = queue:new(),
- reply_to = nil,
- complete = false,
- monitor_count = 0,
- pending_doc_request = nil,
- requested_seqs = [],
- opened_seqs = []
-}).
-
-start_link(Parent, Source, MissingRevs, PostProps) ->
- gen_server:start_link(?MODULE, [Parent, Source, MissingRevs, PostProps], []).
-
-next(Pid) ->
- gen_server:call(Pid, next_docs, infinity).
-
-init([Parent, Source, MissingRevs, _PostProps]) ->
- process_flag(trap_exit, true),
- Self = self(),
- ReaderLoop = spawn_link(
- fun() -> reader_loop(Self, Parent, Source, MissingRevs) end),
- State = #state{
- parent = Parent,
- source = Source,
- missing_revs = MissingRevs,
- reader_loop = ReaderLoop
- },
- {ok, State}.
-
-handle_call({add_docs, Seq, Docs}, From, State) ->
- State#state.parent ! {update_stats, docs_read, length(Docs)},
- handle_add_docs(Seq, lists:flatten(Docs), From, State);
-
-handle_call({add_request_seqs, Seqs}, _From, State) ->
- SeqList = State#state.requested_seqs,
- {reply, ok, State#state{requested_seqs = lists:merge(Seqs, SeqList)}};
-
-handle_call(next_docs, From, State) ->
- handle_next_docs(From, State);
-
-handle_call({open_remote_doc, Id, Seq, Revs}, From, State) ->
- handle_open_remote_doc(Id, Seq, Revs, From, State).
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info({'DOWN', _, _, _, Reason}, State) ->
- handle_monitor_down(Reason, State);
-
-handle_info({'EXIT', Loop, complete}, #state{reader_loop=Loop} = State) ->
- handle_reader_loop_complete(State).
-
-terminate(_Reason, _State) ->
-    ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%internal funs
-
-handle_add_docs(_Seq, [], _From, State) ->
- {reply, ok, State};
-handle_add_docs(Seq, DocsToAdd, From, #state{reply_to=nil} = State) ->
- State1 = update_sequence_lists(Seq, State),
- NewState = State1#state{
- docs = queue:join(State1#state.docs, queue:from_list(DocsToAdd)),
- count = State1#state.count + length(DocsToAdd)
- },
- if NewState#state.count < ?BUFFER_SIZE ->
- {reply, ok, NewState};
- true ->
- {noreply, NewState#state{reader_from=[From|State#state.reader_from]}}
- end;
-handle_add_docs(Seq, DocsToAdd, _From, #state{count=0} = State) ->
- NewState = update_sequence_lists(Seq, State),
- HighSeq = calculate_new_high_seq(NewState),
- gen_server:reply(State#state.reply_to, {HighSeq, DocsToAdd}),
- {reply, ok, NewState#state{reply_to=nil}}.
-
-handle_next_docs(From, #state{count=0} = State) ->
- if State#state.complete ->
- {stop, normal, {complete, calculate_new_high_seq(State)}, State};
- true ->
- {noreply, State#state{reply_to=From}}
- end;
-handle_next_docs(_From, State) ->
- #state{
- reader_from = ReaderFrom,
- docs = Docs
- } = State,
- [gen_server:reply(F, ok) || F <- ReaderFrom],
- NewState = State#state{count=0, reader_from=[], docs=queue:new()},
- {reply, {calculate_new_high_seq(State), queue:to_list(Docs)}, NewState}.
-
-handle_open_remote_doc(Id, Seq, Revs, From, #state{monitor_count=N} = State)
- when N > ?MAX_CONCURRENT_REQUESTS ->
- {noreply, State#state{pending_doc_request={From,Id,Seq,Revs}}};
-handle_open_remote_doc(Id, Seq, Revs, _, #state{source=#http_db{}} = State) ->
- #state{
- monitor_count = Count,
- source = Source
- } = State,
- {_, _Ref} = spawn_document_request(Source, Id, Seq, Revs),
- {reply, ok, State#state{monitor_count = Count+1}}.
-
-handle_monitor_down(normal, #state{pending_doc_request=nil, reply_to=nil,
- monitor_count=1, complete=waiting_on_monitors} = State) ->
- {noreply, State#state{complete=true, monitor_count=0}};
-handle_monitor_down(normal, #state{pending_doc_request=nil, reply_to=From,
- monitor_count=1, complete=waiting_on_monitors} = State) ->
- gen_server:reply(From, {complete, calculate_new_high_seq(State)}),
- {stop, normal, State#state{complete=true, monitor_count=0}};
-handle_monitor_down(normal, #state{pending_doc_request=nil} = State) ->
- #state{monitor_count = Count} = State,
- {noreply, State#state{monitor_count = Count-1}};
-handle_monitor_down(normal, State) ->
- #state{
- source = Source,
- pending_doc_request = {From, Id, Seq, Revs}
- } = State,
- gen_server:reply(From, ok),
- {_, _NewRef} = spawn_document_request(Source, Id, Seq, Revs),
- {noreply, State#state{pending_doc_request=nil}};
-handle_monitor_down(Reason, State) ->
- {stop, Reason, State}.
-
-handle_reader_loop_complete(#state{reply_to=nil, monitor_count=0} = State) ->
- {noreply, State#state{complete = true}};
-handle_reader_loop_complete(#state{monitor_count=0} = State) ->
- HighSeq = calculate_new_high_seq(State),
- gen_server:reply(State#state.reply_to, {complete, HighSeq}),
- {stop, normal, State};
-handle_reader_loop_complete(State) ->
- {noreply, State#state{complete = waiting_on_monitors}}.
-
-calculate_new_high_seq(#state{requested_seqs=[], opened_seqs=[Open|_]}) ->
- Open;
-calculate_new_high_seq(#state{requested_seqs=[Req|_], opened_seqs=[Open|_]})
- when Req < Open ->
- 0;
-calculate_new_high_seq(#state{opened_seqs=[]}) ->
- 0;
-calculate_new_high_seq(State) ->
- hd(State#state.opened_seqs).
-
-split_revlist(Rev, {[CurrentAcc|Rest], BaseLength, Length}) ->
- case Length+size(Rev)+3 > 8192 of
- false ->
- {[[Rev|CurrentAcc] | Rest], BaseLength, Length+size(Rev)+3};
- true ->
- {[[Rev],CurrentAcc|Rest], BaseLength, BaseLength}
- end.
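-% The +3 roughly accounts for the quotes and comma each rev string adds to
-% the open_revs JSON array; once a URL would pass 8192 bytes, a fresh
-% sub-list (and therefore a separate request) is started.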
-
-% We store outstanding requested sequences and a subset of already opened
-% sequences in 2 ordered lists. The subset of opened seqs is a) the largest
-% opened seq smaller than the smallest outstanding request seq plus b) all the
-% opened seqs greater than the smallest outstanding request. I believe it's the
-% minimal set of info needed to correctly calculate which seqs have been
-% replicated (because remote docs can be opened out-of-order) -- APK
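-% Worked example: with requested_seqs = [3,5] and opened_seqs = [2], opening
-% seq 3 leaves requested_seqs = [5] and opened_seqs = [3]; 2 is dropped since
-% 3 is now the largest opened seq below the smallest outstanding request, and
-% calculate_new_high_seq/1 then reports 3.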
-update_sequence_lists(Seq, State) ->
- Requested = lists:delete(Seq, State#state.requested_seqs),
- AllOpened = lists:merge([Seq], State#state.opened_seqs),
- Opened = case Requested of
- [] ->
- [lists:last(AllOpened)];
- [EarliestReq|_] ->
- case lists:splitwith(fun(X) -> X < EarliestReq end, AllOpened) of
- {[], Greater} ->
- Greater;
- {Less, Greater} ->
- [lists:last(Less) | Greater]
- end
- end,
- State#state{
- requested_seqs = Requested,
- opened_seqs = Opened
- }.
-
-open_doc_revs(#http_db{url = Url} = DbS, DocId, Revs) ->
- %% all this logic just splits up revision lists that are too long for
- %% MochiWeb into multiple requests
- BaseQS = [{revs,true}, {latest,true}, {att_encoding_info,true}],
- BaseReq = DbS#http_db{resource=encode_doc_id(DocId), qs=BaseQS},
- BaseLength = length(couch_rep_httpc:full_url(BaseReq) ++ "&open_revs=[]"),
-
- {RevLists, _, _} = lists:foldl(fun split_revlist/2,
- {[[]], BaseLength, BaseLength}, couch_doc:revs_to_strs(Revs)),
-
- Requests = [BaseReq#http_db{
- qs = [{open_revs, ?JSON_ENCODE(RevList)} | BaseQS]
- } || RevList <- RevLists],
- JsonResults = lists:flatten([couch_rep_httpc:request(R) || R <- Requests]),
-
- Transform =
- fun({[{<<"ok">>, Json}]}, Acc) ->
- #doc{id=Id, revs=Rev, atts=Atts} = Doc = couch_doc:from_json_obj(Json),
- Doc1 = Doc#doc{
- atts=[couch_rep_att:convert_stub(A, {DbS,Id,Rev}) || A <- Atts]
- },
- [Doc1 | Acc];
- ({ErrorProps}, Acc) ->
- Err = couch_util:get_value(<<"error">>, ErrorProps,
- ?JSON_ENCODE({ErrorProps})),
- ?LOG_ERROR("Replicator: error accessing doc ~s at ~s, reason: ~s",
- [DocId, couch_util:url_strip_password(Url), Err]),
- Acc
- end,
- lists:reverse(lists:foldl(Transform, [], JsonResults)).
-
-reader_loop(ReaderServer, Parent, Source, MissingRevsServer) ->
- case couch_rep_missing_revs:next(MissingRevsServer) of
- complete ->
- exit(complete);
- {HighSeq, IdsRevs} ->
- % to be safe, make sure Results are sorted by source_seq
- SortedIdsRevs = lists:keysort(2, IdsRevs),
- RequestSeqs = [S || {_,S,_} <- SortedIdsRevs],
- gen_server:call(ReaderServer, {add_request_seqs, RequestSeqs}, infinity),
- case Source of
- #http_db{} ->
- [gen_server:call(ReaderServer, {open_remote_doc, Id, Seq, Revs},
- infinity) || {Id,Seq,Revs} <- SortedIdsRevs],
- reader_loop(ReaderServer, Parent, Source, MissingRevsServer);
- _Local ->
- {ok, Source1} = gen_server:call(Parent, get_source_db, infinity),
- Source2 = maybe_reopen_db(Source1, HighSeq),
- lists:foreach(fun({Id,Seq,Revs}) ->
- {ok, Docs} = couch_db:open_doc_revs(Source2, Id, Revs, [latest]),
- JustTheDocs = [Doc || {ok, Doc} <- Docs],
- gen_server:call(ReaderServer, {add_docs, Seq, JustTheDocs},
- infinity)
- end, SortedIdsRevs),
- couch_db:close(Source2),
- reader_loop(ReaderServer, Parent, Source2, MissingRevsServer)
- end
- end.
-
-maybe_reopen_db(#db{update_seq=OldSeq} = Db, HighSeq) when HighSeq > OldSeq ->
- {ok, NewDb} = couch_db:open(Db#db.name, [{user_ctx, Db#db.user_ctx}]),
- NewDb;
-maybe_reopen_db(Db, _HighSeq) ->
- Db.
-
-spawn_document_request(Source, Id, Seq, Revs) ->
- Server = self(),
- SpawnFun = fun() ->
- Results = open_doc_revs(Source, Id, Revs),
- gen_server:call(Server, {add_docs, Seq, Results}, infinity)
- end,
- spawn_monitor(SpawnFun).
diff --git a/1.1.x/src/couchdb/couch_rep_sup.erl b/1.1.x/src/couchdb/couch_rep_sup.erl
deleted file mode 100644
index 1318c598..00000000
--- a/1.1.x/src/couchdb/couch_rep_sup.erl
+++ /dev/null
@@ -1,31 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_sup).
--behaviour(supervisor).
--export([init/1, start_link/0]).
-
--include("couch_db.hrl").
-
-start_link() ->
- supervisor:start_link({local,?MODULE}, ?MODULE, []).
-
-%%=============================================================================
-%% supervisor callbacks
-%%=============================================================================
-
-init([]) ->
- {ok, {{one_for_one, 3, 10}, []}}.
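-% The supervisor starts with no static children; replications are attached
-% dynamically at runtime. The spec tolerates at most 3 restarts within 10
-% seconds before giving up.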
-
-%%=============================================================================
-%% internal functions
-%%=============================================================================
diff --git a/1.1.x/src/couchdb/couch_rep_writer.erl b/1.1.x/src/couchdb/couch_rep_writer.erl
deleted file mode 100644
index 12d6dec5..00000000
--- a/1.1.x/src/couchdb/couch_rep_writer.erl
+++ /dev/null
@@ -1,165 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_rep_writer).
-
--export([start_link/4]).
-
--include("couch_db.hrl").
-
-start_link(Parent, _Target, Reader, _PostProps) ->
- {ok, spawn_link(fun() -> writer_loop(Parent, Reader) end)}.
-
-writer_loop(Parent, Reader) ->
- case couch_rep_reader:next(Reader) of
- {complete, FinalSeq} ->
- Parent ! {writer_checkpoint, FinalSeq},
- ok;
- {HighSeq, Docs} ->
- DocCount = length(Docs),
- {ok, Target} = gen_server:call(Parent, get_target_db, infinity),
- try write_docs(Target, Docs) of
- {ok, []} ->
- Parent ! {update_stats, docs_written, DocCount};
- {ok, Errors} ->
- ErrorCount = length(Errors),
- Parent ! {update_stats, doc_write_failures, ErrorCount},
- Parent ! {update_stats, docs_written, DocCount - ErrorCount}
- catch
- {attachment_request_failed, Err} ->
- ?LOG_DEBUG("writer failed to write an attachment ~p", [Err]),
- exit({attachment_request_failed, Err, Docs})
- end,
- Parent ! {writer_checkpoint, HighSeq},
- couch_rep_att:cleanup(),
- couch_util:should_flush(),
- writer_loop(Parent, Reader)
- end.
-
-write_docs(#http_db{} = Db, Docs) ->
- {DocsAtts, DocsNoAtts} = lists:partition(
- fun(#doc{atts=[]}) -> false; (_) -> true end,
- Docs
- ),
- ErrorsJson0 = write_bulk_docs(Db, DocsNoAtts),
- ErrorsJson = lists:foldl(
- fun(Doc, Acc) -> write_multi_part_doc(Db, Doc) ++ Acc end,
- ErrorsJson0,
- DocsAtts
- ),
- {ok, ErrorsJson};
-write_docs(Db, Docs) ->
- couch_db:update_docs(Db, Docs, [delay_commit], replicated_changes).
-
-write_bulk_docs(_Db, []) ->
- [];
-write_bulk_docs(#http_db{headers = Headers} = Db, Docs) ->
- JsonDocs = [
- couch_doc:to_json_obj(Doc, [revs, att_gzip_length]) || Doc <- Docs
- ],
- Request = Db#http_db{
- resource = "_bulk_docs",
- method = post,
- body = {[{new_edits, false}, {docs, JsonDocs}]},
-        headers = couch_util:proplist_apply_field(
-            {"Content-Type", "application/json"},
-            [{"X-Couch-Full-Commit", "false"} | Headers]
-        )
- },
- ErrorsJson = case couch_rep_httpc:request(Request) of
- {FailProps} ->
- exit({target_error, couch_util:get_value(<<"error">>, FailProps)});
- List when is_list(List) ->
- List
- end,
- [write_docs_1(V) || V <- ErrorsJson].
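-% new_edits = false (above) makes the target keep the submitted revision ids
-% instead of assigning new ones, which is what replication requires.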
-
-write_multi_part_doc(#http_db{headers=Headers} = Db, #doc{atts=Atts} = Doc) ->
- JsonBytes = ?JSON_ENCODE(
- couch_doc:to_json_obj(
- Doc,
- [follows, att_encoding_info, attachments]
- )
- ),
- Boundary = couch_uuids:random(),
- {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
- Boundary, JsonBytes, Atts, true
- ),
- StreamerPid = spawn_link(
- fun() -> streamer_fun(Boundary, JsonBytes, Atts) end
- ),
- BodyFun = fun(Acc) ->
- DataQueue = case Acc of
- nil ->
- StreamerPid ! {start, self()},
- receive
- {queue, Q} ->
- Q
- end;
- Queue ->
- Queue
- end,
- case couch_work_queue:dequeue(DataQueue) of
- closed ->
- eof;
- {ok, Data} ->
- {ok, iolist_to_binary(Data), DataQueue}
- end
- end,
- Request = Db#http_db{
- resource = couch_util:encode_doc_id(Doc),
- method = put,
- qs = [{new_edits, false}],
- body = {BodyFun, nil},
- headers = [
- {"x-couch-full-commit", "false"},
- {"Content-Type", ?b2l(ContentType)},
- {"Content-Length", Len} | Headers
- ]
- },
- Result = case couch_rep_httpc:request(Request) of
- {[{<<"error">>, Error}, {<<"reason">>, Reason}]} ->
- {Pos, [RevId | _]} = Doc#doc.revs,
- ErrId = couch_util:to_existing_atom(Error),
- [{Doc#doc.id, couch_doc:rev_to_str({Pos, RevId})}, {ErrId, Reason}];
- _ ->
- []
- end,
- StreamerPid ! stop,
- Result.
-
-streamer_fun(Boundary, JsonBytes, Atts) ->
- receive
- stop ->
- ok;
- {start, From} ->
-            % use a brand new queue to ensure there's no garbage left over
-            % from a previous (failed) iteration
- {ok, DataQueue} = couch_work_queue:new(
- [{max_size, 1024 * 1024}, {max_items, 1000}]),
- From ! {queue, DataQueue},
- couch_doc:doc_to_multi_part_stream(
- Boundary,
- JsonBytes,
- Atts,
- fun(Data) ->
- couch_work_queue:queue(DataQueue, Data)
- end,
- true
- ),
- couch_work_queue:close(DataQueue),
- streamer_fun(Boundary, JsonBytes, Atts)
- end.
-
-write_docs_1({Props}) ->
- Id = couch_util:get_value(<<"id">>, Props),
- Rev = couch_doc:parse_rev(couch_util:get_value(<<"rev">>, Props)),
- ErrId = couch_util:to_existing_atom(couch_util:get_value(<<"error">>, Props)),
- Reason = couch_util:get_value(<<"reason">>, Props),
- {{Id, Rev}, {ErrId, Reason}}.
diff --git a/1.1.x/src/couchdb/couch_replication_manager.erl b/1.1.x/src/couchdb/couch_replication_manager.erl
deleted file mode 100644
index 6101c9c5..00000000
--- a/1.1.x/src/couchdb/couch_replication_manager.erl
+++ /dev/null
@@ -1,383 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_replication_manager).
--behaviour(gen_server).
-
--export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
--export([code_change/3, terminate/2]).
-
--include("couch_db.hrl").
-
--define(DOC_ID_TO_REP_ID, rep_doc_id_to_rep_id).
--define(REP_ID_TO_DOC_ID, rep_id_to_rep_doc_id).
--define(INITIAL_WAIT, 5).
-
--record(state, {
- changes_feed_loop = nil,
- db_notifier = nil,
- rep_db_name = nil,
- rep_start_pids = [],
- max_retries
-}).
-
--import(couch_util, [
- get_value/2,
- get_value/3
-]).
-
-
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-init(_) ->
- process_flag(trap_exit, true),
- _ = ets:new(?DOC_ID_TO_REP_ID, [named_table, set, protected]),
- _ = ets:new(?REP_ID_TO_DOC_ID, [named_table, set, private]),
- Server = self(),
- ok = couch_config:register(
- fun("replicator", "db", NewName) ->
- ok = gen_server:cast(Server, {rep_db_changed, ?l2b(NewName)});
- ("replicator", "max_replication_retry_count", NewMaxRetries1) ->
- NewMaxRetries = list_to_integer(NewMaxRetries1),
- ok = gen_server:cast(Server, {set_max_retries, NewMaxRetries})
- end
- ),
- {Loop, RepDbName} = changes_feed_loop(),
- {ok, #state{
- changes_feed_loop = Loop,
- rep_db_name = RepDbName,
- db_notifier = db_update_notifier(),
- max_retries = list_to_integer(
- couch_config:get("replicator", "max_replication_retry_count", "10"))
- }}.
-
-
-handle_call({rep_db_update, Change}, _From, State) ->
- {reply, ok, process_update(State, Change)};
-
-handle_call({triggered, {BaseId, _}}, _From, State) ->
- [{BaseId, {DocId, true}}] = ets:lookup(?REP_ID_TO_DOC_ID, BaseId),
- true = ets:insert(?REP_ID_TO_DOC_ID, {BaseId, {DocId, false}}),
- {reply, ok, State};
-
-handle_call({restart_failure, {Props} = RepDoc, Error}, _From, State) ->
- DocId = get_value(<<"_id">>, Props),
- [{DocId, {{BaseId, _} = RepId, MaxRetries}}] = ets:lookup(
- ?DOC_ID_TO_REP_ID, DocId),
- ?LOG_ERROR("Failed to start replication `~s` after ~p attempts using "
- "the document `~s`. Last error reason was: ~p",
- [pp_rep_id(RepId), MaxRetries, DocId, Error]),
- couch_rep:update_rep_doc(
- RepDoc,
- [{<<"_replication_state">>, <<"error">>},
- {<<"_replication_id">>, ?l2b(BaseId)}]),
- true = ets:delete(?REP_ID_TO_DOC_ID, BaseId),
- true = ets:delete(?DOC_ID_TO_REP_ID, DocId),
- {reply, ok, State};
-
-handle_call(Msg, From, State) ->
- ?LOG_ERROR("Replication manager received unexpected call ~p from ~p",
- [Msg, From]),
- {stop, {error, {unexpected_call, Msg}}, State}.
-
-
-handle_cast({rep_db_changed, NewName}, #state{rep_db_name = NewName} = State) ->
- {noreply, State};
-
-handle_cast({rep_db_changed, _NewName}, State) ->
- {noreply, restart(State)};
-
-handle_cast({rep_db_created, NewName}, #state{rep_db_name = NewName} = State) ->
- {noreply, State};
-
-handle_cast({rep_db_created, _NewName}, State) ->
- {noreply, restart(State)};
-
-handle_cast({set_max_retries, MaxRetries}, State) ->
- {noreply, State#state{max_retries = MaxRetries}};
-
-handle_cast(Msg, State) ->
- ?LOG_ERROR("Replication manager received unexpected cast ~p", [Msg]),
- {stop, {error, {unexpected_cast, Msg}}, State}.
-
-
-handle_info({'EXIT', From, normal}, #state{changes_feed_loop = From} = State) ->
- % replicator DB deleted
- {noreply, State#state{changes_feed_loop = nil, rep_db_name = nil}};
-
-handle_info({'EXIT', From, Reason}, #state{db_notifier = From} = State) ->
- ?LOG_ERROR("Database update notifier died. Reason: ~p", [Reason]),
- {stop, {db_update_notifier_died, Reason}, State};
-
-handle_info({'EXIT', From, normal}, #state{rep_start_pids = Pids} = State) ->
- % one of the replication start processes terminated successfully
- {noreply, State#state{rep_start_pids = Pids -- [From]}};
-
-handle_info(Msg, State) ->
- ?LOG_ERROR("Replication manager received unexpected message ~p", [Msg]),
- {stop, {unexpected_msg, Msg}, State}.
-
-
-terminate(_Reason, State) ->
- #state{
- rep_start_pids = StartPids,
- changes_feed_loop = Loop,
- db_notifier = Notifier
- } = State,
- stop_all_replications(),
- lists:foreach(
- fun(Pid) ->
- catch unlink(Pid),
- catch exit(Pid, stop)
- end,
- [Loop | StartPids]),
- true = ets:delete(?REP_ID_TO_DOC_ID),
- true = ets:delete(?DOC_ID_TO_REP_ID),
- couch_db_update_notifier:stop(Notifier).
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-changes_feed_loop() ->
- {ok, RepDb} = couch_rep:ensure_rep_db_exists(),
- Server = self(),
- Pid = spawn_link(
- fun() ->
- ChangesFeedFun = couch_changes:handle_changes(
- #changes_args{
- include_docs = true,
- feed = "continuous",
- timeout = infinity,
- db_open_options = [sys_db]
- },
- {json_req, null},
- RepDb
- ),
- ChangesFeedFun(
- fun({change, Change, _}, _) ->
- case has_valid_rep_id(Change) of
- true ->
- ok = gen_server:call(
- Server, {rep_db_update, Change}, infinity);
- false ->
- ok
- end;
- (_, _) ->
- ok
- end
- )
- end
- ),
- couch_db:close(RepDb),
- {Pid, couch_db:name(RepDb)}.
-
-
-has_valid_rep_id({Change}) ->
- has_valid_rep_id(get_value(<<"id">>, Change));
-has_valid_rep_id(<<?DESIGN_DOC_PREFIX, _Rest/binary>>) ->
- false;
-has_valid_rep_id(_Else) ->
- true.
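-% e.g. a change for <<"_design/repl">> is ignored, while an id such as
-% <<"my_rep">> is treated as a candidate replication document.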
-
-
-db_update_notifier() ->
- Server = self(),
- {ok, Notifier} = couch_db_update_notifier:start_link(
- fun({created, DbName}) ->
- case ?l2b(couch_config:get("replicator", "db", "_replicator")) of
- DbName ->
- ok = gen_server:cast(Server, {rep_db_created, DbName});
- _ ->
- ok
- end;
- (_) ->
- % no need to handle the 'deleted' event - the changes feed loop
- % dies when the database is deleted
- ok
- end
- ),
- Notifier.
-
-
-restart(#state{changes_feed_loop = Loop, rep_start_pids = StartPids} = State) ->
- stop_all_replications(),
- lists:foreach(
- fun(Pid) ->
- catch unlink(Pid),
- catch exit(Pid, rep_db_changed)
- end,
- [Loop | StartPids]),
- {NewLoop, NewRepDbName} = changes_feed_loop(),
- State#state{
- changes_feed_loop = NewLoop,
- rep_db_name = NewRepDbName,
- rep_start_pids = []
- }.
-
-
-process_update(State, {Change}) ->
- {RepProps} = JsonRepDoc = get_value(doc, Change),
- DocId = get_value(<<"_id">>, RepProps),
- case get_value(<<"deleted">>, Change, false) of
- true ->
- rep_doc_deleted(DocId),
- State;
- false ->
- case get_value(<<"_replication_state">>, RepProps) of
- <<"completed">> ->
- replication_complete(DocId),
- State;
- <<"error">> ->
- stop_replication(DocId),
- State;
- <<"triggered">> ->
- maybe_start_replication(State, DocId, JsonRepDoc);
- undefined ->
- maybe_start_replication(State, DocId, JsonRepDoc)
- end
- end.
-
-
-rep_user_ctx({RepDoc}) ->
- case get_value(<<"user_ctx">>, RepDoc) of
- undefined ->
- #user_ctx{roles = [<<"_admin">>]};
- {UserCtx} ->
- #user_ctx{
- name = get_value(<<"name">>, UserCtx, null),
- roles = get_value(<<"roles">>, UserCtx, [])
- }
- end.
-
-
-maybe_start_replication(#state{max_retries = MaxRetries} = State,
- DocId, JsonRepDoc) ->
- UserCtx = rep_user_ctx(JsonRepDoc),
- {BaseId, _} = RepId = couch_rep:make_replication_id(JsonRepDoc, UserCtx),
- case ets:lookup(?REP_ID_TO_DOC_ID, BaseId) of
- [] ->
- true = ets:insert(?REP_ID_TO_DOC_ID, {BaseId, {DocId, true}}),
- true = ets:insert(?DOC_ID_TO_REP_ID, {DocId, {RepId, MaxRetries}}),
- Server = self(),
- Pid = spawn_link(fun() ->
- start_replication(Server, JsonRepDoc, RepId, UserCtx, MaxRetries)
- end),
- State#state{rep_start_pids = [Pid | State#state.rep_start_pids]};
- [{BaseId, {DocId, _}}] ->
- State;
- [{BaseId, {OtherDocId, false}}] ->
- ?LOG_INFO("The replication specified by the document `~s` was already"
- " triggered by the document `~s`", [DocId, OtherDocId]),
- maybe_tag_rep_doc(JsonRepDoc, ?l2b(BaseId)),
- State;
- [{BaseId, {OtherDocId, true}}] ->
- ?LOG_INFO("The replication specified by the document `~s` is already"
- " being triggered by the document `~s`", [DocId, OtherDocId]),
- maybe_tag_rep_doc(JsonRepDoc, ?l2b(BaseId)),
- State
- end.
-
-
-maybe_tag_rep_doc({Props} = JsonRepDoc, RepId) ->
- case get_value(<<"_replication_id">>, Props) of
- RepId ->
- ok;
- _ ->
- couch_rep:update_rep_doc(JsonRepDoc, [{<<"_replication_id">>, RepId}])
- end.
-
-
-start_replication(Server, {RepProps} = RepDoc, RepId, UserCtx, MaxRetries) ->
- case (catch couch_rep:start_replication(RepDoc, RepId, UserCtx)) of
- Pid when is_pid(Pid) ->
- ?LOG_INFO("Document `~s` triggered replication `~s`",
- [get_value(<<"_id">>, RepProps), pp_rep_id(RepId)]),
- ok = gen_server:call(Server, {triggered, RepId}, infinity),
- couch_rep:get_result(Pid, RepId, RepDoc, UserCtx);
- Error ->
- keep_retrying(
- Server, RepId, RepDoc, UserCtx, Error, ?INITIAL_WAIT, MaxRetries)
- end.
-
-
-keep_retrying(Server, _RepId, RepDoc, _UserCtx, Error, _Wait, 0) ->
- ok = gen_server:call(Server, {restart_failure, RepDoc, Error}, infinity);
-
-keep_retrying(Server, RepId, RepDoc, UserCtx, Error, Wait, RetriesLeft) ->
- {RepProps} = RepDoc,
- DocId = get_value(<<"_id">>, RepProps),
- ?LOG_ERROR("Error starting replication `~s` (document `~s`): ~p. "
- "Retrying in ~p seconds", [pp_rep_id(RepId), DocId, Error, Wait]),
- ok = timer:sleep(Wait * 1000),
- case (catch couch_rep:start_replication(RepDoc, RepId, UserCtx)) of
- Pid when is_pid(Pid) ->
- ok = gen_server:call(Server, {triggered, RepId}, infinity),
- [{DocId, {RepId, MaxRetries}}] = ets:lookup(?DOC_ID_TO_REP_ID, DocId),
- ?LOG_INFO("Document `~s` triggered replication `~s` after ~p attempts",
- [DocId, pp_rep_id(RepId), MaxRetries - RetriesLeft + 1]),
- couch_rep:get_result(Pid, RepId, RepDoc, UserCtx);
- NewError ->
- keep_retrying(
- Server, RepId, RepDoc, UserCtx, NewError, Wait * 2, RetriesLeft - 1)
- end.
-
-
-rep_doc_deleted(DocId) ->
- case stop_replication(DocId) of
- {ok, RepId} ->
- ?LOG_INFO("Stopped replication `~s` because replication document `~s`"
- " was deleted", [pp_rep_id(RepId), DocId]);
- none ->
- ok
- end.
-
-
-replication_complete(DocId) ->
- case stop_replication(DocId) of
- {ok, RepId} ->
- ?LOG_INFO("Replication `~s` finished (triggered by document `~s`)",
- [pp_rep_id(RepId), DocId]);
- none ->
- ok
- end.
-
-
-stop_replication(DocId) ->
- case ets:lookup(?DOC_ID_TO_REP_ID, DocId) of
- [{DocId, {{BaseId, _} = RepId, _MaxRetries}}] ->
- couch_rep:end_replication(RepId),
- true = ets:delete(?REP_ID_TO_DOC_ID, BaseId),
- true = ets:delete(?DOC_ID_TO_REP_ID, DocId),
- {ok, RepId};
- [] ->
- none
- end.
-
-
-stop_all_replications() ->
- ?LOG_INFO("Stopping all ongoing replications because the replicator"
- " database was deleted or changed", []),
- ets:foldl(
- fun({_, {RepId, _}}, _) ->
- couch_rep:end_replication(RepId)
- end,
- ok, ?DOC_ID_TO_REP_ID),
- true = ets:delete_all_objects(?REP_ID_TO_DOC_ID),
- true = ets:delete_all_objects(?DOC_ID_TO_REP_ID).
-
-
-% pretty-print replication id
-pp_rep_id({Base, Extension}) ->
- Base ++ Extension.
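-% e.g. for a hypothetical id: pp_rep_id({"abc123", "+continuous"}) returns
-% "abc123+continuous".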
diff --git a/1.1.x/src/couchdb/couch_server.erl b/1.1.x/src/couchdb/couch_server.erl
deleted file mode 100644
index 7870d69e..00000000
--- a/1.1.x/src/couchdb/couch_server.erl
+++ /dev/null
@@ -1,405 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_server).
--behaviour(gen_server).
-
--export([open/2,create/2,delete/2,all_databases/0,get_version/0]).
--export([init/1, handle_call/3,sup_start_link/0]).
--export([handle_cast/2,code_change/3,handle_info/2,terminate/2]).
--export([dev_start/0,is_admin/2,has_admins/0,get_stats/0]).
-
--include("couch_db.hrl").
-
--record(server,{
- root_dir = [],
- dbname_regexp,
- max_dbs_open=100,
- dbs_open=0,
- start_time=""
- }).
-
-dev_start() ->
- couch:stop(),
- up_to_date = make:all([load, debug_info]),
- couch:start().
-
-get_version() ->
- Apps = application:loaded_applications(),
- case lists:keysearch(couch, 1, Apps) of
- {value, {_, _, Vsn}} ->
- Vsn;
- false ->
- "0.0.0"
- end.
-
-get_stats() ->
- {ok, #server{start_time=Time,dbs_open=Open}} =
- gen_server:call(couch_server, get_server),
- [{start_time, ?l2b(Time)}, {dbs_open, Open}].
-
-sup_start_link() ->
- gen_server:start_link({local, couch_server}, couch_server, [], []).
-
-open(DbName, Options) ->
- case gen_server:call(couch_server, {open, DbName, Options}, infinity) of
- {ok, Db} ->
- Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
- {ok, Db#db{user_ctx=Ctx}};
- Error ->
- Error
- end.
-
-create(DbName, Options) ->
- case gen_server:call(couch_server, {create, DbName, Options}, infinity) of
- {ok, Db} ->
- Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
- {ok, Db#db{user_ctx=Ctx}};
- Error ->
- Error
- end.
-
-delete(DbName, Options) ->
- gen_server:call(couch_server, {delete, DbName, Options}, infinity).
-
-check_dbname(#server{dbname_regexp=RegExp}, DbName) ->
- case re:run(DbName, RegExp, [{capture, none}]) of
- nomatch ->
- case DbName of
- "_users" -> ok;
- "_replicator" -> ok;
- _Else ->
- {error, illegal_database_name}
- end;
- match ->
- ok
- end.
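-% e.g. "my_db-2" matches the regexp, while "MyDb" is rejected with
-% illegal_database_name; the system databases _users and _replicator are
-% special-cased above.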
-
-is_admin(User, ClearPwd) ->
- case couch_config:get("admins", User) of
- "-hashed-" ++ HashedPwdAndSalt ->
- [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
- couch_util:to_hex(crypto:sha(ClearPwd ++ Salt)) == HashedPwd;
- _Else ->
- false
- end.
-
-has_admins() ->
- couch_config:get("admins") /= [].
-
-get_full_filename(Server, DbName) ->
- filename:join([Server#server.root_dir, "./" ++ DbName ++ ".couch"]).
-
-hash_admin_passwords() ->
- hash_admin_passwords(true).
-
-hash_admin_passwords(Persist) ->
- lists:foreach(
- fun({_User, "-hashed-" ++ _}) ->
- ok; % already hashed
- ({User, ClearPassword}) ->
- Salt = ?b2l(couch_uuids:random()),
- Hashed = couch_util:to_hex(crypto:sha(ClearPassword ++ Salt)),
- couch_config:set("admins",
- User, "-hashed-" ++ Hashed ++ "," ++ Salt, Persist)
- end, couch_config:get("admins")).
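-% After this runs, every [admins] value has the form
-% "-hashed-<sha1 hex>,<salt>", so plaintext admin passwords never persist
-% in the config file.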
-
-init([]) ->
- % read config and register for configuration changes
-
-    % just stop if one of the config settings changes. couch_server_sup
-    % will restart us and then we will pick up the new settings.
-
- RootDir = couch_config:get("couchdb", "database_dir", "."),
- MaxDbsOpen = list_to_integer(
- couch_config:get("couchdb", "max_dbs_open")),
- Self = self(),
- ok = couch_config:register(
- fun("couchdb", "database_dir") ->
- exit(Self, config_change)
- end),
- ok = couch_config:register(
- fun("couchdb", "max_dbs_open", Max) ->
- gen_server:call(couch_server,
- {set_max_dbs_open, list_to_integer(Max)})
- end),
- ok = couch_file:init_delete_dir(RootDir),
- hash_admin_passwords(),
- ok = couch_config:register(
- fun("admins", _Key, _Value, Persist) ->
- % spawn here so couch_config doesn't try to call itself
- spawn(fun() -> hash_admin_passwords(Persist) end)
- end, false),
- {ok, RegExp} = re:compile("^[a-z][a-z0-9\\_\\$()\\+\\-\\/]*$"),
- ets:new(couch_dbs_by_name, [set, private, named_table]),
- ets:new(couch_dbs_by_pid, [set, private, named_table]),
- ets:new(couch_dbs_by_lru, [ordered_set, private, named_table]),
- ets:new(couch_sys_dbs, [set, private, named_table]),
- process_flag(trap_exit, true),
- {ok, #server{root_dir=RootDir,
- dbname_regexp=RegExp,
- max_dbs_open=MaxDbsOpen,
- start_time=httpd_util:rfc1123_date()}}.
-
-terminate(_Reason, _Srv) ->
- [couch_util:shutdown_sync(Pid) || {_, {Pid, _LruTime}} <-
- ets:tab2list(couch_dbs_by_name)],
- ok.
-
-all_databases() ->
- {ok, #server{root_dir=Root}} = gen_server:call(couch_server, get_server),
- NormRoot = couch_util:normpath(Root),
- Filenames =
- filelib:fold_files(Root, "^[a-z0-9\\_\\$()\\+\\-]*[\\.]couch$", true,
- fun(Filename, AccIn) ->
- NormFilename = couch_util:normpath(Filename),
- case NormFilename -- NormRoot of
- [$/ | RelativeFilename] -> ok;
- RelativeFilename -> ok
- end,
- [list_to_binary(filename:rootname(RelativeFilename, ".couch")) | AccIn]
- end, []),
- {ok, lists:usort(Filenames)}.
-
-
-maybe_close_lru_db(#server{dbs_open=NumOpen, max_dbs_open=MaxOpen}=Server)
- when NumOpen < MaxOpen ->
- {ok, Server};
-maybe_close_lru_db(#server{dbs_open=NumOpen}=Server) ->
- % must free up the lru db.
- case try_close_lru(now()) of
- ok ->
- {ok, Server#server{dbs_open=NumOpen - 1}};
- Error -> Error
- end.
-
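-% Walk couch_dbs_by_lru in timestamp order looking for an idle db to
-% shut down. Dbs that still have referrers get a fresh lru time; if the
-% oldest entry is newer than our start time, every open db is in use.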
-try_close_lru(StartTime) ->
- LruTime = get_lru(),
- if LruTime > StartTime ->
- % this means we've looped through all our opened dbs and found them
- % all in use.
- {error, all_dbs_active};
- true ->
- [{_, DbName}] = ets:lookup(couch_dbs_by_lru, LruTime),
- [{_, {opened, MainPid, LruTime}}] = ets:lookup(couch_dbs_by_name, DbName),
- case couch_db:is_idle(MainPid) of
- true ->
- ok = shutdown_idle_db(DbName, MainPid, LruTime);
- false ->
- % this still has referrers. Go ahead and give it a current lru time
- % and try the next one in the table.
- NewLruTime = now(),
- true = ets:insert(couch_dbs_by_name, {DbName, {opened, MainPid, NewLruTime}}),
- true = ets:insert(couch_dbs_by_pid, {MainPid, DbName}),
- true = ets:delete(couch_dbs_by_lru, LruTime),
- true = ets:insert(couch_dbs_by_lru, {NewLruTime, DbName}),
- try_close_lru(StartTime)
- end
- end.
-
-get_lru() ->
- get_lru(ets:first(couch_dbs_by_lru)).
-
-get_lru(LruTime) ->
- [{LruTime, DbName}] = ets:lookup(couch_dbs_by_lru, LruTime),
- case ets:member(couch_sys_dbs, DbName) of
- false ->
- LruTime;
- true ->
- [{_, {opened, MainPid, _}}] = ets:lookup(couch_dbs_by_name, DbName),
- case couch_db:is_idle(MainPid) of
- true ->
- NextLru = ets:next(couch_dbs_by_lru, LruTime),
- ok = shutdown_idle_db(DbName, MainPid, LruTime),
- get_lru(NextLru);
- false ->
- get_lru(ets:next(couch_dbs_by_lru, LruTime))
- end
- end.
-
-shutdown_idle_db(DbName, MainPid, LruTime) ->
- couch_util:shutdown_sync(MainPid),
- true = ets:delete(couch_dbs_by_lru, LruTime),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, MainPid),
- true = ets:delete(couch_sys_dbs, DbName),
- ok.
-
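-% Open the db in a spawned, linked process so couch_server stays
-% responsive; waiting callers accumulate in the Froms list until the
-% opener reports back with an open_result message.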
-open_async(Server, From, DbName, Filepath, Options) ->
- Parent = self(),
- Opener = spawn_link(fun() ->
- Res = couch_db:start_link(DbName, Filepath, Options),
- gen_server:call(
- Parent, {open_result, DbName, Res, Options}, infinity
- ),
- unlink(Parent),
- case Res of
- {ok, DbReader} ->
- unlink(DbReader);
- _ ->
- ok
- end
- end),
- true = ets:insert(couch_dbs_by_name, {DbName, {opening, Opener, [From]}}),
- true = ets:insert(couch_dbs_by_pid, {Opener, DbName}),
- DbsOpen = case lists:member(sys_db, Options) of
- true ->
- true = ets:insert(couch_sys_dbs, {DbName, true}),
- Server#server.dbs_open;
- false ->
- Server#server.dbs_open + 1
- end,
- Server#server{dbs_open = DbsOpen}.
-
-handle_call({set_max_dbs_open, Max}, _From, Server) ->
- {reply, ok, Server#server{max_dbs_open=Max}};
-handle_call(get_server, _From, Server) ->
- {reply, {ok, Server}, Server};
-handle_call({open_result, DbName, {ok, OpenedDbPid}, Options}, _From, Server) ->
- link(OpenedDbPid),
- [{DbName, {opening,Opener,Froms}}] = ets:lookup(couch_dbs_by_name, DbName),
- lists:foreach(fun({FromPid,_}=From) ->
- gen_server:reply(From,
- catch couch_db:open_ref_counted(OpenedDbPid, FromPid))
- end, Froms),
- LruTime = now(),
- true = ets:insert(couch_dbs_by_name,
- {DbName, {opened, OpenedDbPid, LruTime}}),
- true = ets:delete(couch_dbs_by_pid, Opener),
- true = ets:insert(couch_dbs_by_pid, {OpenedDbPid, DbName}),
- true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
- case lists:member(create, Options) of
- true ->
- couch_db_update_notifier:notify({created, DbName});
- false ->
- ok
- end,
- {reply, ok, Server};
-handle_call({open_result, DbName, Error, Options}, _From, Server) ->
- [{DbName, {opening,Opener,Froms}}] = ets:lookup(couch_dbs_by_name, DbName),
- lists:foreach(fun(From) ->
- gen_server:reply(From, Error)
- end, Froms),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, Opener),
- DbsOpen = case lists:member(sys_db, Options) of
- true ->
- true = ets:delete(couch_sys_dbs, DbName),
- Server#server.dbs_open;
- false ->
- Server#server.dbs_open - 1
- end,
- {reply, ok, Server#server{dbs_open = DbsOpen}};
-handle_call({open, DbName, Options}, {FromPid,_}=From, Server) ->
- LruTime = now(),
- case ets:lookup(couch_dbs_by_name, DbName) of
- [] ->
- open_db(DbName, Server, Options, From);
- [{_, {opening, Opener, Froms}}] ->
- true = ets:insert(couch_dbs_by_name, {DbName, {opening, Opener, [From|Froms]}}),
- {noreply, Server};
- [{_, {opened, MainPid, PrevLruTime}}] ->
- true = ets:insert(couch_dbs_by_name, {DbName, {opened, MainPid, LruTime}}),
- true = ets:delete(couch_dbs_by_lru, PrevLruTime),
- true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
- {reply, couch_db:open_ref_counted(MainPid, FromPid), Server}
- end;
-handle_call({create, DbName, Options}, From, Server) ->
- case ets:lookup(couch_dbs_by_name, DbName) of
- [] ->
- open_db(DbName, Server, [create | Options], From);
- [_AlreadyRunningDb] ->
- {reply, file_exists, Server}
- end;
-handle_call({delete, DbName, _Options}, _From, Server) ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- FullFilepath = get_full_filename(Server, DbNameList),
- UpdateState =
- case ets:lookup(couch_dbs_by_name, DbName) of
- [] -> false;
- [{_, {opening, Pid, Froms}}] ->
- couch_util:shutdown_sync(Pid),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, Pid),
- [gen_server:reply(F, not_found) || F <- Froms],
- true;
- [{_, {opened, Pid, LruTime}}] ->
- couch_util:shutdown_sync(Pid),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, Pid),
- true = ets:delete(couch_dbs_by_lru, LruTime),
- true
- end,
- Server2 = case UpdateState of
- true ->
- DbsOpen = case ets:member(couch_sys_dbs, DbName) of
- true ->
- true = ets:delete(couch_sys_dbs, DbName),
- Server#server.dbs_open;
- false ->
- Server#server.dbs_open - 1
- end,
- Server#server{dbs_open = DbsOpen};
- false ->
- Server
- end,
-
- %% Delete any leftover .compact files. If we don't do this a subsequent
- %% request for this DB will try to open the .compact file and use it.
- couch_file:delete(Server#server.root_dir, FullFilepath ++ ".compact"),
-
- case couch_file:delete(Server#server.root_dir, FullFilepath) of
- ok ->
- couch_db_update_notifier:notify({deleted, DbName}),
- {reply, ok, Server2};
- {error, enoent} ->
- {reply, not_found, Server2};
- Else ->
- {reply, Else, Server2}
- end;
- Error ->
- {reply, Error, Server}
- end.
-
-handle_cast(Msg, _Server) ->
- exit({unknown_cast_message, Msg}).
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info({'EXIT', _Pid, config_change}, Server) ->
- {noreply, shutdown, Server};
-handle_info(Error, _Server) ->
- ?LOG_ERROR("Unexpected message, restarting couch_server: ~p", [Error]),
- exit(kill).
-
-open_db(DbName, Server, Options, From) ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- Filepath = get_full_filename(Server, DbNameList),
- case lists:member(sys_db, Options) of
- true ->
- {noreply, open_async(Server, From, DbName, Filepath, Options)};
- false ->
- case maybe_close_lru_db(Server) of
- {ok, Server2} ->
- {noreply, open_async(Server2, From, DbName, Filepath, Options)};
- CloseError ->
- {reply, CloseError, Server}
- end
- end;
- Error ->
- {reply, Error, Server}
- end.
diff --git a/1.1.x/src/couchdb/couch_server_sup.erl b/1.1.x/src/couchdb/couch_server_sup.erl
deleted file mode 100644
index fafd83ed..00000000
--- a/1.1.x/src/couchdb/couch_server_sup.erl
+++ /dev/null
@@ -1,220 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_server_sup).
--behaviour(supervisor).
-
-
--export([start_link/1,stop/0, couch_config_start_link_wrapper/2,
- start_primary_services/0,start_secondary_services/0,
- restart_core_server/0]).
-
--include("couch_db.hrl").
-
-%% supervisor callbacks
--export([init/1]).
-
-start_link(IniFiles) ->
- case whereis(couch_server_sup) of
- undefined ->
- start_server(IniFiles);
- _Else ->
- {error, already_started}
- end.
-
-restart_core_server() ->
- init:restart().
-
-couch_config_start_link_wrapper(IniFiles, FirstConfigPid) ->
- case is_process_alive(FirstConfigPid) of
- true ->
- link(FirstConfigPid),
- {ok, FirstConfigPid};
- false -> couch_config:start_link(IniFiles)
- end.
-
-start_server(IniFiles) ->
- case init:get_argument(pidfile) of
- {ok, [PidFile]} ->
- case file:write_file(PidFile, os:getpid()) of
- ok -> ok;
- Error -> io:format("Failed to write PID file ~s, error: ~p", [PidFile, Error])
- end;
- _ -> ok
- end,
-
- {ok, ConfigPid} = couch_config:start_link(IniFiles),
-
- LogLevel = couch_config:get("log", "level", "info"),
- % announce startup
- io:format("Apache CouchDB ~s (LogLevel=~s) is starting.~n", [
- couch_server:get_version(),
- LogLevel
- ]),
- case LogLevel of
- "debug" ->
- io:format("Configuration Settings ~p:~n", [IniFiles]),
- [io:format(" [~s] ~s=~p~n", [Module, Variable, Value])
- || {{Module, Variable}, Value} <- couch_config:all()];
- _ -> ok
- end,
-
- LibDir =
- case couch_config:get("couchdb", "util_driver_dir", null) of
- null ->
- filename:join(couch_util:priv_dir(), "lib");
- LibDir0 -> LibDir0
- end,
-
- ok = couch_util:start_driver(LibDir),
-
- BaseChildSpecs =
- {{one_for_all, 10, 3600},
- [{couch_config,
- {couch_server_sup, couch_config_start_link_wrapper, [IniFiles, ConfigPid]},
- permanent,
- brutal_kill,
- worker,
- [couch_config]},
- {couch_primary_services,
- {couch_server_sup, start_primary_services, []},
- permanent,
- infinity,
- supervisor,
- [couch_server_sup]},
- {couch_secondary_services,
- {couch_server_sup, start_secondary_services, []},
- permanent,
- infinity,
- supervisor,
- [couch_server_sup]}
- ]},
-
- % ensure these applications are running
- application:start(ibrowse),
- application:start(crypto),
-
- {ok, Pid} = supervisor:start_link(
- {local, couch_server_sup}, couch_server_sup, BaseChildSpecs),
-
- % launch the icu bridge
-    % just restart if one of the config settings changes.
-
- couch_config:register(
- fun("couchdb", "util_driver_dir") ->
- ?MODULE:stop();
- ("daemons", _) ->
- ?MODULE:stop()
- end, Pid),
-
- unlink(ConfigPid),
-
- Ip = couch_config:get("httpd", "bind_address"),
- io:format("Apache CouchDB has started. Time to relax.~n"),
- Uris = [get_uri(Name, Ip) || Name <- [couch_httpd, https]],
- [begin
- case Uri of
- undefined -> ok;
- Uri -> ?LOG_INFO("Apache CouchDB has started on ~s", [Uri])
- end
- end
- || Uri <- Uris],
- case couch_config:get("couchdb", "uri_file", null) of
- null -> ok;
- UriFile ->
- Lines = [begin case Uri of
- undefined -> [];
- Uri -> io_lib:format("~s~n", [Uri])
- end end || Uri <- Uris],
- file:write_file(UriFile, Lines)
- end,
-
- {ok, Pid}.
-
-start_primary_services() ->
- supervisor:start_link({local, couch_primary_services}, couch_server_sup,
- {{one_for_one, 10, 3600},
- [{couch_log,
- {couch_log, start_link, []},
- permanent,
- brutal_kill,
- worker,
- [couch_log]},
- {couch_replication_supervisor,
- {couch_rep_sup, start_link, []},
- permanent,
- infinity,
- supervisor,
- [couch_rep_sup]},
- {couch_task_status,
- {couch_task_status, start_link, []},
- permanent,
- brutal_kill,
- worker,
- [couch_task_status]},
- {couch_server,
- {couch_server, sup_start_link, []},
- permanent,
- 1000,
- worker,
- [couch_server]},
- {couch_db_update_event,
- {gen_event, start_link, [{local, couch_db_update}]},
- permanent,
- brutal_kill,
- worker,
- dynamic}
- ]
- }).
-
-start_secondary_services() ->
- DaemonChildSpecs = [
- begin
- {ok, {Module, Fun, Args}} = couch_util:parse_term(SpecStr),
-
- {list_to_atom(Name),
- {Module, Fun, Args},
- permanent,
- 1000,
- worker,
- [Module]}
- end
- || {Name, SpecStr}
- <- couch_config:get("daemons"), SpecStr /= ""],
-
- supervisor:start_link({local, couch_secondary_services}, couch_server_sup,
- {{one_for_one, 10, 3600}, DaemonChildSpecs}).
-
-stop() ->
- catch exit(whereis(couch_server_sup), normal).
-
-init(ChildSpecs) ->
- {ok, ChildSpecs}.
-
-get_uri(Name, Ip) ->
- case get_port(Name) of
- undefined ->
- undefined;
- Port ->
- io_lib:format("~s://~s:~w/", [get_scheme(Name), Ip, Port])
- end.
-
-get_scheme(couch_httpd) -> "http";
-get_scheme(https) -> "https".
-
-get_port(Name) ->
- try
- mochiweb_socket_server:get(Name, port)
- catch
- exit:{noproc, _}->
- undefined
- end.
diff --git a/1.1.x/src/couchdb/couch_stats_aggregator.erl b/1.1.x/src/couchdb/couch_stats_aggregator.erl
deleted file mode 100644
index 6090355d..00000000
--- a/1.1.x/src/couchdb/couch_stats_aggregator.erl
+++ /dev/null
@@ -1,297 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_stats_aggregator).
--behaviour(gen_server).
-
--export([start/0, start/1, stop/0]).
--export([all/0, all/1, get/1, get/2, get_json/1, get_json/2, collect_sample/0]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--record(aggregate, {
- description = <<"">>,
- seconds = 0,
- count = 0,
- current = null,
- sum = null,
- mean = null,
- variance = null,
- stddev = null,
- min = null,
- max = null,
- samples = []
-}).
-
-
-start() ->
- PrivDir = couch_util:priv_dir(),
- start(filename:join(PrivDir, "stat_descriptions.cfg")).
-
-start(FileName) ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [FileName], []).
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-all() ->
- ?MODULE:all(0).
-all(Time) when is_binary(Time) ->
- ?MODULE:all(list_to_integer(binary_to_list(Time)));
-all(Time) when is_atom(Time) ->
- ?MODULE:all(list_to_integer(atom_to_list(Time)));
-all(Time) when is_integer(Time) ->
- Aggs = ets:match(?MODULE, {{'$1', Time}, '$2'}),
- Stats = lists:map(fun([Key, Agg]) -> {Key, Agg} end, Aggs),
- case Stats of
- [] ->
- {[]};
- _ ->
- Ret = lists:foldl(fun({{Mod, Key}, Agg}, Acc) ->
- CurrKeys = case proplists:lookup(Mod, Acc) of
- none -> [];
- {Mod, {Keys}} -> Keys
- end,
- NewMod = {[{Key, to_json_term(Agg)} | CurrKeys]},
- [{Mod, NewMod} | proplists:delete(Mod, Acc)]
- end, [], Stats),
- {Ret}
- end.
-
-get(Key) ->
- ?MODULE:get(Key, 0).
-get(Key, Time) when is_binary(Time) ->
- ?MODULE:get(Key, list_to_integer(binary_to_list(Time)));
-get(Key, Time) when is_atom(Time) ->
- ?MODULE:get(Key, list_to_integer(atom_to_list(Time)));
-get(Key, Time) when is_integer(Time) ->
- case ets:lookup(?MODULE, {make_key(Key), Time}) of
- [] -> #aggregate{seconds=Time};
- [{_, Agg}] -> Agg
- end.
-
-get_json(Key) ->
- get_json(Key, 0).
-get_json(Key, Time) ->
- to_json_term(?MODULE:get(Key, Time)).
-
-collect_sample() ->
- gen_server:call(?MODULE, collect_sample, infinity).
-
-
-init(StatDescsFileName) ->
- % Create an aggregate entry for each {description, rate} pair.
- ets:new(?MODULE, [named_table, set, protected]),
- SampleStr = couch_config:get("stats", "samples", "[0]"),
- {ok, Samples} = couch_util:parse_term(SampleStr),
- {ok, Descs} = file:consult(StatDescsFileName),
- lists:foreach(fun({Sect, Key, Value}) ->
- lists:foreach(fun(Secs) ->
- Agg = #aggregate{
- description=list_to_binary(Value),
- seconds=Secs
- },
- ets:insert(?MODULE, {{{Sect, Key}, Secs}, Agg})
- end, Samples)
- end, Descs),
-
- Self = self(),
- ok = couch_config:register(
- fun("stats", _) -> exit(Self, config_change) end
- ),
-
- Rate = list_to_integer(couch_config:get("stats", "rate", "1000")),
- % TODO: Add timer_start to kernel start options.
- {ok, TRef} = timer:apply_after(Rate, ?MODULE, collect_sample, []),
- {ok, {TRef, Rate}}.
-
-terminate(_Reason, {TRef, _Rate}) ->
- timer:cancel(TRef),
- ok.
-
-handle_call(collect_sample, _, {OldTRef, SampleInterval}) ->
- timer:cancel(OldTRef),
- {ok, TRef} = timer:apply_after(SampleInterval, ?MODULE, collect_sample, []),
- % Gather new stats values to add.
- Incs = lists:map(fun({Key, Value}) ->
- {Key, {incremental, Value}}
- end, couch_stats_collector:all(incremental)),
- Abs = lists:map(fun({Key, Values}) ->
- couch_stats_collector:clear(Key),
- Values2 = case Values of
- X when is_list(X) -> X;
- Else -> [Else]
- end,
- {_, Mean} = lists:foldl(fun(Val, {Count, Curr}) ->
- {Count+1, Curr + (Val - Curr) / (Count+1)}
- end, {0, 0}, Values2),
- {Key, {absolute, Mean}}
- end, couch_stats_collector:all(absolute)),
-
- Values = Incs ++ Abs,
- Now = erlang:now(),
- lists:foreach(fun({{Key, Rate}, Agg}) ->
- NewAgg = case proplists:lookup(Key, Values) of
- none ->
- rem_values(Now, Agg);
- {Key, {Type, Value}} ->
- NewValue = new_value(Type, Value, Agg#aggregate.current),
- Agg2 = add_value(Now, NewValue, Agg),
- rem_values(Now, Agg2)
- end,
- ets:insert(?MODULE, {{Key, Rate}, NewAgg})
- end, ets:tab2list(?MODULE)),
- {reply, ok, {TRef, SampleInterval}}.
-
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-code_change(_OldVersion, State, _Extra) ->
- {ok, State}.
-
-
-new_value(incremental, Value, null) ->
- Value;
-new_value(incremental, Value, Current) ->
- Value - Current;
-new_value(absolute, Value, _Current) ->
- Value.
-
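-% Maintain a running mean and variance over the samples using Welford's
-% online algorithm; stddev is only defined once there are two samples.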
-add_value(Time, Value, #aggregate{count=Count, seconds=Secs}=Agg) when Count < 1 ->
- Samples = case Secs of
- 0 -> [];
- _ -> [{Time, Value}]
- end,
- Agg#aggregate{
- count=1,
- current=Value,
- sum=Value,
- mean=Value,
- variance=0.0,
- stddev=null,
- min=Value,
- max=Value,
- samples=Samples
- };
-add_value(Time, Value, Agg) ->
- #aggregate{
- count=Count,
- current=Current,
- sum=Sum,
- mean=Mean,
- variance=Variance,
- samples=Samples
- } = Agg,
-
- NewCount = Count + 1,
- NewMean = Mean + (Value - Mean) / NewCount,
- NewVariance = Variance + (Value - Mean) * (Value - NewMean),
- StdDev = case NewCount > 1 of
- false -> null;
- _ -> math:sqrt(NewVariance / (NewCount - 1))
- end,
- Agg2 = Agg#aggregate{
- count=NewCount,
- current=Current + Value,
- sum=Sum + Value,
- mean=NewMean,
- variance=NewVariance,
- stddev=StdDev,
- min=lists:min([Agg#aggregate.min, Value]),
- max=lists:max([Agg#aggregate.max, Value])
- },
- case Agg2#aggregate.seconds of
- 0 -> Agg2;
- _ -> Agg2#aggregate{samples=[{Time, Value} | Samples]}
- end.
-
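-% Expire samples that have fallen outside the aggregate's time window,
-% reversing their contribution to the running statistics via
-% rem_value/2 below.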
-rem_values(Time, Agg) ->
- Seconds = Agg#aggregate.seconds,
- Samples = Agg#aggregate.samples,
- Pred = fun({When, _Value}) ->
- timer:now_diff(Time, When) =< (Seconds * 1000000)
- end,
- {Keep, Remove} = lists:splitwith(Pred, Samples),
- Agg2 = lists:foldl(fun({_, Value}, Acc) ->
- rem_value(Value, Acc)
- end, Agg, Remove),
- Agg2#aggregate{samples=Keep}.
-
-rem_value(_Value, #aggregate{count=Count, seconds=Secs}) when Count =< 1 ->
- #aggregate{seconds=Secs};
-rem_value(Value, Agg) ->
- #aggregate{
- count=Count,
- sum=Sum,
- mean=Mean,
- variance=Variance
- } = Agg,
-
- OldMean = (Mean * Count - Value) / (Count - 1),
- OldVariance = Variance - (Value - OldMean) * (Value - Mean),
- OldCount = Count - 1,
- StdDev = case OldCount > 1 of
- false -> null;
- _ -> math:sqrt(clamp_value(OldVariance / (OldCount - 1)))
- end,
- Agg#aggregate{
- count=OldCount,
- sum=Sum-Value,
- mean=clamp_value(OldMean),
- variance=clamp_value(OldVariance),
- stddev=StdDev
- }.
-
-to_json_term(Agg) ->
- {Min, Max} = case Agg#aggregate.seconds > 0 of
- false ->
- {Agg#aggregate.min, Agg#aggregate.max};
- _ ->
- case length(Agg#aggregate.samples) > 0 of
- true ->
- Extract = fun({_Time, Value}) -> Value end,
- Samples = lists:map(Extract, Agg#aggregate.samples),
- {lists:min(Samples), lists:max(Samples)};
- _ ->
- {null, null}
- end
- end,
- {[
- {description, Agg#aggregate.description},
- {current, round_value(Agg#aggregate.sum)},
- {sum, round_value(Agg#aggregate.sum)},
- {mean, round_value(Agg#aggregate.mean)},
- {stddev, round_value(Agg#aggregate.stddev)},
- {min, Min},
- {max, Max}
- ]}.
-
-make_key({Mod, Val}) when is_integer(Val) ->
- {Mod, list_to_atom(integer_to_list(Val))};
-make_key(Key) ->
- Key.
-
-round_value(Val) when not is_number(Val) ->
- Val;
-round_value(Val) when Val == 0 ->
- Val;
-round_value(Val) ->
- erlang:round(Val * 1000.0) / 1000.0.
-
-clamp_value(Val) when Val > 0.00000000000001 ->
- Val;
-clamp_value(_) ->
- 0.0.
diff --git a/1.1.x/src/couchdb/couch_stats_collector.erl b/1.1.x/src/couchdb/couch_stats_collector.erl
deleted file mode 100644
index f7b9bb48..00000000
--- a/1.1.x/src/couchdb/couch_stats_collector.erl
+++ /dev/null
@@ -1,136 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% todo
-% - remove existence check on increment(), decrement() and record(). Have
-% modules initialize counters on startup.
-
--module(couch_stats_collector).
-
--behaviour(gen_server).
-
--export([start/0, stop/0]).
--export([all/0, all/1, get/1, increment/1, decrement/1, record/2, clear/1]).
--export([track_process_count/1, track_process_count/2]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--define(HIT_TABLE, stats_hit_table).
--define(ABS_TABLE, stats_abs_table).
-
-start() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-stop() ->
- gen_server:call(?MODULE, stop).
-
-all() ->
- ets:tab2list(?HIT_TABLE) ++ abs_to_list().
-
-all(Type) ->
- case Type of
- incremental -> ets:tab2list(?HIT_TABLE);
- absolute -> abs_to_list()
- end.
-
-get(Key) ->
- case ets:lookup(?HIT_TABLE, Key) of
- [] ->
- case ets:lookup(?ABS_TABLE, Key) of
- [] ->
- nil;
- AbsVals ->
- lists:map(fun({_, Value}) -> Value end, AbsVals)
- end;
- [{_, Counter}] ->
- Counter
- end.
-
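-% Bump a counter, creating it lazily: ets:update_counter throws badarg
-% for a missing key, in which case the counter is inserted at 1.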
-increment(Key) ->
- Key2 = make_key(Key),
- case catch ets:update_counter(?HIT_TABLE, Key2, 1) of
- {'EXIT', {badarg, _}} ->
- catch ets:insert(?HIT_TABLE, {Key2, 1}),
- ok;
- _ ->
- ok
- end.
-
-decrement(Key) ->
- Key2 = make_key(Key),
- case catch ets:update_counter(?HIT_TABLE, Key2, -1) of
- {'EXIT', {badarg, _}} ->
- catch ets:insert(?HIT_TABLE, {Key2, -1}),
- ok;
- _ -> ok
- end.
-
-record(Key, Value) ->
- catch ets:insert(?ABS_TABLE, {make_key(Key), Value}).
-
-clear(Key) ->
- catch ets:delete(?ABS_TABLE, make_key(Key)).
-
-track_process_count(Stat) ->
- track_process_count(self(), Stat).
-
-track_process_count(Pid, Stat) ->
- MonitorFun = fun() ->
- Ref = erlang:monitor(process, Pid),
- receive {'DOWN', Ref, _, _, _} -> ok end,
- couch_stats_collector:decrement(Stat)
- end,
- case (catch couch_stats_collector:increment(Stat)) of
- ok -> spawn(MonitorFun);
- _ -> ok
- end.
-
-
-init(_) ->
- ets:new(?HIT_TABLE, [named_table, set, public]),
- ets:new(?ABS_TABLE, [named_table, duplicate_bag, public]),
- {ok, nil}.
-
-terminate(_Reason, _State) ->
- ok.
-
-handle_call(stop, _, State) ->
- {stop, normal, stopped, State}.
-
-handle_cast(foo, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-code_change(_OldVersion, State, _Extra) ->
- {ok, State}.
-
-
-make_key({Module, Key}) when is_integer(Key) ->
- {Module, list_to_atom(integer_to_list(Key))};
-make_key(Key) ->
- Key.
-
-abs_to_list() ->
- SortedKVs = lists:sort(ets:tab2list(?ABS_TABLE)),
- lists:foldl(fun({Key, Val}, Acc) ->
- case Acc of
- [] ->
- [{Key, [Val]}];
- [{Key, Prev} | Rest] ->
- [{Key, [Val | Prev]} | Rest];
- Others ->
- [{Key, [Val]} | Others]
- end
- end, [], SortedKVs). \ No newline at end of file
diff --git a/1.1.x/src/couchdb/couch_stream.erl b/1.1.x/src/couchdb/couch_stream.erl
deleted file mode 100644
index 60af1c2b..00000000
--- a/1.1.x/src/couchdb/couch_stream.erl
+++ /dev/null
@@ -1,357 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_stream).
--behaviour(gen_server).
-
-
--define(FILE_POINTER_BYTES, 8).
--define(FILE_POINTER_BITS, 8*(?FILE_POINTER_BYTES)).
-
--define(STREAM_OFFSET_BYTES, 4).
--define(STREAM_OFFSET_BITS, 8*(?STREAM_OFFSET_BYTES)).
-
--define(HUGE_CHUNK, 1000000000). % Huge chunk size when reading all in one go
-
--define(DEFAULT_STREAM_CHUNK, 16#00100000). % 1 meg chunks when streaming data
-
--export([open/1, open/3, close/1, write/2, foldl/4, foldl/5, range_foldl/6, foldl_decode/6,
- old_foldl/5,old_copy_to_new_stream/4]).
--export([copy_to_new_stream/3,old_read_term/2]).
--export([init/1, terminate/2, handle_call/3]).
--export([handle_cast/2,code_change/3,handle_info/2]).
-
--include("couch_db.hrl").
-
--record(stream,
- {fd = 0,
- written_pointers=[],
- buffer_list = [],
- buffer_len = 0,
- max_buffer = 4096,
- written_len = 0,
- md5,
- % md5 of the content without any transformation applied (e.g. compression)
- % needed for the attachment upload integrity check (ticket 558)
- identity_md5,
- identity_len = 0,
- encoding_fun,
- end_encoding_fun
- }).
-
-
-%%% Interface functions %%%
-
-open(Fd) ->
- open(Fd, identity, []).
-
-open(Fd, Encoding, Options) ->
- gen_server:start_link(couch_stream, {Fd, Encoding, Options}, []).
-
-close(Pid) ->
- gen_server:call(Pid, close, infinity).
-
-copy_to_new_stream(Fd, PosList, DestFd) ->
- {ok, Dest} = open(DestFd),
- foldl(Fd, PosList,
- fun(Bin, _) ->
- ok = write(Dest, Bin)
- end, ok),
- close(Dest).
-
-
-% 09 UPGRADE CODE
-old_copy_to_new_stream(Fd, Pos, Len, DestFd) ->
- {ok, Dest} = open(DestFd),
- old_foldl(Fd, Pos, Len,
- fun(Bin, _) ->
- ok = write(Dest, Bin)
- end, ok),
- close(Dest).
-
-% 09 UPGRADE CODE
-old_foldl(_Fd, null, 0, _Fun, Acc) ->
- Acc;
-old_foldl(Fd, OldPointer, Len, Fun, Acc) when is_tuple(OldPointer)->
- {ok, Acc2, _} = old_stream_data(Fd, OldPointer, Len, ?DEFAULT_STREAM_CHUNK, Fun, Acc),
- Acc2.
-
-foldl(_Fd, [], _Fun, Acc) ->
- Acc;
-foldl(Fd, [Pos|Rest], Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- foldl(Fd, Rest, Fun, Fun(Bin, Acc)).
-
-foldl(Fd, PosList, <<>>, Fun, Acc) ->
- foldl(Fd, PosList, Fun, Acc);
-foldl(Fd, PosList, Md5, Fun, Acc) ->
- foldl(Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc).
-
-foldl_decode(Fd, PosList, Md5, Enc, Fun, Acc) ->
- {DecDataFun, DecEndFun} = case Enc of
- gzip ->
- ungzip_init();
- identity ->
- identity_enc_dec_funs()
- end,
- Result = foldl_decode(
- DecDataFun, Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc
- ),
- DecEndFun(),
- Result.
-
-foldl(_Fd, [], Md5, Md5Acc, _Fun, Acc) ->
- Md5 = couch_util:md5_final(Md5Acc),
- Acc;
-foldl(Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) -> % 0110 UPGRADE CODE
- foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc);
-foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, Bin)),
- Fun(Bin, Acc);
-foldl(Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
- foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
-foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- foldl(Fd, Rest, Md5, couch_util:md5_update(Md5Acc, Bin), Fun, Fun(Bin, Acc)).
-
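-% Fold only the byte range [From, To) of the stream, clipping inside
-% partially covered blocks so the fun sees just the matching slice.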
-range_foldl(Fd, PosList, From, To, Fun, Acc) ->
- range_foldl(Fd, PosList, From, To, 0, Fun, Acc).
-
-range_foldl(_Fd, _PosList, _From, To, Off, _Fun, Acc) when Off >= To ->
- Acc;
-range_foldl(Fd, [Pos|Rest], From, To, Off, Fun, Acc) when is_integer(Pos) -> % old-style attachment
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- range_foldl(Fd, [{Pos, iolist_size(Bin)}] ++ Rest, From, To, Off, Fun, Acc);
-range_foldl(Fd, [{_Pos, Size}|Rest], From, To, Off, Fun, Acc) when From > Off + Size ->
- range_foldl(Fd, Rest, From, To, Off + Size, Fun, Acc);
-range_foldl(Fd, [{Pos, Size}|Rest], From, To, Off, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- Bin1 = if
- From =< Off andalso To >= Off + Size -> Bin; %% the whole block is covered
- true ->
- PrefixLen = clip(From - Off, 0, Size),
- PostfixLen = clip(Off + Size - To, 0, Size),
- MatchLen = Size - PrefixLen - PostfixLen,
- <<_Prefix:PrefixLen/binary,Match:MatchLen/binary,_Postfix:PostfixLen/binary>> = iolist_to_binary(Bin),
- Match
- end,
- range_foldl(Fd, Rest, From, To, Off + Size, Fun, Fun(Bin1, Acc)).
-
-clip(Value, Lo, Hi) ->
- if
- Value < Lo -> Lo;
- Value > Hi -> Hi;
- true -> Value
- end.
-
-foldl_decode(_DecFun, _Fd, [], Md5, Md5Acc, _Fun, Acc) ->
- Md5 = couch_util:md5_final(Md5Acc),
- Acc;
-foldl_decode(DecFun, Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) ->
- foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc);
-foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
- {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
- Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, EncBin)),
- Bin = DecFun(EncBin),
- Fun(Bin, Acc);
-foldl_decode(DecFun, Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
- foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
-foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
- {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
- Bin = DecFun(EncBin),
- Md5Acc2 = couch_util:md5_update(Md5Acc, EncBin),
- foldl_decode(DecFun, Fd, Rest, Md5, Md5Acc2, Fun, Fun(Bin, Acc)).
-
-gzip_init(Options) ->
- case couch_util:get_value(compression_level, Options, 0) of
- Lvl when Lvl >= 1 andalso Lvl =< 9 ->
- Z = zlib:open(),
- % 15 = ?MAX_WBITS (defined in the zlib module)
- % the 16 + ?MAX_WBITS formula was obtained by inspecting zlib:gzip/1
- ok = zlib:deflateInit(Z, Lvl, deflated, 16 + 15, 8, default),
- {
- fun(Data) ->
- zlib:deflate(Z, Data)
- end,
- fun() ->
- Last = zlib:deflate(Z, [], finish),
- ok = zlib:deflateEnd(Z),
- ok = zlib:close(Z),
- Last
- end
- };
- _ ->
- identity_enc_dec_funs()
- end.
-
-ungzip_init() ->
- Z = zlib:open(),
- zlib:inflateInit(Z, 16 + 15),
- {
- fun(Data) ->
- zlib:inflate(Z, Data)
- end,
- fun() ->
- ok = zlib:inflateEnd(Z),
- ok = zlib:close(Z)
- end
- }.
-
-identity_enc_dec_funs() ->
- {
- fun(Data) -> Data end,
- fun() -> [] end
- }.
-
-write(_Pid, <<>>) ->
- ok;
-write(Pid, Bin) ->
- gen_server:call(Pid, {write, Bin}, infinity).
-
-
-init({Fd, Encoding, Options}) ->
- {EncodingFun, EndEncodingFun} = case Encoding of
- identity ->
- identity_enc_dec_funs();
- gzip ->
- gzip_init(Options)
- end,
- {ok, #stream{
- fd=Fd,
- md5=couch_util:md5_init(),
- identity_md5=couch_util:md5_init(),
- encoding_fun=EncodingFun,
- end_encoding_fun=EndEncodingFun
- }
- }.
-
-terminate(_Reason, _Stream) ->
- ok.
-
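-% Writes are buffered until max_buffer bytes accumulate; the buffer is
-% then run through the encoding fun and appended to the file in a
-% single couch_file:append_binary call. identity_md5 tracks the raw
-% input while md5 tracks the encoded output.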
-handle_call({write, Bin}, _From, Stream) ->
- BinSize = iolist_size(Bin),
- #stream{
- fd = Fd,
- written_len = WrittenLen,
- written_pointers = Written,
- buffer_len = BufferLen,
- buffer_list = Buffer,
- max_buffer = Max,
- md5 = Md5,
- identity_md5 = IdenMd5,
- identity_len = IdenLen,
- encoding_fun = EncodingFun} = Stream,
- if BinSize + BufferLen > Max ->
- WriteBin = lists:reverse(Buffer, [Bin]),
- IdenMd5_2 = couch_util:md5_update(IdenMd5, WriteBin),
- case EncodingFun(WriteBin) of
- [] ->
- % case where the encoder did some internal buffering
-            % (zlib does this, for example)
- WrittenLen2 = WrittenLen,
- Md5_2 = Md5,
- Written2 = Written;
- WriteBin2 ->
- {ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
- WrittenLen2 = WrittenLen + iolist_size(WriteBin2),
- Md5_2 = couch_util:md5_update(Md5, WriteBin2),
- Written2 = [{Pos, iolist_size(WriteBin2)}|Written]
- end,
-
- {reply, ok, Stream#stream{
- written_len=WrittenLen2,
- written_pointers=Written2,
- buffer_list=[],
- buffer_len=0,
- md5=Md5_2,
- identity_md5=IdenMd5_2,
- identity_len=IdenLen + BinSize}};
- true ->
- {reply, ok, Stream#stream{
- buffer_list=[Bin|Buffer],
- buffer_len=BufferLen + BinSize,
- identity_len=IdenLen + BinSize}}
- end;
-handle_call(close, _From, Stream) ->
- #stream{
- fd = Fd,
- written_len = WrittenLen,
- written_pointers = Written,
- buffer_list = Buffer,
- md5 = Md5,
- identity_md5 = IdenMd5,
- identity_len = IdenLen,
- encoding_fun = EncodingFun,
- end_encoding_fun = EndEncodingFun} = Stream,
-
- WriteBin = lists:reverse(Buffer),
- IdenMd5Final = couch_util:md5_final(couch_util:md5_update(IdenMd5, WriteBin)),
- WriteBin2 = EncodingFun(WriteBin) ++ EndEncodingFun(),
- Md5Final = couch_util:md5_final(couch_util:md5_update(Md5, WriteBin2)),
- Result = case WriteBin2 of
- [] ->
- {lists:reverse(Written), WrittenLen, IdenLen, Md5Final, IdenMd5Final};
- _ ->
- {ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
- StreamInfo = lists:reverse(Written, [{Pos, iolist_size(WriteBin2)}]),
- StreamLen = WrittenLen + iolist_size(WriteBin2),
- {StreamInfo, StreamLen, IdenLen, Md5Final, IdenMd5Final}
- end,
- {stop, normal, Result, Stream}.
-
-handle_cast(_Msg, State) ->
- {noreply,State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-
-
-% 09 UPGRADE CODE
-old_read_term(Fd, Sp) ->
- {ok, <<TermLen:(?STREAM_OFFSET_BITS)>>, Sp2}
- = old_read(Fd, Sp, ?STREAM_OFFSET_BYTES),
- {ok, Bin, _Sp3} = old_read(Fd, Sp2, TermLen),
- {ok, binary_to_term(Bin)}.
-
-old_read(Fd, Sp, Num) ->
- {ok, RevBin, Sp2} = old_stream_data(Fd, Sp, Num, ?HUGE_CHUNK, fun(Bin, Acc) -> [Bin | Acc] end, []),
- Bin = list_to_binary(lists:reverse(RevBin)),
- {ok, Bin, Sp2}.
-
-% 09 UPGRADE CODE
-old_stream_data(_Fd, Sp, 0, _MaxChunk, _Fun, Acc) ->
- {ok, Acc, Sp};
-old_stream_data(Fd, {Pos, 0}, Num, MaxChunk, Fun, Acc) ->
- {ok, <<NextPos:(?FILE_POINTER_BITS), NextOffset:(?STREAM_OFFSET_BITS)>>}
- = couch_file:old_pread(Fd, Pos, ?FILE_POINTER_BYTES + ?STREAM_OFFSET_BYTES),
- Sp = {NextPos, NextOffset},
-    % Check that NextPos is past the current Pos (always true in a stream).
- % Guards against potential infinite loops caused by corruption.
- case NextPos > Pos of
- true -> ok;
- false -> throw({error, stream_corruption})
- end,
- old_stream_data(Fd, Sp, Num, MaxChunk, Fun, Acc);
-old_stream_data(Fd, {Pos, Offset}, Num, MaxChunk, Fun, Acc) ->
- ReadAmount = lists:min([MaxChunk, Num, Offset]),
- {ok, Bin} = couch_file:old_pread(Fd, Pos, ReadAmount),
- Sp = {Pos + ReadAmount, Offset - ReadAmount},
- old_stream_data(Fd, Sp, Num - ReadAmount, MaxChunk, Fun, Fun(Bin, Acc)).
-
-
-% Tests moved to tests/etap/050-stream.t
-
diff --git a/1.1.x/src/couchdb/couch_task_status.erl b/1.1.x/src/couchdb/couch_task_status.erl
deleted file mode 100644
index c4487dc4..00000000
--- a/1.1.x/src/couchdb/couch_task_status.erl
+++ /dev/null
@@ -1,124 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_task_status).
--behaviour(gen_server).
-
-% This module is used to track the status of long-running tasks.
-% Long-running tasks register themselves (add_task/3) and then update their
-% status (update/1), which adds the task and its status to the task list.
-% When a tracked task dies it is automatically removed from tracking. To get
-% the task list, use the all/0 function.
-
--export([start_link/0, stop/0]).
--export([all/0, add_task/3, update/1, update/2, set_update_frequency/1]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--import(couch_util, [to_binary/1]).
-
--include("couch_db.hrl").
-
-
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-
-all() ->
- gen_server:call(?MODULE, all).
-
-
-add_task(Type, TaskName, StatusText) ->
- put(task_status_update, {{0, 0, 0}, 0}),
- Msg = {
- add_task,
- to_binary(Type),
- to_binary(TaskName),
- to_binary(StatusText)
- },
- gen_server:call(?MODULE, Msg).
-
-
-set_update_frequency(Msecs) ->
- put(task_status_update, {{0, 0, 0}, Msecs * 1000}).
-
-
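-% Status updates are rate limited: the last update time and minimum
-% interval live in the calling process's dictionary, and updates that
-% arrive faster than the configured frequency are silently dropped.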
-update(StatusText) ->
- update("~s", [StatusText]).
-
-update(Format, Data) ->
- {LastUpdateTime, Frequency} = get(task_status_update),
- case timer:now_diff(Now = now(), LastUpdateTime) >= Frequency of
- true ->
- put(task_status_update, {Now, Frequency}),
- Msg = ?l2b(io_lib:format(Format, Data)),
- gen_server:cast(?MODULE, {update_status, self(), Msg});
- false ->
- ok
- end.
-
-
-init([]) ->
-    % create the ETS table used to track task status
- ets:new(?MODULE, [ordered_set, protected, named_table]),
- {ok, nil}.
-
-
-terminate(_Reason,_State) ->
- ok.
-
-
-handle_call({add_task, Type, TaskName, StatusText}, {From, _}, Server) ->
- case ets:lookup(?MODULE, From) of
- [] ->
- true = ets:insert(?MODULE, {From, {Type, TaskName, StatusText}}),
- erlang:monitor(process, From),
- {reply, ok, Server};
- [_] ->
- {reply, {add_task_error, already_registered}, Server}
- end;
-handle_call(all, _, Server) ->
- All = [
- [
- {type, Type},
- {task, Task},
- {status, Status},
- {pid, ?l2b(pid_to_list(Pid))}
- ]
- ||
- {Pid, {Type, Task, Status}} <- ets:tab2list(?MODULE)
- ],
- {reply, All, Server}.
-
-
-handle_cast({update_status, Pid, StatusText}, Server) ->
- [{Pid, {Type, TaskName, _StatusText}}] = ets:lookup(?MODULE, Pid),
- ?LOG_DEBUG("New task status for ~s: ~s",[TaskName, StatusText]),
- true = ets:insert(?MODULE, {Pid, {Type, TaskName, StatusText}}),
- {noreply, Server};
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-handle_info({'DOWN', _MonitorRef, _Type, Pid, _Info}, Server) ->
-    %% should we also call erlang:demonitor(_MonitorRef) here?
- ets:delete(?MODULE, Pid),
- {noreply, Server}.
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
diff --git a/1.1.x/src/couchdb/couch_util.erl b/1.1.x/src/couchdb/couch_util.erl
deleted file mode 100644
index 53dfe5e3..00000000
--- a/1.1.x/src/couchdb/couch_util.erl
+++ /dev/null
@@ -1,478 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_util).
-
--export([priv_dir/0, start_driver/1, normpath/1]).
--export([should_flush/0, should_flush/1, to_existing_atom/1]).
--export([rand32/0, implode/2, collate/2, collate/3]).
--export([abs_pathname/1,abs_pathname/2, trim/1]).
--export([encodeBase64Url/1, decodeBase64Url/1]).
--export([validate_utf8/1, to_hex/1, parse_term/1, dict_find/3]).
--export([get_nested_json_value/2, json_user_ctx/1]).
--export([proplist_apply_field/2, json_apply_field/2]).
--export([to_binary/1, to_integer/1, to_list/1, url_encode/1]).
--export([json_encode/1, json_decode/1]).
--export([verify/2,simple_call/2,shutdown_sync/1]).
--export([compressible_att_type/1]).
--export([get_value/2, get_value/3]).
--export([md5/1, md5_init/0, md5_update/2, md5_final/1]).
--export([reorder_results/2]).
--export([url_strip_password/1]).
--export([encode_doc_id/1]).
-
--include("couch_db.hrl").
-
-% arbitrarily chosen amount of memory to use before flushing to disk
--define(FLUSH_MAX_MEM, 10000000).
-
-priv_dir() ->
- case code:priv_dir(couch) of
- {error, bad_name} ->
- % small hack, in dev mode "app" is couchdb. Fixing requires
-        % renaming src/couchdb to src/couch. Not really worth the hassle.
- % -Damien
- code:priv_dir(couchdb);
- Dir -> Dir
- end.
-
-start_driver(LibDir) ->
- case erl_ddll:load_driver(LibDir, "couch_icu_driver") of
- ok ->
- ok;
- {error, already_loaded} ->
- ok = erl_ddll:reload_driver(LibDir, "couch_icu_driver");
- {error, Error} ->
- exit(erl_ddll:format_error(Error))
- end.
-
-% Normalize a pathname by removing .. and . components.
-normpath(Path) ->
- normparts(filename:split(Path), []).
-
-normparts([], Acc) ->
- filename:join(lists:reverse(Acc));
-normparts([".." | RestParts], [_Drop | RestAcc]) ->
- normparts(RestParts, RestAcc);
-normparts(["." | RestParts], Acc) ->
- normparts(RestParts, Acc);
-normparts([Part | RestParts], Acc) ->
- normparts(RestParts, [Part | Acc]).
-
-% works like list_to_existing_atom, except the input can be a list or a
-% binary, and it returns the original value instead of raising an error
-% when no such atom exists.
-to_existing_atom(V) when is_list(V) ->
- try list_to_existing_atom(V) catch _:_ -> V end;
-to_existing_atom(V) when is_binary(V) ->
- try list_to_existing_atom(?b2l(V)) catch _:_ -> V end;
-to_existing_atom(V) when is_atom(V) ->
- V.
-
-shutdown_sync(Pid) when not is_pid(Pid)->
- ok;
-shutdown_sync(Pid) ->
- MRef = erlang:monitor(process, Pid),
- try
- catch unlink(Pid),
- catch exit(Pid, shutdown),
- receive
- {'DOWN', MRef, _, _, _} ->
- ok
- end
- after
- erlang:demonitor(MRef, [flush])
- end.
-
-
-simple_call(Pid, Message) ->
- MRef = erlang:monitor(process, Pid),
- try
- Pid ! {self(), Message},
- receive
- {Pid, Result} ->
- Result;
- {'DOWN', MRef, _, _, Reason} ->
- exit(Reason)
- end
- after
- erlang:demonitor(MRef, [flush])
- end.
-
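-% Validate that a binary is well-formed UTF-8 by checking each lead
-% byte and its continuation bytes against the RFC 3629 ranges.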
-validate_utf8(Data) when is_list(Data) ->
- validate_utf8(?l2b(Data));
-validate_utf8(Bin) when is_binary(Bin) ->
- validate_utf8_fast(Bin, 0).
-
-validate_utf8_fast(B, O) ->
- case B of
- <<_:O/binary>> ->
- true;
- <<_:O/binary, C1, _/binary>> when
- C1 < 128 ->
- validate_utf8_fast(B, 1 + O);
- <<_:O/binary, C1, C2, _/binary>> when
- C1 >= 194, C1 =< 223,
- C2 >= 128, C2 =< 191 ->
- validate_utf8_fast(B, 2 + O);
- <<_:O/binary, C1, C2, C3, _/binary>> when
- C1 >= 224, C1 =< 239,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191 ->
- validate_utf8_fast(B, 3 + O);
- <<_:O/binary, C1, C2, C3, C4, _/binary>> when
- C1 >= 240, C1 =< 244,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191,
- C4 >= 128, C4 =< 191 ->
- validate_utf8_fast(B, 4 + O);
- _ ->
- false
- end.
-
-to_hex([]) ->
- [];
-to_hex(Bin) when is_binary(Bin) ->
- to_hex(binary_to_list(Bin));
-to_hex([H|T]) ->
- [to_digit(H div 16), to_digit(H rem 16) | to_hex(T)].
-
-to_digit(N) when N < 10 -> $0 + N;
-to_digit(N) -> $a + N-10.
-
-
-parse_term(Bin) when is_binary(Bin) ->
- parse_term(binary_to_list(Bin));
-parse_term(List) ->
- {ok, Tokens, _} = erl_scan:string(List ++ "."),
- erl_parse:parse_term(Tokens).
-
-get_value(Key, List) ->
- get_value(Key, List, undefined).
-
-get_value(Key, List, Default) ->
- case lists:keysearch(Key, 1, List) of
- {value, {Key,Value}} ->
- Value;
- false ->
- Default
- end.
-
-get_nested_json_value({Props}, [Key|Keys]) ->
- case couch_util:get_value(Key, Props, nil) of
- nil -> throw({not_found, <<"missing json key: ", Key/binary>>});
- Value -> get_nested_json_value(Value, Keys)
- end;
-get_nested_json_value(Value, []) ->
- Value;
-get_nested_json_value(_NotJSONObj, _) ->
- throw({not_found, json_mismatch}).
-
-proplist_apply_field(H, L) ->
- {R} = json_apply_field(H, {L}),
- R.
-
-json_apply_field(H, {L}) ->
- json_apply_field(H, L, []).
-json_apply_field({Key, NewValue}, [{Key, _OldVal} | Headers], Acc) ->
- json_apply_field({Key, NewValue}, Headers, Acc);
-json_apply_field({Key, NewValue}, [{OtherKey, OtherVal} | Headers], Acc) ->
- json_apply_field({Key, NewValue}, Headers, [{OtherKey, OtherVal} | Acc]);
-json_apply_field({Key, NewValue}, [], Acc) ->
- {[{Key, NewValue}|Acc]}.
-
-json_user_ctx(#db{name=DbName, user_ctx=Ctx}) ->
- {[{<<"db">>, DbName},
- {<<"name">>,Ctx#user_ctx.name},
- {<<"roles">>,Ctx#user_ctx.roles}]}.
-
-
-% returns a random 32-bit integer
-rand32() ->
- crypto:rand_uniform(0, 16#100000000).
-
-% given a pathname "../foo/bar/" it gives back the fully qualified
-% absolute pathname.
-abs_pathname(" " ++ Filename) ->
-    % strip leading whitespace
- abs_pathname(Filename);
-abs_pathname([$/ |_]=Filename) ->
- Filename;
-abs_pathname(Filename) ->
- {ok, Cwd} = file:get_cwd(),
- {Filename2, Args} = separate_cmd_args(Filename, ""),
- abs_pathname(Filename2, Cwd) ++ Args.
-
-abs_pathname(Filename, Dir) ->
- Name = filename:absname(Filename, Dir ++ "/"),
- OutFilename = filename:join(fix_path_list(filename:split(Name), [])),
-    % If the filename is a dir (last char is a slash), put back the end slash
- case string:right(Filename,1) of
- "/" ->
- OutFilename ++ "/";
- "\\" ->
- OutFilename ++ "/";
- _Else->
- OutFilename
- end.
-
-% if this is an executable with arguments, separate out the arguments
-% "./foo\ bar.sh -baz=blah" -> {"./foo\ bar.sh", " -baz=blah"}
-separate_cmd_args("", CmdAcc) ->
- {lists:reverse(CmdAcc), ""};
-separate_cmd_args("\\ " ++ Rest, CmdAcc) -> % handle skipped value
- separate_cmd_args(Rest, " \\" ++ CmdAcc);
-separate_cmd_args(" " ++ Rest, CmdAcc) ->
- {lists:reverse(CmdAcc), " " ++ Rest};
-separate_cmd_args([Char|Rest], CmdAcc) ->
- separate_cmd_args(Rest, [Char | CmdAcc]).
-
-% Is a character whitespace?
-is_whitespace($\s) -> true;
-is_whitespace($\t) -> true;
-is_whitespace($\n) -> true;
-is_whitespace($\r) -> true;
-is_whitespace(_Else) -> false.
-
-
-% removes leading and trailing whitespace from a string
-trim(String) ->
- String2 = lists:dropwhile(fun is_whitespace/1, String),
- lists:reverse(lists:dropwhile(fun is_whitespace/1, lists:reverse(String2))).
-
-% takes a hierarchical list of dirs and removes the dots ".", double dots
-% ".." and the corresponding parent dirs.
-fix_path_list([], Acc) ->
- lists:reverse(Acc);
-fix_path_list([".."|Rest], [_PrevAcc|RestAcc]) ->
- fix_path_list(Rest, RestAcc);
-fix_path_list(["."|Rest], Acc) ->
- fix_path_list(Rest, Acc);
-fix_path_list([Dir | Rest], Acc) ->
- fix_path_list(Rest, [Dir | Acc]).
-
-
-implode(List, Sep) ->
- implode(List, Sep, []).
-
-implode([], _Sep, Acc) ->
- lists:flatten(lists:reverse(Acc));
-implode([H], Sep, Acc) ->
- implode([], Sep, [H|Acc]);
-implode([H|T], Sep, Acc) ->
- implode(T, Sep, [Sep,H|Acc]).
-
-
-drv_port() ->
- case get(couch_drv_port) of
- undefined ->
- Port = open_port({spawn, "couch_icu_driver"}, []),
- put(couch_drv_port, Port),
- Port;
- Port ->
- Port
- end.
-
-collate(A, B) ->
- collate(A, B, []).
-
-collate(A, B, Options) when is_binary(A), is_binary(B) ->
- Operation =
- case lists:member(nocase, Options) of
- true -> 1; % Case insensitive
- false -> 0 % Case sensitive
- end,
- SizeA = byte_size(A),
- SizeB = byte_size(B),
- Bin = <<SizeA:32/native, A/binary, SizeB:32/native, B/binary>>,
- [Result] = erlang:port_control(drv_port(), Operation, Bin),
- % Result is 0 for lt, 1 for eq and 2 for gt. Subtract 1 to return the
- % expected typical -1, 0, 1
- Result - 1.
-
-should_flush() ->
- should_flush(?FLUSH_MAX_MEM).
-
-should_flush(MemThreshHold) ->
- {memory, ProcMem} = process_info(self(), memory),
- BinMem = lists:foldl(fun({_Id, Size, _NRefs}, Acc) -> Size+Acc end,
- 0, element(2,process_info(self(), binary))),
- if ProcMem+BinMem > 2*MemThreshHold ->
- garbage_collect(),
- {memory, ProcMem2} = process_info(self(), memory),
- BinMem2 = lists:foldl(fun({_Id, Size, _NRefs}, Acc) -> Size+Acc end,
- 0, element(2,process_info(self(), binary))),
- ProcMem2+BinMem2 > MemThreshHold;
- true -> false end.
-
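-% URL-safe base64 variant (RFC 4648): "+" and "/" become "-" and "_",
-% and the trailing "=" padding is stripped (re-added on decode).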
-encodeBase64Url(Url) ->
- Url1 = iolist_to_binary(re:replace(base64:encode(Url), "=+$", "")),
- Url2 = iolist_to_binary(re:replace(Url1, "/", "_", [global])),
- iolist_to_binary(re:replace(Url2, "\\+", "-", [global])).
-
-decodeBase64Url(Url64) ->
- Url1 = re:replace(iolist_to_binary(Url64), "-", "+", [global]),
- Url2 = iolist_to_binary(
- re:replace(iolist_to_binary(Url1), "_", "/", [global])
- ),
- Padding = ?l2b(lists:duplicate((4 - size(Url2) rem 4) rem 4, $=)),
- base64:decode(<<Url2/binary, Padding/binary>>).
-
-dict_find(Key, Dict, DefaultValue) ->
- case dict:find(Key, Dict) of
- {ok, Value} ->
- Value;
- error ->
- DefaultValue
- end.
-
-to_binary(V) when is_binary(V) ->
- V;
-to_binary(V) when is_list(V) ->
- try
- list_to_binary(V)
- catch
- _:_ ->
- list_to_binary(io_lib:format("~p", [V]))
- end;
-to_binary(V) when is_atom(V) ->
- list_to_binary(atom_to_list(V));
-to_binary(V) ->
- list_to_binary(io_lib:format("~p", [V])).
-
-to_integer(V) when is_integer(V) ->
- V;
-to_integer(V) when is_list(V) ->
- erlang:list_to_integer(V);
-to_integer(V) when is_binary(V) ->
- erlang:list_to_integer(binary_to_list(V)).
-
-to_list(V) when is_list(V) ->
- V;
-to_list(V) when is_binary(V) ->
- binary_to_list(V);
-to_list(V) when is_atom(V) ->
- atom_to_list(V);
-to_list(V) ->
- lists:flatten(io_lib:format("~p", [V])).
-
-url_encode(Bin) when is_binary(Bin) ->
- url_encode(binary_to_list(Bin));
-url_encode([H|T]) ->
- if
- H >= $a, $z >= H ->
- [H|url_encode(T)];
- H >= $A, $Z >= H ->
- [H|url_encode(T)];
- H >= $0, $9 >= H ->
- [H|url_encode(T)];
- H == $_; H == $.; H == $-; H == $: ->
- [H|url_encode(T)];
- true ->
- case lists:flatten(io_lib:format("~.16.0B", [H])) of
- [X, Y] ->
- [$%, X, Y | url_encode(T)];
- [X] ->
- [$%, $0, X | url_encode(T)]
- end
- end;
-url_encode([]) ->
- [].
-
-json_encode(V) ->
- Handler =
- fun({L}) when is_list(L) ->
- {struct,L};
- (Bad) ->
- exit({json_encode, {bad_term, Bad}})
- end,
- (mochijson2:encoder([{handler, Handler}]))(V).
-
-json_decode(V) ->
- try (mochijson2:decoder([{object_hook, fun({struct,L}) -> {L} end}]))(V)
- catch
- _Type:_Error ->
- throw({invalid_json,V})
- end.
-
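-% Constant-time comparison: xor every byte pair and or the results
-% together so the runtime does not depend on where the inputs differ.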
-verify([X|RestX], [Y|RestY], Result) ->
- verify(RestX, RestY, (X bxor Y) bor Result);
-verify([], [], Result) ->
- Result == 0.
-
-verify(<<X/binary>>, <<Y/binary>>) ->
- verify(?b2l(X), ?b2l(Y));
-verify(X, Y) when is_list(X) and is_list(Y) ->
- case length(X) == length(Y) of
- true ->
- verify(X, Y, 0);
- false ->
- false
- end;
-verify(_X, _Y) -> false.
-
-compressible_att_type(MimeType) when is_binary(MimeType) ->
- compressible_att_type(?b2l(MimeType));
-compressible_att_type(MimeType) ->
- TypeExpList = re:split(
- couch_config:get("attachments", "compressible_types", ""),
- "\\s*,\\s*",
- [{return, list}]
- ),
- lists:any(
- fun(TypeExp) ->
- Regexp = ["^\\s*", re:replace(TypeExp, "\\*", ".*"),
- "(?:\\s*;.*?)?\\s*", $$],
- re:run(MimeType, Regexp, [caseless]) =/= nomatch
- end,
- [T || T <- TypeExpList, T /= []]
- ).
-
--spec md5(Data::(iolist() | binary())) -> Digest::binary().
-md5(Data) ->
- try crypto:md5(Data) catch error:_ -> erlang:md5(Data) end.
-
--spec md5_init() -> Context::binary().
-md5_init() ->
- try crypto:md5_init() catch error:_ -> erlang:md5_init() end.
-
--spec md5_update(Context::binary(), Data::(iolist() | binary())) ->
- NewContext::binary().
-md5_update(Ctx, D) ->
- try crypto:md5_update(Ctx,D) catch error:_ -> erlang:md5_update(Ctx,D) end.
-
--spec md5_final(Context::binary()) -> Digest::binary().
-md5_final(Ctx) ->
- try crypto:md5_final(Ctx) catch error:_ -> erlang:md5_final(Ctx) end.
-
-% linear search is faster for small lists; length() takes ~0.5 ms on a 100k-element list
-reorder_results(Keys, SortedResults) when length(Keys) < 100 ->
- [couch_util:get_value(Key, SortedResults) || Key <- Keys];
-reorder_results(Keys, SortedResults) ->
- KeyDict = dict:from_list(SortedResults),
- [dict:fetch(Key, KeyDict) || Key <- Keys].
-
-url_strip_password(Url) ->
- re:replace(Url,
- "http(s)?://([^:]+):[^@]+@(.*)$",
- "http\\1://\\2:*****@\\3",
- [{return, list}]).
-
-encode_doc_id(#doc{id = Id}) ->
- encode_doc_id(Id);
-encode_doc_id(Id) when is_list(Id) ->
- encode_doc_id(?l2b(Id));
-encode_doc_id(<<"_design/", Rest/binary>>) ->
- "_design/" ++ url_encode(Rest);
-encode_doc_id(<<"_local/", Rest/binary>>) ->
- "_local/" ++ url_encode(Rest);
-encode_doc_id(Id) ->
- url_encode(Id).
diff --git a/1.1.x/src/couchdb/couch_uuids.erl b/1.1.x/src/couchdb/couch_uuids.erl
deleted file mode 100644
index e1851e1d..00000000
--- a/1.1.x/src/couchdb/couch_uuids.erl
+++ /dev/null
@@ -1,95 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
--module(couch_uuids).
--include("couch_db.hrl").
-
--behaviour(gen_server).
-
--export([start/0, stop/0]).
--export([new/0, random/0, utc_random/0]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
-start() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-new() ->
- gen_server:call(?MODULE, create).
-
-random() ->
- list_to_binary(couch_util:to_hex(crypto:rand_bytes(16))).
-
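-% Time-ordered uuid: 14 hex digits of microseconds since the Unix epoch
-% followed by 18 random hex digits.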
-utc_random() ->
- Now = {_, _, Micro} = now(),
- Nowish = calendar:now_to_universal_time(Now),
- Nowsecs = calendar:datetime_to_gregorian_seconds(Nowish),
- Then = calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}}),
- Prefix = io_lib:format("~14.16.0b", [(Nowsecs - Then) * 1000000 + Micro]),
- list_to_binary(Prefix ++ couch_util:to_hex(crypto:rand_bytes(9))).
-
-init([]) ->
- ok = couch_config:register(
- fun("uuids", _) -> gen_server:cast(?MODULE, change) end
- ),
- {ok, state()}.
-
-terminate(_Reason, _State) ->
- ok.
-
-handle_call(create, _From, random) ->
- {reply, random(), random};
-handle_call(create, _From, utc_random) ->
- {reply, utc_random(), utc_random};
-handle_call(create, _From, {sequential, Pref, Seq}) ->
- Result = ?l2b(Pref ++ io_lib:format("~6.16.0b", [Seq])),
- case Seq >= 16#fff000 of
- true ->
- {reply, Result, {sequential, new_prefix(), inc()}};
- _ ->
- {reply, Result, {sequential, Pref, Seq + inc()}}
- end.
-
-handle_cast(change, _State) ->
- {noreply, state()};
-handle_cast(stop, State) ->
- {stop, normal, State};
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-new_prefix() ->
- couch_util:to_hex((crypto:rand_bytes(13))).
-
-inc() ->
- crypto:rand_uniform(1, 16#ffe).
-
-state() ->
- AlgoStr = couch_config:get("uuids", "algorithm", "random"),
- case couch_util:to_existing_atom(AlgoStr) of
- random ->
- random;
- utc_random ->
- utc_random;
- sequential ->
- {sequential, new_prefix(), inc()};
- Unknown ->
- throw({unknown_uuid_algorithm, Unknown})
- end.
diff --git a/1.1.x/src/couchdb/couch_view.erl b/1.1.x/src/couchdb/couch_view.erl
deleted file mode 100644
index 911f1aa6..00000000
--- a/1.1.x/src/couchdb/couch_view.erl
+++ /dev/null
@@ -1,460 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_view).
--behaviour(gen_server).
-
--export([start_link/0,fold/4,less_json/2,less_json_ids/2,expand_dups/2,
- detuple_kvs/2,init/1,terminate/2,handle_call/3,handle_cast/2,handle_info/2,
- code_change/3,get_reduce_view/4,get_temp_reduce_view/5,get_temp_map_view/4,
- get_map_view/4,get_row_count/1,reduce_to_count/1,fold_reduce/4,
- extract_map_view/1,get_group_server/2,get_group_info/2,cleanup_index_files/1]).
-
--include("couch_db.hrl").
-
-
--record(server,{
- root_dir = []}).
-
-start_link() ->
- gen_server:start_link({local, couch_view}, couch_view, [], []).
-
-get_temp_updater(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
- {ok, Group} =
- couch_view_group:open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc),
- case gen_server:call(couch_view, {get_group_server, DbName, Group}, infinity) of
- {ok, Pid} ->
- Pid;
- Error ->
- throw(Error)
- end.
-
-get_group_server(DbName, GroupId) ->
- case couch_view_group:open_db_group(DbName, GroupId) of
- {ok, Group} ->
- case gen_server:call(couch_view, {get_group_server, DbName, Group}, infinity) of
- {ok, Pid} ->
- Pid;
- Error ->
- throw(Error)
- end;
- Error ->
- throw(Error)
- end.
-
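-% get_group/3 translates the stale option into a minimum update seq: 'ok'
-% and 'update_after' accept whatever is already indexed (seq 0), while the
-% default waits for the db's current update seq; 'update_after' also kicks
-% off a best-effort background refresh.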
-get_group(Db, GroupId, Stale) ->
- MinUpdateSeq = case Stale of
- ok -> 0;
- update_after -> 0;
- _Else -> couch_db:get_update_seq(Db)
- end,
- GroupPid = get_group_server(couch_db:name(Db), GroupId),
- Result = couch_view_group:request_group(GroupPid, MinUpdateSeq),
- case Stale of
- update_after ->
- % best effort, process might die
- spawn(fun() ->
- LastSeq = couch_db:get_update_seq(Db),
- couch_view_group:request_group(GroupPid, LastSeq)
- end);
- _ ->
- ok
- end,
- Result.
-
-get_temp_group(Db, Language, DesignOptions, MapSrc, RedSrc) ->
- couch_view_group:request_group(
- get_temp_updater(couch_db:name(Db), Language, DesignOptions, MapSrc, RedSrc),
- couch_db:get_update_seq(Db)).
-
-get_group_info(Db, GroupId) ->
- couch_view_group:request_group_info(
- get_group_server(couch_db:name(Db), GroupId)).
-
-cleanup_index_files(Db) ->
- % load all ddocs
- {ok, DesignDocs} = couch_db:get_design_docs(Db),
-
- % make unique list of group sigs
- Sigs = lists:map(fun(#doc{id = GroupId}) ->
- {ok, Info} = get_group_info(Db, GroupId),
- ?b2l(couch_util:get_value(signature, Info))
- end, [DD||DD <- DesignDocs, DD#doc.deleted == false]),
-
- FileList = list_index_files(Db),
-
-    % regex matching the signatures of all design docs still in use
- RegExp = "("++ string:join(Sigs, "|") ++")",
-
- % filter out the ones in use
- DeleteFiles = [FilePath
- || FilePath <- FileList,
- re:run(FilePath, RegExp, [{capture, none}]) =:= nomatch],
- % delete unused files
- ?LOG_DEBUG("deleting unused view index files: ~p",[DeleteFiles]),
- RootDir = couch_config:get("couchdb", "view_index_dir"),
- [couch_file:delete(RootDir,File,false)||File <- DeleteFiles],
- ok.
-
-list_index_files(Db) ->
-    % scan the view index directory for this db's index files
- RootDir = couch_config:get("couchdb", "view_index_dir"),
- filelib:wildcard(RootDir ++ "/." ++ ?b2l(couch_db:name(Db)) ++ "_design"++"/*").
-
-
-get_row_count(#view{btree=Bt}) ->
- {ok, {Count, _Reds}} = couch_btree:full_reduce(Bt),
- {ok, Count}.
-
-get_temp_reduce_view(Db, Language, DesignOptions, MapSrc, RedSrc) ->
- {ok, #group{views=[View]}=Group} =
- get_temp_group(Db, Language, DesignOptions, MapSrc, RedSrc),
- {ok, {temp_reduce, View}, Group}.
-
-
-get_reduce_view(Db, GroupId, Name, Update) ->
- case get_group(Db, GroupId, Update) of
- {ok, #group{views=Views,def_lang=Lang}=Group} ->
- case get_reduce_view0(Name, Lang, Views) of
- {ok, View} ->
- {ok, View, Group};
- Else ->
- Else
- end;
- Error ->
- Error
- end.
-
-get_reduce_view0(_Name, _Lang, []) ->
- {not_found, missing_named_view};
-get_reduce_view0(Name, Lang, [#view{reduce_funs=RedFuns}=View|Rest]) ->
- case get_key_pos(Name, RedFuns, 0) of
- 0 -> get_reduce_view0(Name, Lang, Rest);
- N -> {ok, {reduce, N, Lang, View}}
- end.
-
-extract_map_view({reduce, _N, _Lang, View}) ->
- View.
-
-detuple_kvs([], Acc) ->
- lists:reverse(Acc);
-detuple_kvs([KV | Rest], Acc) ->
- {{Key,Id},Value} = KV,
- NKV = [[Key, Id], Value],
- detuple_kvs(Rest, [NKV | Acc]).
-
-expand_dups([], Acc) ->
- lists:reverse(Acc);
-expand_dups([{Key, {dups, Vals}} | Rest], Acc) ->
- Expanded = [{Key, Val} || Val <- Vals],
- expand_dups(Rest, Expanded ++ Acc);
-expand_dups([KV | Rest], Acc) ->
- expand_dups(Rest, [KV | Acc]).
-
-fold_reduce({temp_reduce, #view{btree=Bt}}, Fun, Acc, Options) ->
- WrapperFun = fun({GroupedKey, _}, PartialReds, Acc0) ->
- {_, [Red]} = couch_btree:final_reduce(Bt, PartialReds),
- Fun(GroupedKey, Red, Acc0)
- end,
- couch_btree:fold_reduce(Bt, WrapperFun, Acc, Options);
-
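-% A view btree stores one reduction slot per reduce fun; to run only the
-% NthRed function we pad the other slots with [] so the stored shape
-% {Count, [Red1, Red2, ...]} is preserved while skipping their computation.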
-fold_reduce({reduce, NthRed, Lang, #view{btree=Bt, reduce_funs=RedFuns}}, Fun, Acc, Options) ->
- PreResultPadding = lists:duplicate(NthRed - 1, []),
- PostResultPadding = lists:duplicate(length(RedFuns) - NthRed, []),
- {_Name, FunSrc} = lists:nth(NthRed,RedFuns),
- ReduceFun =
- fun(reduce, KVs) ->
- {ok, Reduced} = couch_query_servers:reduce(Lang, [FunSrc], detuple_kvs(expand_dups(KVs, []),[])),
- {0, PreResultPadding ++ Reduced ++ PostResultPadding};
- (rereduce, Reds) ->
- UserReds = [[lists:nth(NthRed, UserRedsList)] || {_, UserRedsList} <- Reds],
- {ok, Reduced} = couch_query_servers:rereduce(Lang, [FunSrc], UserReds),
- {0, PreResultPadding ++ Reduced ++ PostResultPadding}
- end,
- WrapperFun = fun({GroupedKey, _}, PartialReds, Acc0) ->
- {_, Reds} = couch_btree:final_reduce(ReduceFun, PartialReds),
- Fun(GroupedKey, lists:nth(NthRed, Reds), Acc0)
- end,
- couch_btree:fold_reduce(Bt, WrapperFun, Acc, Options).
-
-get_key_pos(_Key, [], _N) ->
- 0;
-get_key_pos(Key, [{Key1,_Value}|_], N) when Key == Key1 ->
- N + 1;
-get_key_pos(Key, [_|Rest], N) ->
- get_key_pos(Key, Rest, N+1).
-
-
-get_temp_map_view(Db, Language, DesignOptions, Src) ->
- {ok, #group{views=[View]}=Group} = get_temp_group(Db, Language, DesignOptions, Src, []),
- {ok, View, Group}.
-
-get_map_view(Db, GroupId, Name, Stale) ->
- case get_group(Db, GroupId, Stale) of
- {ok, #group{views=Views}=Group} ->
- case get_map_view0(Name, Views) of
- {ok, View} ->
- {ok, View, Group};
- Else ->
- Else
- end;
- Error ->
- Error
- end.
-
-get_map_view0(_Name, []) ->
- {not_found, missing_named_view};
-get_map_view0(Name, [#view{map_names=MapNames}=View|Rest]) ->
- case lists:member(Name, MapNames) of
- true -> {ok, View};
- false -> get_map_view0(Name, Rest)
- end.
-
-reduce_to_count(Reductions) ->
- {Count, _} =
- couch_btree:final_reduce(
- fun(reduce, KVs) ->
- Count = lists:sum(
- [case V of {dups, Vals} -> length(Vals); _ -> 1 end
- || {_,V} <- KVs]),
- {Count, []};
- (rereduce, Reds) ->
- {lists:sum([Count0 || {Count0, _} <- Reds]), []}
- end, Reductions),
- Count.
-
-
-
-fold_fun(_Fun, [], _, Acc) ->
- {ok, Acc};
-fold_fun(Fun, [KV|Rest], {KVReds, Reds}, Acc) ->
- case Fun(KV, {KVReds, Reds}, Acc) of
- {ok, Acc2} ->
- fold_fun(Fun, Rest, {[KV|KVReds], Reds}, Acc2);
- {stop, Acc2} ->
- {stop, Acc2}
- end.
-
-
-fold(#view{btree=Btree}, Fun, Acc, Options) ->
- WrapperFun =
- fun(KV, Reds, Acc2) ->
- fold_fun(Fun, expand_dups([KV],[]), Reds, Acc2)
- end,
- {ok, _LastReduce, _AccResult} = couch_btree:fold(Btree, WrapperFun, Acc, Options).
-
-
-init([]) ->
- % read configuration settings and register for configuration changes
- RootDir = couch_config:get("couchdb", "view_index_dir"),
- Self = self(),
- ok = couch_config:register(
- fun("couchdb", "view_index_dir")->
- exit(Self, config_change)
- end),
-
- couch_db_update_notifier:start_link(
- fun({deleted, DbName}) ->
- gen_server:cast(couch_view, {reset_indexes, DbName});
- ({created, DbName}) ->
- gen_server:cast(couch_view, {reset_indexes, DbName});
- (_Else) ->
- ok
- end),
- ets:new(couch_groups_by_db, [bag, private, named_table]),
- ets:new(group_servers_by_sig, [set, protected, named_table]),
- ets:new(couch_groups_by_updater, [set, private, named_table]),
- process_flag(trap_exit, true),
- ok = couch_file:init_delete_dir(RootDir),
- {ok, #server{root_dir=RootDir}}.
-
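-% Three ets tables track group servers: group_servers_by_sig maps
-% {DbName, Sig} to a pid (or, while opening, a list of waiting callers),
-% couch_groups_by_updater maps a pid back to {DbName, Sig} for 'EXIT'
-% handling, and couch_groups_by_db is a bag listing every group of a db so
-% reset_indexes can find them all.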
-
-terminate(_Reason, _Srv) ->
- [couch_util:shutdown_sync(Pid) || {Pid, _} <-
- ets:tab2list(couch_groups_by_updater)],
- ok.
-
-
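-% Opening a group server is asynchronous: the first request spawns a
-% monitored opener and parks the caller in group_servers_by_sig as a wait
-% list; the 'DOWN' clause below replies to every waiter once the opener
-% exits with {DbName, Sig, Reply}.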
-handle_call({get_group_server, DbName, #group{sig=Sig}=Group}, From,
- #server{root_dir=Root}=Server) ->
- case ets:lookup(group_servers_by_sig, {DbName, Sig}) of
- [] ->
- spawn_monitor(fun() -> new_group(Root, DbName, Group) end),
- ets:insert(group_servers_by_sig, {{DbName, Sig}, [From]}),
- {noreply, Server};
- [{_, WaitList}] when is_list(WaitList) ->
- ets:insert(group_servers_by_sig, {{DbName, Sig}, [From | WaitList]}),
- {noreply, Server};
- [{_, ExistingPid}] ->
- {reply, {ok, ExistingPid}, Server}
- end;
-
-handle_call({reset_indexes, DbName}, _From, #server{root_dir=Root}=Server) ->
- do_reset_indexes(DbName, Root),
- {reply, ok, Server}.
-
-handle_cast({reset_indexes, DbName}, #server{root_dir=Root}=Server) ->
- do_reset_indexes(DbName, Root),
- {noreply, Server}.
-
-new_group(Root, DbName, #group{name=GroupId, sig=Sig} = Group) ->
- ?LOG_DEBUG("Spawning new group server for view group ~s in database ~s.",
- [GroupId, DbName]),
- case (catch couch_view_group:start_link({Root, DbName, Group})) of
- {ok, NewPid} ->
- unlink(NewPid),
- exit({DbName, Sig, {ok, NewPid}});
- {error, invalid_view_seq} ->
- ok = gen_server:call(couch_view, {reset_indexes, DbName}),
- new_group(Root, DbName, Group);
- Error ->
- exit({DbName, Sig, Error})
- end.
-
-do_reset_indexes(DbName, Root) ->
- % shutdown all the updaters and clear the files, the db got changed
- Names = ets:lookup(couch_groups_by_db, DbName),
- lists:foreach(
- fun({_DbName, Sig}) ->
- ?LOG_DEBUG("Killing update process for view group ~s. in database ~s.", [Sig, DbName]),
- [{_, Pid}] = ets:lookup(group_servers_by_sig, {DbName, Sig}),
- couch_util:shutdown_sync(Pid),
- delete_from_ets(Pid, DbName, Sig)
- end, Names),
- delete_index_dir(Root, DbName),
- RootDelDir = couch_config:get("couchdb", "view_index_dir"),
- couch_file:delete(RootDelDir, Root ++ "/." ++ ?b2l(DbName) ++ "_temp").
-
-handle_info({'EXIT', FromPid, Reason}, Server) ->
- case ets:lookup(couch_groups_by_updater, FromPid) of
- [] ->
- if Reason /= normal ->
- % non-updater linked process died, we propagate the error
- ?LOG_ERROR("Exit on non-updater process: ~p", [Reason]),
- exit(Reason);
- true -> ok
- end;
- [{_, {DbName, GroupId}}] ->
- delete_from_ets(FromPid, DbName, GroupId)
- end,
- {noreply, Server};
-
-handle_info({'DOWN', _, _, _, {DbName, Sig, Reply}}, Server) ->
- [{_, WaitList}] = ets:lookup(group_servers_by_sig, {DbName, Sig}),
- [gen_server:reply(From, Reply) || From <- WaitList],
- case Reply of {ok, NewPid} ->
- link(NewPid),
- add_to_ets(NewPid, DbName, Sig);
- _ -> ok end,
- {noreply, Server}.
-
-add_to_ets(Pid, DbName, Sig) ->
- true = ets:insert(couch_groups_by_updater, {Pid, {DbName, Sig}}),
- true = ets:insert(group_servers_by_sig, {{DbName, Sig}, Pid}),
- true = ets:insert(couch_groups_by_db, {DbName, Sig}).
-
-delete_from_ets(Pid, DbName, Sig) ->
- true = ets:delete(couch_groups_by_updater, Pid),
- true = ets:delete(group_servers_by_sig, {DbName, Sig}),
- true = ets:delete_object(couch_groups_by_db, {DbName, Sig}).
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-delete_index_dir(RootDir, DbName) ->
- nuke_dir(RootDir, RootDir ++ "/." ++ ?b2l(DbName) ++ "_design").
-
-nuke_dir(RootDelDir, Dir) ->
- case file:list_dir(Dir) of
- {error, enoent} -> ok; % doesn't exist
- {ok, Files} ->
- lists:foreach(
- fun(File)->
- Full = Dir ++ "/" ++ File,
- case couch_file:delete(RootDelDir, Full, false) of
- ok -> ok;
- {error, eperm} ->
- ok = nuke_dir(RootDelDir, Full)
- end
- end,
- Files),
- ok = file:del_dir(Dir)
- end.
-
-
-% Keys come back in the language of the btree: tuples.
-less_json_ids({JsonA, IdA}, {JsonB, IdB}) ->
- case less_json0(JsonA, JsonB) of
- 0 ->
- IdA < IdB;
- Result ->
- Result < 0
- end.
-
-less_json(A,B) ->
- less_json0(A,B) < 0.
-
-less_json0(A,A) -> 0;
-
-less_json0(A,B) when is_atom(A), is_atom(B) -> atom_sort(A) - atom_sort(B);
-less_json0(A,_) when is_atom(A) -> -1;
-less_json0(_,B) when is_atom(B) -> 1;
-
-less_json0(A,B) when is_number(A), is_number(B) -> A - B;
-less_json0(A,_) when is_number(A) -> -1;
-less_json0(_,B) when is_number(B) -> 1;
-
-less_json0(A,B) when is_binary(A), is_binary(B) -> couch_util:collate(A,B);
-less_json0(A,_) when is_binary(A) -> -1;
-less_json0(_,B) when is_binary(B) -> 1;
-
-less_json0(A,B) when is_list(A), is_list(B) -> less_list(A,B);
-less_json0(A,_) when is_list(A) -> -1;
-less_json0(_,B) when is_list(B) -> 1;
-
-less_json0({A},{B}) when is_list(A), is_list(B) -> less_props(A,B);
-less_json0({A},_) when is_list(A) -> -1;
-less_json0(_,{B}) when is_list(B) -> 1.
-
-atom_sort(null) -> 1;
-atom_sort(false) -> 2;
-atom_sort(true) -> 3.
-
-less_props([], [_|_]) ->
- -1;
-less_props(_, []) ->
- 1;
-less_props([{AKey, AValue}|RestA], [{BKey, BValue}|RestB]) ->
- case couch_util:collate(AKey, BKey) of
- 0 ->
- case less_json0(AValue, BValue) of
- 0 ->
- less_props(RestA, RestB);
- Result ->
- Result
- end;
- Result ->
- Result
- end.
-
-less_list([], [_|_]) ->
- -1;
-less_list(_, []) ->
- 1;
-less_list([A|RestA], [B|RestB]) ->
- case less_json0(A,B) of
- 0 ->
- less_list(RestA, RestB);
- Result ->
- Result
- end.
diff --git a/1.1.x/src/couchdb/couch_view_compactor.erl b/1.1.x/src/couchdb/couch_view_compactor.erl
deleted file mode 100644
index 9a47f5f8..00000000
--- a/1.1.x/src/couchdb/couch_view_compactor.erl
+++ /dev/null
@@ -1,102 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_view_compactor).
-
--include("couch_db.hrl").
-
--export([start_compact/2]).
-
-%% @spec start_compact(DbName::binary(), GroupId::binary()) -> ok
-%% @doc Compacts the views. GroupId must not include the _design/ prefix
-start_compact(DbName, GroupId) ->
- Pid = couch_view:get_group_server(DbName, <<"_design/",GroupId/binary>>),
- gen_server:cast(Pid, {start_compact, fun compact_group/2}).
-
-%%=============================================================================
-%% internal functions
-%%=============================================================================
-
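-% compact_group/2 streams the old group's id btree and every view btree into
-% the empty group's files, flushing accumulated KVs every 10000 rows and
-% reporting progress through couch_task_status; when done it hands the new
-% group to the group server with a compact_done cast.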
-%% @spec compact_group(Group, EmptyGroup) -> ok
-compact_group(Group, EmptyGroup) ->
- #group{
- current_seq = Seq,
- id_btree = IdBtree,
- name = GroupId,
- views = Views
- } = Group,
-
- #group{
- db = Db,
- id_btree = EmptyIdBtree,
- views = EmptyViews
- } = EmptyGroup,
-
- {ok, {Count, _}} = couch_btree:full_reduce(Db#db.fulldocinfo_by_id_btree),
-
- <<"_design", ShortName/binary>> = GroupId,
- DbName = couch_db:name(Db),
- TaskName = <<DbName/binary, ShortName/binary>>,
- couch_task_status:add_task(<<"View Group Compaction">>, TaskName, <<"">>),
-
- Fun = fun({DocId, _ViewIdKeys} = KV, {Bt, Acc, TotalCopied, LastId}) ->
- if DocId =:= LastId -> % COUCHDB-999
- Msg = "Duplicates of ~s detected in ~s ~s - rebuild required",
- exit(io_lib:format(Msg, [DocId, DbName, GroupId]));
- true -> ok end,
- if TotalCopied rem 10000 =:= 0 ->
- couch_task_status:update("Copied ~p of ~p Ids (~p%)",
- [TotalCopied, Count, (TotalCopied*100) div Count]),
- {ok, Bt2} = couch_btree:add(Bt, lists:reverse([KV|Acc])),
- {ok, {Bt2, [], TotalCopied+1, DocId}};
- true ->
- {ok, {Bt, [KV|Acc], TotalCopied+1, DocId}}
- end
- end,
- {ok, _, {Bt3, Uncopied, _Total, _LastId}} = couch_btree:foldl(IdBtree, Fun,
- {EmptyIdBtree, [], 0, nil}),
- {ok, NewIdBtree} = couch_btree:add(Bt3, lists:reverse(Uncopied)),
-
- NewViews = lists:map(fun({View, EmptyView}) ->
- compact_view(View, EmptyView)
- end, lists:zip(Views, EmptyViews)),
-
- NewGroup = EmptyGroup#group{
- id_btree=NewIdBtree,
- views=NewViews,
- current_seq=Seq
- },
-
- Pid = couch_view:get_group_server(DbName, GroupId),
- gen_server:cast(Pid, {compact_done, NewGroup}).
-
-%% @spec compact_view(View, EmptyView) -> CompactView
-compact_view(View, EmptyView) ->
- {ok, Count} = couch_view:get_row_count(View),
-
- %% Key is {Key,DocId}
- Fun = fun(KV, {Bt, Acc, TotalCopied}) ->
- if TotalCopied rem 10000 =:= 0 ->
- couch_task_status:update("View #~p: copied ~p of ~p KVs (~p%)",
- [View#view.id_num, TotalCopied, Count, (TotalCopied*100) div Count]),
- {ok, Bt2} = couch_btree:add(Bt, lists:reverse([KV|Acc])),
- {ok, {Bt2, [], TotalCopied + 1}};
- true ->
- {ok, {Bt, [KV|Acc], TotalCopied + 1}}
- end
- end,
-
- {ok, _, {Bt3, Uncopied, _Total}} = couch_btree:foldl(View#view.btree, Fun,
- {EmptyView#view.btree, [], 0}),
- {ok, NewBt} = couch_btree:add(Bt3, lists:reverse(Uncopied)),
- EmptyView#view{btree = NewBt}.
-
diff --git a/1.1.x/src/couchdb/couch_view_group.erl b/1.1.x/src/couchdb/couch_view_group.erl
deleted file mode 100644
index 6ef1dcb4..00000000
--- a/1.1.x/src/couchdb/couch_view_group.erl
+++ /dev/null
@@ -1,642 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_view_group).
--behaviour(gen_server).
-
-%% API
--export([start_link/1, request_group/2, request_group_info/1]).
--export([open_db_group/2, open_temp_group/5, design_doc_to_view_group/1,design_root/2]).
-
-%% gen_server callbacks
--export([init/1, handle_call/3, handle_cast/2, handle_info/2,
- terminate/2, code_change/3]).
-
--include("couch_db.hrl").
-
--record(group_state, {
- type,
- db_name,
- init_args,
- group,
- updater_pid=nil,
- compactor_pid=nil,
- waiting_commit=false,
- waiting_list=[],
- ref_counter=nil
-}).
-
-% api methods
-request_group(Pid, Seq) ->
- ?LOG_DEBUG("request_group {Pid, Seq} ~p", [{Pid, Seq}]),
- case gen_server:call(Pid, {request_group, Seq}, infinity) of
- {ok, Group, RefCounter} ->
- couch_ref_counter:add(RefCounter),
- {ok, Group};
- Error ->
- ?LOG_DEBUG("request_group Error ~p", [Error]),
- throw(Error)
- end.
-
-request_group_info(Pid) ->
- case gen_server:call(Pid, request_group_info) of
- {ok, GroupInfoList} ->
- {ok, GroupInfoList};
- Error ->
- throw(Error)
- end.
-
-% from template
-start_link(InitArgs) ->
- case gen_server:start_link(couch_view_group,
- {InitArgs, self(), Ref = make_ref()}, []) of
- {ok, Pid} ->
- {ok, Pid};
- ignore ->
- receive
- {Ref, Pid, Error} ->
- case process_info(self(), trap_exit) of
- {trap_exit, true} -> receive {'EXIT', Pid, _} -> ok end;
- {trap_exit, false} -> ok
- end,
- Error
- end;
- Error ->
- Error
- end.
-
-% init prepares the group state; view updaters are spawned later, on demand.
-init({{_, DbName, _} = InitArgs, ReturnPid, Ref}) ->
- process_flag(trap_exit, true),
- try prepare_group(InitArgs, false) of
- {ok, #group{db=Db, fd=Fd, current_seq=Seq}=Group} ->
- case Seq > couch_db:get_update_seq(Db) of
- true ->
- ReturnPid ! {Ref, self(), {error, invalid_view_seq}},
- ignore;
- _ ->
- couch_db:monitor(Db),
- couch_db:close(Db),
- {ok, RefCounter} = couch_ref_counter:start([Fd]),
- {ok, #group_state{
- db_name=DbName,
- init_args=InitArgs,
- group=Group#group{db=nil},
- ref_counter=RefCounter}}
- end;
- Error ->
- ReturnPid ! {Ref, self(), Error},
- ignore
- catch exit:no_db_file ->
- ReturnPid ! {Ref, self(), {error, no_db_file}},
- ignore
- end.
-
-
-
-
-% There are two sources of messages: couch_view, which requests an up-to-date
-% view group, and the couch_view_updater, which, when spawned, updates the
-% group and sends it back here. We employ a caching mechanism so that, between
-% database writes, we don't have to spawn a couch_view_updater for every view
-% request.
-
-% The caching mechanism: each request is submitted with a seq_id for the
-% database at the time it was read. We guarantee to return a view from that
-% sequence or newer.
-
-% If the request sequence is newer than what the group has indexed, we queue
-% the caller and, if the updater is not running, we launch it.
-
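-% A minimal sketch of the caller side (couch_view:get_group/3 follows this
-% pattern; the variable names are illustrative):
-%
-%   {ok, Db} = couch_db:open_int(DbName, []),
-%   Seq = couch_db:get_update_seq(Db),
-%   GroupPid = couch_view:get_group_server(DbName, GroupId),
-%   % blocks until the group is indexed at least through Seq
-%   {ok, Group} = couch_view_group:request_group(GroupPid, Seq).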
-handle_call({request_group, RequestSeq}, From,
- #group_state{
- db_name=DbName,
- group=#group{current_seq=Seq}=Group,
- updater_pid=nil,
- waiting_list=WaitList
- }=State) when RequestSeq > Seq ->
- {ok, Db} = couch_db:open_int(DbName, []),
- Group2 = Group#group{db=Db},
- Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group2) end),
-
- {noreply, State#group_state{
- updater_pid=Pid,
- group=Group2,
- waiting_list=[{From,RequestSeq}|WaitList]
- }, infinity};
-
-
-% If the request sequence is less than or equal to the seq_id of a known Group,
-% we respond with that Group.
-handle_call({request_group, RequestSeq}, _From, #group_state{
- group = #group{current_seq=GroupSeq} = Group,
- ref_counter = RefCounter
- } = State) when RequestSeq =< GroupSeq ->
- {reply, {ok, Group, RefCounter}, State};
-
-% Otherwise: TargetSeq => RequestSeq > GroupSeq
-% We've already initiated the appropriate action, so just hold the response
-% until the group catches up to RequestSeq.
-handle_call({request_group, RequestSeq}, From,
- #group_state{waiting_list=WaitList}=State) ->
- {noreply, State#group_state{
- waiting_list=[{From, RequestSeq}|WaitList]
- }, infinity};
-
-handle_call(request_group_info, _From, State) ->
- GroupInfo = get_group_info(State),
- {reply, {ok, GroupInfo}, State}.
-
-handle_cast({start_compact, CompactFun}, #group_state{compactor_pid=nil}
- = State) ->
- #group_state{
- group = #group{name = GroupId, sig = GroupSig} = Group,
- init_args = {RootDir, DbName, _}
- } = State,
- ?LOG_INFO("View index compaction starting for ~s ~s", [DbName, GroupId]),
- {ok, Db} = couch_db:open_int(DbName, []),
- {ok, Fd} = open_index_file(compact, RootDir, DbName, GroupSig),
- NewGroup = reset_file(Db, Fd, DbName, Group),
- Pid = spawn_link(fun() -> CompactFun(Group, NewGroup) end),
- {noreply, State#group_state{compactor_pid = Pid}};
-handle_cast({start_compact, _}, State) ->
- %% compact already running, this is a no-op
- {noreply, State};
-
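-% When the compactor catches up to (or passes) the current seq we swap
-% files: delete the live index, rename the .compact.view into place, restart
-% any running updater against the new group, and drop the old fd's ref
-% counter so in-flight readers finish before it goes away.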
-handle_cast({compact_done, #group{current_seq=NewSeq} = NewGroup},
- #group_state{group = #group{current_seq=OldSeq}} = State)
- when NewSeq >= OldSeq ->
- #group_state{
- group = #group{name=GroupId, fd=OldFd, sig=GroupSig} = Group,
- init_args = {RootDir, DbName, _},
- updater_pid = UpdaterPid,
- compactor_pid = CompactorPid,
- ref_counter = RefCounter
- } = State,
-
- ?LOG_INFO("View index compaction complete for ~s ~s", [DbName, GroupId]),
- FileName = index_file_name(RootDir, DbName, GroupSig),
- CompactName = index_file_name(compact, RootDir, DbName, GroupSig),
- ok = couch_file:delete(RootDir, FileName),
- ok = file:rename(CompactName, FileName),
-
- %% if an updater is running, kill it and start a new one
- NewUpdaterPid =
- if is_pid(UpdaterPid) ->
- unlink(UpdaterPid),
- exit(UpdaterPid, view_compaction_complete),
- Owner = self(),
- spawn_link(fun()-> couch_view_updater:update(Owner, NewGroup) end);
- true ->
- nil
- end,
-
- %% cleanup old group
- unlink(CompactorPid),
- receive {'EXIT', CompactorPid, normal} -> ok after 0 -> ok end,
- unlink(OldFd),
- couch_ref_counter:drop(RefCounter),
- {ok, NewRefCounter} = couch_ref_counter:start([NewGroup#group.fd]),
- case Group#group.db of
- nil -> ok;
- Else -> couch_db:close(Else)
- end,
-
- case NewGroup#group.db of
- nil -> ok;
- _ -> couch_db:close(NewGroup#group.db)
- end,
-
- self() ! delayed_commit,
- {noreply, State#group_state{
- group=NewGroup#group{db = nil},
- ref_counter=NewRefCounter,
- compactor_pid=nil,
- updater_pid=NewUpdaterPid
- }};
-handle_cast({compact_done, NewGroup}, State) ->
- #group_state{
- group = #group{name = GroupId, current_seq = CurrentSeq},
- init_args={_RootDir, DbName, _}
- } = State,
- ?LOG_INFO("View index compaction still behind for ~s ~s -- current: ~p " ++
- "compact: ~p", [DbName, GroupId, CurrentSeq, NewGroup#group.current_seq]),
- couch_db:close(NewGroup#group.db),
- Pid = spawn_link(fun() ->
- {ok, Db} = couch_db:open_int(DbName, []),
- {_,Ref} = erlang:spawn_monitor(fun() ->
- couch_view_updater:update(nil, NewGroup#group{db = Db})
- end),
- receive
- {'DOWN', Ref, _, _, {new_group, NewGroup2}} ->
- couch_db:close(Db),
- #group{name=GroupId} = NewGroup2,
- Pid2 = couch_view:get_group_server(DbName, GroupId),
- gen_server:cast(Pid2, {compact_done, NewGroup2#group{db = nil}})
- end
- end),
- {noreply, State#group_state{compactor_pid = Pid}};
-
-handle_cast({partial_update, Pid, NewGroup}, #group_state{updater_pid=Pid}
- = State) ->
- #group_state{
- db_name = DbName,
- waiting_commit = WaitingCommit
- } = State,
- NewSeq = NewGroup#group.current_seq,
- ?LOG_INFO("checkpointing view update at seq ~p for ~s ~s", [NewSeq,
- DbName, NewGroup#group.name]),
- if not WaitingCommit ->
- erlang:send_after(1000, self(), delayed_commit);
- true -> ok
- end,
- {noreply, State#group_state{group=NewGroup, waiting_commit=true}};
-handle_cast({partial_update, _, _}, State) ->
- %% message from an old (probably pre-compaction) updater; ignore
- {noreply, State}.
-
-handle_info(delayed_commit, #group_state{db_name=DbName,group=Group}=State) ->
- {ok, Db} = couch_db:open_int(DbName, []),
- CommittedSeq = couch_db:get_committed_update_seq(Db),
- couch_db:close(Db),
- if CommittedSeq >= Group#group.current_seq ->
- % save the header
- Header = {Group#group.sig, get_index_header_data(Group)},
- ok = couch_file:write_header(Group#group.fd, Header),
- {noreply, State#group_state{waiting_commit=false}};
- true ->
- % We can't commit the header because the database seq that's fully
- % committed to disk is still behind us. If we committed now and the
- % database lost those changes our view could be forever out of sync
- % with the database. But a crash before we commit these changes, no big
- % deal, we only lose incremental changes since last committal.
- erlang:send_after(1000, self(), delayed_commit),
- {noreply, State#group_state{waiting_commit=true}}
- end;
-
-handle_info({'EXIT', FromPid, {new_group, #group{db=Db}=Group}},
- #group_state{db_name=DbName,
- updater_pid=UpPid,
- ref_counter=RefCounter,
- waiting_list=WaitList,
- waiting_commit=WaitingCommit}=State) when UpPid == FromPid ->
- ok = couch_db:close(Db),
- if not WaitingCommit ->
- erlang:send_after(1000, self(), delayed_commit);
- true -> ok
- end,
- case reply_with_group(Group, WaitList, [], RefCounter) of
- [] ->
- {noreply, State#group_state{waiting_commit=true, waiting_list=[],
- group=Group#group{db=nil}, updater_pid=nil}};
- StillWaiting ->
- % we still have some waiters, reopen the database and reupdate the index
- {ok, Db2} = couch_db:open_int(DbName, []),
- Group2 = Group#group{db=Db2},
- Owner = self(),
- Pid = spawn_link(fun() -> couch_view_updater:update(Owner, Group2) end),
- {noreply, State#group_state{waiting_commit=true,
- waiting_list=StillWaiting, group=Group2, updater_pid=Pid}}
- end;
-handle_info({'EXIT', _, {new_group, _}}, State) ->
- %% message from an old (probably pre-compaction) updater; ignore
- {noreply, State};
-
-handle_info({'EXIT', FromPid, reset},
- #group_state{
- init_args=InitArgs,
- updater_pid=UpPid,
- group=Group}=State) when UpPid == FromPid ->
- ok = couch_db:close(Group#group.db),
- case prepare_group(InitArgs, true) of
- {ok, ResetGroup} ->
- Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, ResetGroup) end),
- {noreply, State#group_state{
- updater_pid=Pid,
- group=ResetGroup}};
- Error ->
- {stop, normal, reply_all(State, Error)}
- end;
-handle_info({'EXIT', _, reset}, State) ->
- %% message from an old (probably pre-compaction) updater; ignore
- {noreply, State};
-
-handle_info({'EXIT', _FromPid, normal}, State) ->
- {noreply, State};
-
-handle_info({'EXIT', FromPid, {{nocatch, Reason}, _Trace}}, State) ->
- ?LOG_DEBUG("Uncaught throw() in linked pid: ~p", [{FromPid, Reason}]),
- {stop, Reason, State};
-
-handle_info({'EXIT', FromPid, Reason}, State) ->
- ?LOG_DEBUG("Exit from linked pid: ~p", [{FromPid, Reason}]),
- {stop, Reason, State};
-
-handle_info({'DOWN',_,_,_,_}, State) ->
- ?LOG_INFO("Shutting down view group server, monitored db is closing.", []),
- {stop, normal, reply_all(State, shutdown)}.
-
-
-terminate(Reason, #group_state{updater_pid=Update, compactor_pid=Compact}=S) ->
- reply_all(S, Reason),
- couch_util:shutdown_sync(Update),
- couch_util:shutdown_sync(Compact),
- ok.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%% Local Functions
-
-% reply_with_group/3
-% for each item in the WaitingList {Pid, Seq}
-% if the Seq is =< GroupSeq, reply
-reply_with_group(Group=#group{current_seq=GroupSeq}, [{Pid, Seq}|WaitList],
- StillWaiting, RefCounter) when Seq =< GroupSeq ->
- gen_server:reply(Pid, {ok, Group, RefCounter}),
- reply_with_group(Group, WaitList, StillWaiting, RefCounter);
-
-% else
-% put it in the continuing waiting list
-reply_with_group(Group, [{Pid, Seq}|WaitList], StillWaiting, RefCounter) ->
- reply_with_group(Group, WaitList, [{Pid, Seq}|StillWaiting], RefCounter);
-
-% return the still waiting list
-reply_with_group(_Group, [], StillWaiting, _RefCounter) ->
- StillWaiting.
-
-reply_all(#group_state{waiting_list=WaitList}=State, Reply) ->
- [catch gen_server:reply(Pid, Reply) || {Pid, _} <- WaitList],
- State#group_state{waiting_list=[]}.
-
-prepare_group({RootDir, DbName, #group{sig=Sig}=Group}, ForceReset)->
- case couch_db:open_int(DbName, []) of
- {ok, Db} ->
- case open_index_file(RootDir, DbName, Sig) of
- {ok, Fd} ->
- if ForceReset ->
- % this can happen if we missed a purge
- {ok, reset_file(Db, Fd, DbName, Group)};
- true ->
- % 09 UPGRADE CODE
- ok = couch_file:upgrade_old_header(Fd, <<$r, $c, $k, 0>>),
- case (catch couch_file:read_header(Fd)) of
- {ok, {Sig, HeaderInfo}} ->
- % sigs match!
- {ok, init_group(Db, Fd, Group, HeaderInfo)};
- _ ->
- % this happens on a new file
- {ok, reset_file(Db, Fd, DbName, Group)}
- end
- end;
- Error ->
- catch delete_index_file(RootDir, DbName, Sig),
- Error
- end;
- Else ->
- Else
- end.
-
-get_index_header_data(#group{current_seq=Seq, purge_seq=PurgeSeq,
- id_btree=IdBtree,views=Views}) ->
- ViewStates = [
- {couch_btree:get_state(V#view.btree), V#view.update_seq, V#view.purge_seq} || V <- Views
- ],
- #index_header{
- seq=Seq,
- purge_seq=PurgeSeq,
- id_btree_state=couch_btree:get_state(IdBtree),
- view_states=ViewStates
- }.
-
-hex_sig(GroupSig) ->
- couch_util:to_hex(?b2l(GroupSig)).
-
-design_root(RootDir, DbName) ->
- RootDir ++ "/." ++ ?b2l(DbName) ++ "_design/".
-
-index_file_name(RootDir, DbName, GroupSig) ->
- design_root(RootDir, DbName) ++ hex_sig(GroupSig) ++".view".
-
-index_file_name(compact, RootDir, DbName, GroupSig) ->
- design_root(RootDir, DbName) ++ hex_sig(GroupSig) ++".compact.view".
-
-
-open_index_file(RootDir, DbName, GroupSig) ->
- FileName = index_file_name(RootDir, DbName, GroupSig),
- case couch_file:open(FileName) of
- {ok, Fd} -> {ok, Fd};
- {error, enoent} -> couch_file:open(FileName, [create]);
- Error -> Error
- end.
-
-open_index_file(compact, RootDir, DbName, GroupSig) ->
- FileName = index_file_name(compact, RootDir, DbName, GroupSig),
- case couch_file:open(FileName) of
- {ok, Fd} -> {ok, Fd};
- {error, enoent} -> couch_file:open(FileName, [create]);
- Error -> Error
- end.
-
-open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
- case couch_db:open_int(DbName, []) of
- {ok, Db} ->
- View = #view{map_names=[<<"_temp">>],
- id_num=0,
- btree=nil,
- def=MapSrc,
- reduce_funs= if RedSrc==[] -> []; true -> [{<<"_temp">>, RedSrc}] end,
- options=DesignOptions},
- couch_db:close(Db),
- {ok, set_view_sig(#group{name = <<"_temp">>,lib={[]}, views=[View],
- def_lang=Language, design_options=DesignOptions})};
- Error ->
- Error
- end.
-
-set_view_sig(#group{
- views=Views,
- lib={[]},
- def_lang=Language,
- design_options=DesignOptions}=G) ->
- ViewInfo = [old_view_format(V) || V <- Views],
- G#group{sig=couch_util:md5(term_to_binary({ViewInfo, Language, DesignOptions}))};
-set_view_sig(#group{
- views=Views,
- lib=Lib,
- def_lang=Language,
- design_options=DesignOptions}=G) ->
- ViewInfo = [old_view_format(V) || V <- Views],
- G#group{sig=couch_util:md5(term_to_binary({ViewInfo, Language, DesignOptions, sort_lib(Lib)}))}.
-
-% Use the old view record format so group sigs don't change
-old_view_format(View) ->
- {
- view,
- View#view.id_num,
- View#view.map_names,
- View#view.def,
- View#view.btree,
- View#view.reduce_funs,
- View#view.options
- }.
-
-sort_lib({Lib}) ->
- sort_lib(Lib, []).
-sort_lib([], LAcc) ->
- lists:keysort(1, LAcc);
-sort_lib([{LName, {LObj}}|Rest], LAcc) ->
- LSorted = sort_lib(LObj, []), % descend into nested object
- sort_lib(Rest, [{LName, LSorted}|LAcc]);
-sort_lib([{LName, LCode}|Rest], LAcc) ->
- sort_lib(Rest, [{LName, LCode}|LAcc]).
-
-open_db_group(DbName, GroupId) ->
- case couch_db:open_int(DbName, []) of
- {ok, Db} ->
- case couch_db:open_doc(Db, GroupId) of
- {ok, Doc} ->
- couch_db:close(Db),
- {ok, design_doc_to_view_group(Doc)};
- Else ->
- couch_db:close(Db),
- Else
- end;
- Else ->
- Else
- end.
-
-get_group_info(State) ->
- #group_state{
- group=Group,
- updater_pid=UpdaterPid,
- compactor_pid=CompactorPid,
- waiting_commit=WaitingCommit,
- waiting_list=WaitersList
- } = State,
- #group{
- fd = Fd,
- sig = GroupSig,
- def_lang = Lang,
- current_seq=CurrentSeq,
- purge_seq=PurgeSeq
- } = Group,
- {ok, Size} = couch_file:bytes(Fd),
- [
- {signature, ?l2b(hex_sig(GroupSig))},
- {language, Lang},
- {disk_size, Size},
- {updater_running, UpdaterPid /= nil},
- {compact_running, CompactorPid /= nil},
- {waiting_commit, WaitingCommit},
- {waiting_clients, length(WaitersList)},
- {update_seq, CurrentSeq},
- {purge_seq, PurgeSeq}
- ].
-
-% maybe move to another module
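-% design_doc_to_view_group/1 groups a ddoc's views by identical map source
-% (and view options), so e.g. two named views sharing one map fun become a
-% single #view{} with two map_names and are indexed once.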
-design_doc_to_view_group(#doc{id=Id,body={Fields}}) ->
- Language = couch_util:get_value(<<"language">>, Fields, <<"javascript">>),
- {DesignOptions} = couch_util:get_value(<<"options">>, Fields, {[]}),
- {RawViews} = couch_util:get_value(<<"views">>, Fields, {[]}),
- Lib = couch_util:get_value(<<"lib">>, RawViews, {[]}),
- % add the views to a dictionary object, with the map source as the key
- DictBySrc =
- lists:foldl(
- fun({Name, {MRFuns}}, DictBySrcAcc) ->
- case couch_util:get_value(<<"map">>, MRFuns) of
- undefined -> DictBySrcAcc;
- MapSrc ->
- RedSrc = couch_util:get_value(<<"reduce">>, MRFuns, null),
- {ViewOptions} = couch_util:get_value(<<"options">>, MRFuns, {[]}),
- View =
- case dict:find({MapSrc, ViewOptions}, DictBySrcAcc) of
- {ok, View0} -> View0;
- error -> #view{def=MapSrc, options=ViewOptions} % create new view object
- end,
- View2 =
- if RedSrc == null ->
- View#view{map_names=[Name|View#view.map_names]};
- true ->
- View#view{reduce_funs=[{Name,RedSrc}|View#view.reduce_funs]}
- end,
- dict:store({MapSrc, ViewOptions}, View2, DictBySrcAcc)
- end
- end, dict:new(), RawViews),
- % number the views
- {Views, _N} = lists:mapfoldl(
- fun({_Src, View}, N) ->
- {View#view{id_num=N},N+1}
- end, 0, lists:sort(dict:to_list(DictBySrc))),
- set_view_sig(#group{name=Id, lib=Lib, views=Views, def_lang=Language, design_options=DesignOptions}).
-
-reset_group(#group{views=Views}=Group) ->
- Views2 = [View#view{btree=nil} || View <- Views],
- Group#group{db=nil,fd=nil,query_server=nil,current_seq=0,
- id_btree=nil,views=Views2}.
-
-reset_file(Db, Fd, DbName, #group{sig=Sig,name=Name} = Group) ->
- ?LOG_DEBUG("Resetting group index \"~s\" in db ~s", [Name, DbName]),
- ok = couch_file:truncate(Fd, 0),
- ok = couch_file:write_header(Fd, {Sig, nil}),
- init_group(Db, Fd, reset_group(Group), nil).
-
-delete_index_file(RootDir, DbName, GroupSig) ->
- couch_file:delete(RootDir, index_file_name(RootDir, DbName, GroupSig)).
-
-init_group(Db, Fd, #group{views=Views}=Group, nil) ->
- init_group(Db, Fd, Group,
- #index_header{seq=0, purge_seq=couch_db:get_purge_seq(Db),
- id_btree_state=nil, view_states=[{nil, 0, 0} || _ <- Views]});
-init_group(Db, Fd, #group{def_lang=Lang,views=Views}=
- Group, IndexHeader) ->
- #index_header{seq=Seq, purge_seq=PurgeSeq,
- id_btree_state=IdBtreeState, view_states=ViewStates} = IndexHeader,
- StateUpdate = fun
- ({_, _, _}=State) -> State;
- (State) -> {State, 0, 0}
- end,
- ViewStates2 = lists:map(StateUpdate, ViewStates),
- {ok, IdBtree} = couch_btree:open(IdBtreeState, Fd),
- Views2 = lists:zipwith(
- fun({BTState, USeq, PSeq}, #view{reduce_funs=RedFuns,options=Options}=View) ->
- FunSrcs = [FunSrc || {_Name, FunSrc} <- RedFuns],
- ReduceFun =
- fun(reduce, KVs) ->
- KVs2 = couch_view:expand_dups(KVs,[]),
- KVs3 = couch_view:detuple_kvs(KVs2,[]),
- {ok, Reduced} = couch_query_servers:reduce(Lang, FunSrcs,
- KVs3),
- {length(KVs3), Reduced};
- (rereduce, Reds) ->
- Count = lists:sum([Count0 || {Count0, _} <- Reds]),
- UserReds = [UserRedsList || {_, UserRedsList} <- Reds],
- {ok, Reduced} = couch_query_servers:rereduce(Lang, FunSrcs,
- UserReds),
- {Count, Reduced}
- end,
-
- case couch_util:get_value(<<"collation">>, Options, <<"default">>) of
- <<"default">> ->
- Less = fun couch_view:less_json_ids/2;
- <<"raw">> ->
- Less = fun(A,B) -> A < B end
- end,
- {ok, Btree} = couch_btree:open(BTState, Fd,
- [{less, Less}, {reduce, ReduceFun}]
- ),
- View#view{btree=Btree, update_seq=USeq, purge_seq=PSeq}
- end,
- ViewStates2, Views),
- Group#group{db=Db, fd=Fd, current_seq=Seq, purge_seq=PurgeSeq,
- id_btree=IdBtree, views=Views2}.
diff --git a/1.1.x/src/couchdb/couch_view_updater.erl b/1.1.x/src/couchdb/couch_view_updater.erl
deleted file mode 100644
index 8e089fa9..00000000
--- a/1.1.x/src/couchdb/couch_view_updater.erl
+++ /dev/null
@@ -1,265 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_view_updater).
-
--export([update/2]).
-
--include("couch_db.hrl").
-
--spec update(_, #group{}) -> no_return().
-
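-% update/2 runs the indexer as a three-stage pipeline: this process streams
-% changed docs into MapQueue, a linked do_maps/4 process maps them into
-% WriteQueue, and a linked do_writes/5 process folds the results into the
-% btrees, checkpointing via {partial_update, ...} casts to Owner.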
-update(Owner, Group) ->
- #group{
- db = #db{name=DbName} = Db,
- name = GroupName,
- current_seq = Seq,
- purge_seq = PurgeSeq
- } = Group,
- couch_task_status:add_task(<<"View Group Indexer">>, <<DbName/binary," ",GroupName/binary>>, <<"Starting index update">>),
-
- DbPurgeSeq = couch_db:get_purge_seq(Db),
- Group2 =
- if DbPurgeSeq == PurgeSeq ->
- Group;
- DbPurgeSeq == PurgeSeq + 1 ->
- couch_task_status:update(<<"Removing purged entries from view index.">>),
- purge_index(Group);
- true ->
- couch_task_status:update(<<"Resetting view index due to lost purge entries.">>),
- exit(reset)
- end,
- {ok, MapQueue} = couch_work_queue:new(
- [{max_size, 100000}, {max_items, 500}]),
- {ok, WriteQueue} = couch_work_queue:new(
- [{max_size, 100000}, {max_items, 500}]),
- Self = self(),
- ViewEmptyKVs = [{View, []} || View <- Group2#group.views],
- spawn_link(fun() -> do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) end),
- spawn_link(fun() -> do_writes(Self, Owner, Group2, WriteQueue, Seq == 0) end),
- % compute on all docs modified since we last computed.
- TotalChanges = couch_db:count_changes_since(Db, Seq),
- % update status every half second
- couch_task_status:set_update_frequency(500),
- #group{ design_options = DesignOptions } = Group,
- IncludeDesign = couch_util:get_value(<<"include_design">>,
- DesignOptions, false),
- LocalSeq = couch_util:get_value(<<"local_seq">>, DesignOptions, false),
- DocOpts =
- case LocalSeq of
- true -> [conflicts, deleted_conflicts, local_seq];
- _ -> [conflicts, deleted_conflicts]
- end,
- {ok, _, _}
- = couch_db:enum_docs_since(
- Db,
- Seq,
- fun(DocInfo, _, ChangesProcessed) ->
- couch_task_status:update("Processed ~p of ~p changes (~p%)",
- [ChangesProcessed, TotalChanges, (ChangesProcessed*100) div TotalChanges]),
- load_doc(Db, DocInfo, MapQueue, DocOpts, IncludeDesign),
- {ok, ChangesProcessed+1}
- end,
- 0, []),
- couch_task_status:set_update_frequency(0),
- couch_task_status:update("Finishing."),
- couch_work_queue:close(MapQueue),
- receive {new_group, NewGroup} ->
- exit({new_group,
- NewGroup#group{current_seq=couch_db:get_update_seq(Db)}})
- end.
-
-
-purge_index(#group{db=Db, views=Views, id_btree=IdBtree}=Group) ->
- {ok, PurgedIdsRevs} = couch_db:get_last_purged(Db),
- Ids = [Id || {Id, _Revs} <- PurgedIdsRevs],
- {ok, Lookups, IdBtree2} = couch_btree:query_modify(IdBtree, Ids, [], Ids),
-
- % now populate the dictionary with all the keys to delete
- ViewKeysToRemoveDict = lists:foldl(
- fun({ok,{DocId,ViewNumRowKeys}}, ViewDictAcc) ->
- lists:foldl(
- fun({ViewNum, RowKey}, ViewDictAcc2) ->
- dict:append(ViewNum, {RowKey, DocId}, ViewDictAcc2)
- end, ViewDictAcc, ViewNumRowKeys);
- ({not_found, _}, ViewDictAcc) ->
- ViewDictAcc
- end, dict:new(), Lookups),
-
- % Now remove the values from the btrees
- PurgeSeq = couch_db:get_purge_seq(Db),
- Views2 = lists:map(
- fun(#view{id_num=Num,btree=Btree}=View) ->
- case dict:find(Num, ViewKeysToRemoveDict) of
- {ok, RemoveKeys} ->
- {ok, ViewBtree2} = couch_btree:add_remove(Btree, [], RemoveKeys),
- case ViewBtree2 =/= Btree of
- true ->
- View#view{btree=ViewBtree2, purge_seq=PurgeSeq};
- _ ->
- View#view{btree=ViewBtree2}
- end;
- error -> % no keys to remove in this view
- View
- end
- end, Views),
- Group#group{id_btree=IdBtree2,
- views=Views2,
- purge_seq=PurgeSeq}.
-
-
-load_doc(Db, DocInfo, MapQueue, DocOpts, IncludeDesign) ->
- #doc_info{id=DocId, high_seq=Seq, revs=[#rev_info{deleted=Deleted}|_]} = DocInfo,
- case {IncludeDesign, DocId} of
- {false, <<?DESIGN_DOC_PREFIX, _/binary>>} -> % we skip design docs
- ok;
- _ ->
- if Deleted ->
- couch_work_queue:queue(MapQueue, {Seq, #doc{id=DocId, deleted=true}});
- true ->
- {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts),
- couch_work_queue:queue(MapQueue, {Seq, Doc})
- end
- end.
-
-do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) ->
- case couch_work_queue:dequeue(MapQueue) of
- closed ->
- couch_work_queue:close(WriteQueue),
- couch_query_servers:stop_doc_map(Group#group.query_server);
- {ok, Queue} ->
- Docs = [Doc || {_,#doc{deleted=false}=Doc} <- Queue],
- DelKVs = [{Id, []} || {_, #doc{deleted=true,id=Id}} <- Queue],
- LastSeq = lists:max([Seq || {Seq, _Doc} <- Queue]),
- {Group1, Results} = view_compute(Group, Docs),
- {ViewKVs, DocIdViewIdKeys} = view_insert_query_results(Docs,
- Results, ViewEmptyKVs, DelKVs),
- couch_work_queue:queue(WriteQueue, {LastSeq, ViewKVs, DocIdViewIdKeys}),
- do_maps(Group1, MapQueue, WriteQueue, ViewEmptyKVs)
- end.
-
-do_writes(Parent, Owner, Group, WriteQueue, InitialBuild) ->
- case couch_work_queue:dequeue(WriteQueue) of
- closed ->
- Parent ! {new_group, Group};
- {ok, Queue} ->
- {NewSeq, ViewKeyValues, DocIdViewIdKeys} = lists:foldl(
- fun({Seq, ViewKVs, DocIdViewIdKeys}, nil) ->
- {Seq, ViewKVs, DocIdViewIdKeys};
- ({Seq, ViewKVs, DocIdViewIdKeys}, Acc) ->
- {Seq2, AccViewKVs, AccDocIdViewIdKeys} = Acc,
- AccViewKVs2 = lists:zipwith(
- fun({View, KVsIn}, {_View, KVsAcc}) ->
- {View, KVsIn ++ KVsAcc}
- end, ViewKVs, AccViewKVs),
- {lists:max([Seq, Seq2]),
- AccViewKVs2, DocIdViewIdKeys ++ AccDocIdViewIdKeys}
- end, nil, Queue),
- Group2 = write_changes(Group, ViewKeyValues, DocIdViewIdKeys, NewSeq,
- InitialBuild),
- case Owner of
- nil -> ok;
- _ -> ok = gen_server:cast(Owner, {partial_update, Parent, Group2})
- end,
- do_writes(Parent, Owner, Group2, WriteQueue, InitialBuild)
- end.
-
-view_insert_query_results([], [], ViewKVs, DocIdViewIdKeysAcc) ->
- {ViewKVs, DocIdViewIdKeysAcc};
-view_insert_query_results([Doc|RestDocs], [QueryResults | RestResults], ViewKVs, DocIdViewIdKeysAcc) ->
- {NewViewKVs, NewViewIdKeys} = view_insert_doc_query_results(Doc, QueryResults, ViewKVs, [], []),
- NewDocIdViewIdKeys = [{Doc#doc.id, NewViewIdKeys} | DocIdViewIdKeysAcc],
- view_insert_query_results(RestDocs, RestResults, NewViewKVs, NewDocIdViewIdKeys).
-
-
-view_insert_doc_query_results(_Doc, [], [], ViewKVsAcc, ViewIdKeysAcc) ->
- {lists:reverse(ViewKVsAcc), lists:reverse(ViewIdKeysAcc)};
-view_insert_doc_query_results(#doc{id=DocId}=Doc, [ResultKVs|RestResults], [{View, KVs}|RestViewKVs], ViewKVsAcc, ViewIdKeysAcc) ->
- % Take any identical keys and combine the values
- ResultKVs2 = lists:foldl(
- fun({Key,Value}, [{PrevKey,PrevVal}|AccRest]) ->
- case Key == PrevKey of
- true ->
- case PrevVal of
- {dups, Dups} ->
- [{PrevKey, {dups, [Value|Dups]}} | AccRest];
- _ ->
- [{PrevKey, {dups, [Value,PrevVal]}} | AccRest]
- end;
- false ->
- [{Key,Value},{PrevKey,PrevVal}|AccRest]
- end;
- (KV, []) ->
- [KV]
- end, [], lists:sort(ResultKVs)),
- NewKVs = [{{Key, DocId}, Value} || {Key, Value} <- ResultKVs2],
- NewViewKVsAcc = [{View, NewKVs ++ KVs} | ViewKVsAcc],
- NewViewIdKeys = [{View#view.id_num, Key} || {Key, _Value} <- ResultKVs2],
- NewViewIdKeysAcc = NewViewIdKeys ++ ViewIdKeysAcc,
- view_insert_doc_query_results(Doc, RestResults, RestViewKVs, NewViewKVsAcc, NewViewIdKeysAcc).
-
-view_compute(Group, []) ->
- {Group, []};
-view_compute(#group{def_lang=DefLang, lib=Lib, query_server=QueryServerIn}=Group, Docs) ->
- {ok, QueryServer} =
- case QueryServerIn of
- nil -> % doc map not started
- Definitions = [View#view.def || View <- Group#group.views],
- couch_query_servers:start_doc_map(DefLang, Definitions, Lib);
- _ ->
- {ok, QueryServerIn}
- end,
- {ok, Results} = couch_query_servers:map_docs(QueryServer, Docs),
- {Group#group{query_server=QueryServer}, Results}.
-
-
-
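-% write_changes/5 applies one batch: docs whose view keys all vanished are
-% removed from the id btree, the surviving {DocId, ViewIdKeys} pairs are
-% rewritten, and the old keys returned by query_modify tell each view btree
-% which {Key, DocId} rows to delete.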
-write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq, InitialBuild) ->
- #group{id_btree=IdBtree} = Group,
-
- AddDocIdViewIdKeys = [{DocId, ViewIdKeys} || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys /= []],
- if InitialBuild ->
- RemoveDocIds = [],
- LookupDocIds = [];
- true ->
- RemoveDocIds = [DocId || {DocId, ViewIdKeys} <- DocIdViewIdKeys, ViewIdKeys == []],
- LookupDocIds = [DocId || {DocId, _ViewIdKeys} <- DocIdViewIdKeys]
- end,
- {ok, LookupResults, IdBtree2}
- = couch_btree:query_modify(IdBtree, LookupDocIds, AddDocIdViewIdKeys, RemoveDocIds),
- KeysToRemoveByView = lists:foldl(
- fun(LookupResult, KeysToRemoveByViewAcc) ->
- case LookupResult of
- {ok, {DocId, ViewIdKeys}} ->
- lists:foldl(
- fun({ViewId, Key}, KeysToRemoveByViewAcc2) ->
- dict:append(ViewId, {Key, DocId}, KeysToRemoveByViewAcc2)
- end,
- KeysToRemoveByViewAcc, ViewIdKeys);
- {not_found, _} ->
- KeysToRemoveByViewAcc
- end
- end,
- dict:new(), LookupResults),
- Views2 = lists:zipwith(fun(View, {_View, AddKeyValues}) ->
- KeysToRemove = couch_util:dict_find(View#view.id_num, KeysToRemoveByView, []),
- {ok, ViewBtree2} = couch_btree:add_remove(View#view.btree, AddKeyValues, KeysToRemove),
- case ViewBtree2 =/= View#view.btree of
- true ->
- View#view{btree=ViewBtree2, update_seq=NewSeq};
- _ ->
- View#view{btree=ViewBtree2}
- end
- end, Group#group.views, ViewKeyValuesToAdd),
- Group#group{views=Views2, current_seq=NewSeq, id_btree=IdBtree2}.
-
-
diff --git a/1.1.x/src/couchdb/couch_work_queue.erl b/1.1.x/src/couchdb/couch_work_queue.erl
deleted file mode 100644
index 13ec7335..00000000
--- a/1.1.x/src/couchdb/couch_work_queue.erl
+++ /dev/null
@@ -1,155 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_work_queue).
--behaviour(gen_server).
-
-% public API
--export([new/1, queue/2, dequeue/1, dequeue/2, close/1]).
-
-% gen_server callbacks
--export([init/1, terminate/2]).
--export([handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
-
--record(q, {
- queue = queue:new(),
- blocked = [],
- max_size,
- max_items,
- items = 0,
- size = 0,
- work_waiters = [],
- close_on_dequeue = false,
- multi_workers = false
-}).
-
-
-new(Options) ->
- gen_server:start_link(couch_work_queue, Options, []).
-
-
-queue(Wq, Item) ->
- gen_server:call(Wq, {queue, Item}, infinity).
-
-
-dequeue(Wq) ->
- dequeue(Wq, all).
-
-
-dequeue(Wq, MaxItems) ->
- try
- gen_server:call(Wq, {dequeue, MaxItems}, infinity)
- catch
- _:_ -> closed
- end.
-
-
-close(Wq) ->
- gen_server:cast(Wq, close).
-
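-% Typical usage (a sketch; producers block once max_size or max_items is
-% exceeded, until a consumer drains the queue):
-%
-%   {ok, Q} = couch_work_queue:new([{max_size, 100000}, {max_items, 500}]),
-%   ok = couch_work_queue:queue(Q, Item),
-%   {ok, Items} = couch_work_queue:dequeue(Q),
-%   couch_work_queue:close(Q).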
-
-init(Options) ->
- Q = #q{
- max_size = couch_util:get_value(max_size, Options),
- max_items = couch_util:get_value(max_items, Options),
- multi_workers = couch_util:get_value(multi_workers, Options, false)
- },
- {ok, Q}.
-
-
-terminate(_Reason, #q{work_waiters=Workers}) ->
- lists:foreach(fun({W, _}) -> gen_server:reply(W, closed) end, Workers).
-
-
-handle_call({queue, Item}, From, #q{work_waiters = []} = Q0) ->
- Q = Q0#q{size = Q0#q.size + byte_size(term_to_binary(Item)),
- items = Q0#q.items + 1,
- queue = queue:in(Item, Q0#q.queue)},
- case (Q#q.size >= Q#q.max_size) orelse
- (Q#q.items >= Q#q.max_items) of
- true ->
- {noreply, Q#q{blocked = [From | Q#q.blocked]}};
- false ->
- {reply, ok, Q}
- end;
-
-handle_call({queue, Item}, _From, #q{work_waiters = [{W, _Max} | Rest]} = Q) ->
- gen_server:reply(W, {ok, [Item]}),
- {reply, ok, Q#q{work_waiters = Rest}};
-
-handle_call({dequeue, Max}, From, Q) ->
- #q{work_waiters = Workers, multi_workers = Multi, items = Count} = Q,
- case {Workers, Multi} of
- {[_ | _], false} ->
- exit("Only one caller allowed to wait for this work at a time");
- {[_ | _], true} ->
- {noreply, Q#q{work_waiters=Workers ++ [{From, Max}]}};
- _ ->
- case Count of
- 0 ->
- {noreply, Q#q{work_waiters=Workers ++ [{From, Max}]}};
- C when C > 0 ->
- deliver_queue_items(Max, Q)
- end
- end.
-
-
-deliver_queue_items(Max, Q) ->
- #q{
- queue = Queue,
- items = Count,
- close_on_dequeue = Close,
- blocked = Blocked
- } = Q,
- case (Max =:= all) orelse (Max >= Count) of
- false ->
- {Items, Queue2, Blocked2} = dequeue_items(Max, Queue, Blocked, []),
- Q2 = Q#q{items = Count - Max, blocked = Blocked2, queue = Queue2},
- {reply, {ok, Items}, Q2};
- true ->
- lists:foreach(fun(F) -> gen_server:reply(F, ok) end, Blocked),
- Q2 = Q#q{items = 0, size = 0, blocked = [], queue = queue:new()},
- case Close of
- false ->
- {reply, {ok, queue:to_list(Queue)}, Q2};
- true ->
- {stop, normal, {ok, queue:to_list(Queue)}, Q2}
- end
- end.
-
-
-dequeue_items(0, Queue, Blocked, DequeuedAcc) ->
- {lists:reverse(DequeuedAcc), Queue, Blocked};
-
-dequeue_items(NumItems, Queue, Blocked, DequeuedAcc) ->
- {{value, Item}, Queue2} = queue:out(Queue),
- case Blocked of
- [] ->
- Blocked2 = Blocked;
- [From | Blocked2] ->
- gen_server:reply(From, ok)
- end,
- dequeue_items(NumItems - 1, Queue2, Blocked2, [Item | DequeuedAcc]).
-
-
-handle_cast(close, #q{items = 0} = Q) ->
- {stop, normal, Q};
-
-handle_cast(close, Q) ->
- {noreply, Q#q{close_on_dequeue = true}}.
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info(X, Q) ->
- {stop, X, Q}.
diff --git a/1.1.x/src/couchdb/priv/Makefile.am b/1.1.x/src/couchdb/priv/Makefile.am
deleted file mode 100644
index b36d828d..00000000
--- a/1.1.x/src/couchdb/priv/Makefile.am
+++ /dev/null
@@ -1,93 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-couchlibdir = $(localerlanglibdir)/couch-$(version)
-couchprivdir = $(couchlibdir)/priv
-couchprivlibdir = $(couchlibdir)/priv/lib
-
-EXTRA_DIST = \
- spawnkillable/couchspawnkillable.sh \
- stat_descriptions.cfg.in
-
-CLEANFILES = stat_descriptions.cfg
-
-ICU_LOCAL_FLAGS = $(ICU_LOCAL_CFLAGS) $(ICU_LOCAL_LDFLAGS)
-if WINDOWS
-ICU_LOCAL_LIBS=-licuuc -licudt -licuin
-else
-ICU_LOCAL_LIBS=-licuuc -licudata -licui18n
-endif
-
-couchprivlib_LTLIBRARIES = couch_icu_driver.la
-couch_icu_driver_la_SOURCES = icu_driver/couch_icu_driver.c
-couch_icu_driver_la_LDFLAGS = -module -avoid-version $(ICU_LOCAL_FLAGS)
-couch_icu_driver_la_CFLAGS = $(ICU_LOCAL_FLAGS)
-couch_icu_driver_la_LIBADD = $(ICU_LOCAL_LIBS)
-
-if WINDOWS
-couch_icu_driver_la_LDFLAGS += -no-undefined
-endif
-
-COUCHJS_SRCS = \
- couch_js/http.c \
- couch_js/http.h \
- couch_js/main.c \
- couch_js/utf8.c \
- couch_js/utf8.h
-
-locallibbin_PROGRAMS = couchjs
-couchjs_SOURCES = $(COUCHJS_SRCS)
-couchjs_LDFLAGS = $(CURL_LDFLAGS)
-couchjs_CFLAGS = -D_BSD_SOURCE $(CURL_CFLAGS)
-couchjs_LDADD = $(CURL_LDFLAGS) @JSLIB@
-
-couchpriv_DATA = stat_descriptions.cfg
-couchpriv_PROGRAMS = couchspawnkillable
-
-%.cfg: %.cfg.in
- cp $< $@
-
-if WINDOWS
-couchspawnkillable_SOURCES = spawnkillable/couchspawnkillable_win.c
-endif
-
-if !WINDOWS
-couchspawnkillable: spawnkillable/couchspawnkillable.sh
- cp $< $@
- chmod +x $@
-endif
-
-# libtool and automake have defeated markh. For each of our executables
-# we end up with 2 copies - one directly in the 'target' folder (e.g., 'priv')
-# and another - the correct one - in .libs. The former doesn't work but is
-# what gets installed for 'couchspawnkillable' - but the correct one for
-# couchjs.exe *does* get copied. *shrug* So just clobber it with the
-# correct one as the last step. See bug COUCHDB-439
-install-data-hook:
- if test -f "$(DESTDIR)$(couchprivlibdir)/couch_icu_driver"; then \
- rm -f "$(DESTDIR)$(couchprivlibdir)/couch_icu_driver.so"; \
- cd "$(DESTDIR)$(couchprivlibdir)" && \
- $(LN_S) couch_icu_driver couch_icu_driver.so; \
- fi
-if WINDOWS
- $(INSTALL) $(ICU_LOCAL_BIN)/icuuc42.dll $(bindir)
- $(INSTALL) $(ICU_LOCAL_BIN)/icudt42.dll $(bindir)
- $(INSTALL) $(ICU_LOCAL_BIN)/icuin42.dll $(bindir)
- $(INSTALL) $(JS_LIB_BINARY) $(bindir)
- $(INSTALL) .libs/couchspawnkillable.exe \
- "$(DESTDIR)$(couchprivdir)/couchspawnkillable.exe"
-endif
-
-uninstall-local:
- if test -f "$(DESTDIR)$(couchprivlibdir)/couch_erl_driver"; then \
- rm -f "$(DESTDIR)$(couchprivlibdir)/couch_erl_driver.so"; \
- fi
diff --git a/1.1.x/src/couchdb/priv/couch_js/http.c b/1.1.x/src/couchdb/priv/couch_js/http.c
deleted file mode 100644
index 6c2a8a82..00000000
--- a/1.1.x/src/couchdb/priv/couch_js/http.c
+++ /dev/null
@@ -1,675 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <jsapi.h>
-#include <curl/curl.h>
-
-#include "utf8.h"
-
-#ifdef XP_WIN
-// Map some of the string function names to things which exist on Windows
-#define strcasecmp _strcmpi
-#define strncasecmp _strnicmp
-#define snprintf _snprintf
-#endif
-
-typedef struct curl_slist CurlHeaders;
-
-typedef struct {
- int method;
- char* url;
- CurlHeaders* req_headers;
- jsint last_status;
-} HTTPData;
-
-char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", NULL};
-
-#define GET 0
-#define HEAD 1
-#define POST 2
-#define PUT 3
-#define DELETE 4
-#define COPY 5
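-// METHODS[] and the numeric defines above must stay in lockstep: open()
-// resolves the method string with strcasecmp and stores its index in
-// http->method.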
-
-static JSBool
-go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t blen);
-
-static JSString*
-str_from_binary(JSContext* cx, char* data, size_t length);
-
-static JSBool
-constructor(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
-{
- HTTPData* http = NULL;
- JSBool ret = JS_FALSE;
-
- http = (HTTPData*) malloc(sizeof(HTTPData));
- if(!http)
- {
- JS_ReportError(cx, "Failed to create CouchHTTP instance.");
- goto error;
- }
-
- http->method = -1;
- http->url = NULL;
- http->req_headers = NULL;
- http->last_status = -1;
-
- if(!JS_SetPrivate(cx, obj, http))
- {
- JS_ReportError(cx, "Failed to set private CouchHTTP data.");
- goto error;
- }
-
- ret = JS_TRUE;
- goto success;
-
-error:
- if(http) free(http);
-
-success:
- return ret;
-}
-
-static void
-destructor(JSContext* cx, JSObject* obj)
-{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- if(!http)
- {
- fprintf(stderr, "Unable to destroy invalid CouchHTTP instance.\n");
- }
- else
- {
- if(http->url) free(http->url);
- if(http->req_headers) curl_slist_free_all(http->req_headers);
- free(http);
- }
-}
-
-static JSBool
-open(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
-{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- char* method = NULL;
- char* url = NULL;
- JSBool ret = JS_FALSE;
- int methid;
-
- if(!http)
- {
- JS_ReportError(cx, "Invalid CouchHTTP instance.");
- goto done;
- }
-
- if(argv[0] == JSVAL_VOID)
- {
- JS_ReportError(cx, "You must specify a method.");
- goto done;
- }
-
- method = enc_string(cx, argv[0], NULL);
- if(!method)
- {
- JS_ReportError(cx, "Failed to encode method.");
- goto done;
- }
-
- for(methid = 0; METHODS[methid] != NULL; methid++)
- {
- if(strcasecmp(METHODS[methid], method) == 0) break;
- }
-
- if(methid > COPY)
- {
- JS_ReportError(cx, "Invalid method specified.");
- goto done;
- }
-
- http->method = methid;
-
- if(argv[1] == JSVAL_VOID)
- {
- JS_ReportError(cx, "You must specify a URL.");
- goto done;
- }
-
- if(http->url)
- {
- free(http->url);
- http->url = NULL;
- }
-
- http->url = enc_string(cx, argv[1], NULL);
- if(!http->url)
- {
- JS_ReportError(cx, "Failed to encode URL.");
- goto done;
- }
-
- if(argv[2] != JSVAL_VOID && argv[2] != JSVAL_FALSE)
- {
- JS_ReportError(cx, "Synchronous flag must be false if specified.");
- goto done;
- }
-
- if(http->req_headers)
- {
- curl_slist_free_all(http->req_headers);
- http->req_headers = NULL;
- }
-
- // Disable Expect: 100-continue
- http->req_headers = curl_slist_append(http->req_headers, "Expect:");
-
- ret = JS_TRUE;
-
-done:
- if(method) free(method);
- return ret;
-}
-
-static JSBool
-setheader(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
-{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- char* keystr = NULL;
- char* valstr = NULL;
- char* hdrbuf = NULL;
- size_t hdrlen = -1;
- JSBool ret = JS_FALSE;
-
- if(!http)
- {
- JS_ReportError(cx, "Invalid CouchHTTP instance.");
- goto done;
- }
-
- if(argv[0] == JSVAL_VOID)
- {
- JS_ReportError(cx, "You must speciy a header name.");
- goto done;
- }
-
- keystr = enc_string(cx, argv[0], NULL);
- if(!keystr)
- {
- JS_ReportError(cx, "Failed to encode header name.");
- goto done;
- }
-
- if(argv[1] == JSVAL_VOID)
- {
- JS_ReportError(cx, "You must specify a header value.");
- goto done;
- }
-
- valstr = enc_string(cx, argv[1], NULL);
- if(!valstr)
- {
- JS_ReportError(cx, "Failed to encode header value.");
- goto done;
- }
-
- hdrlen = strlen(keystr) + strlen(valstr) + 3;
- hdrbuf = (char*) malloc(hdrlen * sizeof(char));
- if(!hdrbuf)
- {
- JS_ReportError(cx, "Failed to allocate header buffer.");
- goto done;
- }
-
- snprintf(hdrbuf, hdrlen, "%s: %s", keystr, valstr);
- http->req_headers = curl_slist_append(http->req_headers, hdrbuf);
-
- ret = JS_TRUE;
-
-done:
- if(keystr) free(keystr);
- if(valstr) free(valstr);
- if(hdrbuf) free(hdrbuf);
-
- return ret;
-}
-
-static JSBool
-sendreq(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval)
-{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
- char* body = NULL;
- size_t bodylen = 0;
- JSBool ret = JS_FALSE;
-
- if(!http)
- {
- JS_ReportError(cx, "Invalid CouchHTTP instance.");
- goto done;
- }
-
- if(argv[0] != JSVAL_VOID && argv[0] != JS_GetEmptyStringValue(cx))
- {
- body = enc_string(cx, argv[0], &bodylen);
- if(!body)
- {
- JS_ReportError(cx, "Failed to encode body.");
- goto done;
- }
- }
-
- ret = go(cx, obj, http, body, bodylen);
-
-done:
- if(body) free(body);
- return ret;
-}
-
-static JSBool
-status(JSContext* cx, JSObject* obj, jsval idval, jsval* vp)
-{
- HTTPData* http = (HTTPData*) JS_GetPrivate(cx, obj);
-
- if(!http)
- {
- JS_ReportError(cx, "Invalid CouchHTTP instance.");
- return JS_FALSE;
- }
-
- if(INT_FITS_IN_JSVAL(http->last_status))
- {
- *vp = INT_TO_JSVAL(http->last_status);
- return JS_TRUE;
- }
- else
- {
- JS_ReportError(cx, "INTERNAL: Invalid last_status");
- return JS_FALSE;
- }
-}
-
-JSClass CouchHTTPClass = {
- "CouchHTTP",
- JSCLASS_HAS_PRIVATE
- | JSCLASS_CONSTRUCT_PROTOTYPE
- | JSCLASS_HAS_RESERVED_SLOTS(2),
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_EnumerateStub,
- JS_ResolveStub,
- JS_ConvertStub,
- destructor,
- JSCLASS_NO_OPTIONAL_MEMBERS
-};
-
-JSPropertySpec CouchHTTPProperties[] = {
- {"status", 0, JSPROP_READONLY, status, NULL},
- {0, 0, 0, 0, 0}
-};
-
-JSFunctionSpec CouchHTTPFunctions[] = {
- {"_open", open, 3, 0, 0},
- {"_setRequestHeader", setheader, 2, 0, 0},
- {"_send", sendreq, 1, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-JSObject*
-install_http(JSContext* cx, JSObject* glbl)
-{
- JSObject* klass = NULL;
- HTTPData* http = NULL;
-
- klass = JS_InitClass(
- cx,
- glbl,
- NULL,
- &CouchHTTPClass,
- constructor,
- 0,
- CouchHTTPProperties,
- CouchHTTPFunctions,
- NULL,
- NULL
- );
-
- if(!klass)
- {
- fprintf(stderr, "Failed to initialize CouchHTTP class.\n");
- return NULL;
- }
-
- return klass;
-}
-
-
-// Curl Helpers
-
-typedef struct {
- HTTPData* http;
- JSContext* cx;
- JSObject* resp_headers;
- char* sendbuf;
- size_t sendlen;
- size_t sent;
- char* recvbuf;
- size_t recvlen;
- size_t read;
-} CurlState;
-
-/*
- * I really hate doing this but this doesn't have to be
- * uber awesome, it just has to work.
- */
-CURL* HTTP_HANDLE = NULL;
-char ERRBUF[CURL_ERROR_SIZE];
-
-static size_t send_body(void *ptr, size_t size, size_t nmem, void *data);
-static int seek_body(void *ptr, curl_off_t offset, int origin);
-static size_t recv_body(void *ptr, size_t size, size_t nmem, void *data);
-static size_t recv_header(void *ptr, size_t size, size_t nmem, void *data);
-
-static JSBool
-go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
-{
- CurlState state;
- JSString* jsbody;
- JSBool ret = JS_FALSE;
- jsval tmp;
-
- state.cx = cx;
- state.http = http;
-
- state.sendbuf = body;
- state.sendlen = bodylen;
- state.sent = 0;
-
- state.recvbuf = NULL;
- state.recvlen = 0;
- state.read = 0;
- state.resp_headers = NULL; // only set by recv_header when headers arrive
-
- if(HTTP_HANDLE == NULL)
- {
- HTTP_HANDLE = curl_easy_init();
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_READFUNCTION, send_body);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKFUNCTION,
- (curl_seek_callback) seek_body);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_HEADERFUNCTION, recv_header);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEFUNCTION, recv_body);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOPROGRESS, 1);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_ERRORBUFFER, ERRBUF);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_COOKIEFILE, "");
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_USERAGENT,
- "CouchHTTP Client - Relax");
- }
-
- if(!HTTP_HANDLE)
- {
- JS_ReportError(cx, "Failed to initialize cURL handle.");
- goto done;
- }
-
- if(http->method < 0 || http->method > COPY)
- {
- JS_ReportError(cx, "INTERNAL: Unknown method.");
- goto done;
- }
-
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_CUSTOMREQUEST, METHODS[http->method]);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOBODY, 0);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 1);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 0);
-
- if(http->method == HEAD)
- {
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOBODY, 1);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0);
- }
- else if(http->method == POST || http->method == PUT)
- {
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 1);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0);
- }
-
- if(body && bodylen)
- {
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, bodylen);
- }
- else
- {
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, 0);
- }
-
- //curl_easy_setopt(HTTP_HANDLE, CURLOPT_VERBOSE, 1);
-
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_URL, http->url);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_HTTPHEADER, http->req_headers);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_READDATA, &state);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKDATA, &state);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEHEADER, &state);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEDATA, &state);
-
- if(curl_easy_perform(HTTP_HANDLE) != 0)
- {
- JS_ReportError(cx, "Failed to execute HTTP request: %s", ERRBUF);
- goto done;
- }
-
- if(!state.resp_headers)
- {
- JS_ReportError(cx, "Failed to recieve HTTP headers.");
- goto done;
- }
-
- tmp = OBJECT_TO_JSVAL(state.resp_headers);
- if(!JS_DefineProperty(
- cx,
- obj,
- "_headers",
- tmp,
- NULL,
- NULL,
- JSPROP_READONLY
- ))
- {
- JS_ReportError(cx, "INTERNAL: Failed to set response headers.");
- goto done;
- }
-
- if(state.recvbuf) // Is this check good enough?
- {
- state.recvbuf[state.read] = '\0';
- jsbody = dec_string(cx, state.recvbuf, state.read+1);
- if(!jsbody)
- {
- // If we can't decode the body as UTF-8, fall back to
- // converting it to a string by widening each byte
- // to a jschar.
- jsbody = str_from_binary(cx, state.recvbuf, state.read);
- if(!jsbody) {
- if(!JS_IsExceptionPending(cx)) {
- JS_ReportError(cx, "INTERNAL: Failed to decode body.");
- }
- goto done;
- }
- }
- tmp = STRING_TO_JSVAL(jsbody);
- }
- else
- {
- tmp = JS_GetEmptyStringValue(cx);
- }
-
- if(!JS_DefineProperty(
- cx,
- obj,
- "responseText",
- tmp,
- NULL,
- NULL,
- JSPROP_READONLY
- ))
- {
- JS_ReportError(cx, "INTERNAL: Failed to set responseText.");
- goto done;
- }
-
- ret = JS_TRUE;
-
-done:
- if(state.recvbuf) JS_free(cx, state.recvbuf);
- return ret;
-}
-
-static size_t
-send_body(void *ptr, size_t size, size_t nmem, void *data)
-{
- CurlState* state = (CurlState*) data;
- size_t length = size * nmem;
- size_t towrite = state->sendlen - state->sent;
- if(towrite == 0)
- {
- return 0;
- }
-
- if(length < towrite) towrite = length;
-
- //fprintf(stderr, "%lu %lu %lu %lu\n", state->bodyused, state->bodyread, length, towrite);
-
- memcpy(ptr, state->sendbuf + state->sent, towrite);
- state->sent += towrite;
-
- return towrite;
-}
-
-static int
-seek_body(void* ptr, curl_off_t offset, int origin)
-{
- CurlState* state = (CurlState*) ptr;
- if(origin != SEEK_SET) return -1;
-
- state->sent = (size_t) offset;
- return (int) state->sent;
-}
-
-static size_t
-recv_header(void *ptr, size_t size, size_t nmem, void *data)
-{
- CurlState* state = (CurlState*) data;
- char code[4];
- char* header = (char*) ptr;
- size_t length = size * nmem;
- size_t index = 0;
- JSString* hdr = NULL;
- jsuint hdrlen;
- jsval hdrval;
-
- if(length > 7 && strncasecmp(header, "HTTP/1.", 7) == 0)
- {
- if(length < 12)
- {
- return CURLE_WRITE_ERROR;
- }
-
- memcpy(code, header+9, 3*sizeof(char));
- code[3] = '\0';
- state->http->last_status = atoi(code);
-
- state->resp_headers = JS_NewArrayObject(state->cx, 0, NULL);
- if(!state->resp_headers)
- {
- return CURLE_WRITE_ERROR;
- }
-
- return length;
- }
-
- // We get a notice at the \r\n\r\n after headers.
- if(length <= 2)
- {
- return length;
- }
-
- // Append the new header to our array.
- hdr = dec_string(state->cx, header, length);
- if(!hdr)
- {
- return CURLE_WRITE_ERROR;
- }
-
- if(!JS_GetArrayLength(state->cx, state->resp_headers, &hdrlen))
- {
- return CURLE_WRITE_ERROR;
- }
-
- hdrval = STRING_TO_JSVAL(hdr);
- if(!JS_SetElement(state->cx, state->resp_headers, hdrlen, &hdrval))
- {
- return CURLE_WRITE_ERROR;
- }
-
- return length;
-}
-
-static size_t
-recv_body(void *ptr, size_t size, size_t nmem, void *data)
-{
- CurlState* state = (CurlState*) data;
- size_t length = size * nmem;
- char* tmp = NULL;
-
- if(!state->recvbuf)
- {
- state->recvlen = 4096;
- state->read = 0;
- state->recvbuf = JS_malloc(state->cx, state->recvlen);
- }
-
- if(!state->recvbuf)
- {
- return CURLE_WRITE_ERROR;
- }
-
- // +1 so we can add '\0' back up in the go function.
- while(length+1 > state->recvlen - state->read) state->recvlen *= 2;
- tmp = JS_realloc(state->cx, state->recvbuf, state->recvlen);
- if(!tmp) return CURLE_WRITE_ERROR;
- state->recvbuf = tmp;
-
- memcpy(state->recvbuf + state->read, ptr, length);
- state->read += length;
- return length;
-}
-
-JSString*
-str_from_binary(JSContext* cx, char* data, size_t length)
-{
- jschar* conv = (jschar*) JS_malloc(cx, length * sizeof(jschar));
- JSString* ret = NULL;
- size_t i;
-
- if(!conv) return NULL;
-
- for(i = 0; i < length; i++)
- {
- conv[i] = (jschar) data[i];
- }
-
- ret = JS_NewUCString(cx, conv, length);
- if(!ret) JS_free(cx, conv);
-
- return ret;
-}
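
The recv_body() callback above grows its receive buffer by doubling until the incoming chunk, plus room for a trailing '\0', fits. The same accumulation pattern works outside SpiderMonkey; below is a minimal standalone sketch against plain libcurl, with malloc/realloc standing in for JS_malloc/JS_realloc and a placeholder URL:

```c
// Minimal sketch of http.c's grow-by-doubling write callback, assuming
// only libcurl. The URL is a placeholder.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>

typedef struct {
    char* buf;
    size_t len; // bytes used
    size_t cap; // bytes allocated
} Body;

static size_t
recv_body(void* ptr, size_t size, size_t nmemb, void* data)
{
    Body* b = (Body*) data;
    size_t length = size * nmemb;

    // +1 keeps room for the '\0' appended after the transfer.
    while(b->len + length + 1 > b->cap)
    {
        size_t newcap = b->cap ? b->cap * 2 : 4096;
        char* tmp = realloc(b->buf, newcap);
        if(!tmp) return 0; // short write aborts the transfer
        b->buf = tmp;
        b->cap = newcap;
    }

    memcpy(b->buf + b->len, ptr, length);
    b->len += length;
    return length;
}

int
main(void)
{
    Body body = {NULL, 0, 0};
    CURL* h = curl_easy_init();
    if(!h) return 1;

    curl_easy_setopt(h, CURLOPT_URL, "http://127.0.0.1:5984/");
    curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, recv_body);
    curl_easy_setopt(h, CURLOPT_WRITEDATA, &body);

    if(curl_easy_perform(h) == CURLE_OK && body.buf)
    {
        body.buf[body.len] = '\0';
        printf("%s\n", body.buf);
    }

    free(body.buf);
    curl_easy_cleanup(h);
    return 0;
}
```
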
diff --git a/1.1.x/src/couchdb/priv/couch_js/http.h b/1.1.x/src/couchdb/priv/couch_js/http.h
deleted file mode 100644
index b5f8c70f..00000000
--- a/1.1.x/src/couchdb/priv/couch_js/http.h
+++ /dev/null
@@ -1,18 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#ifndef COUCH_JS_HTTP_H
-#define COUCH_JS_HTTP_H
-
-JSObject* install_http(JSContext* cx, JSObject* global);
-
-#endif \ No newline at end of file
diff --git a/1.1.x/src/couchdb/priv/couch_js/main.c b/1.1.x/src/couchdb/priv/couch_js/main.c
deleted file mode 100644
index 376aa15b..00000000
--- a/1.1.x/src/couchdb/priv/couch_js/main.c
+++ /dev/null
@@ -1,338 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <jsapi.h>
-#include "config.h"
-
-#include "utf8.h"
-#include "http.h"
-
-int gExitCode = 0;
-
-#ifdef JS_THREADSAFE
-#define SETUP_REQUEST(cx) \
- JS_SetContextThread(cx); \
- JS_BeginRequest(cx);
-#define FINISH_REQUEST(cx) \
- JS_EndRequest(cx); \
- JS_ClearContextThread(cx);
-#else
-#define SETUP_REQUEST(cx)
-#define FINISH_REQUEST(cx)
-#endif
-
-static JSBool
-evalcx(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JSString *str;
- JSObject *sandbox;
- JSContext *subcx;
- const jschar *src;
- size_t srclen;
- JSBool ret = JS_FALSE;
- jsval v;
-
- sandbox = NULL;
- if(!JS_ConvertArguments(cx, argc, argv, "S / o", &str, &sandbox))
- {
- return JS_FALSE;
- }
-
- subcx = JS_NewContext(JS_GetRuntime(cx), 8L * 1024L);
- if(!subcx)
- {
- JS_ReportOutOfMemory(cx);
- return JS_FALSE;
- }
-
- SETUP_REQUEST(subcx);
-
- src = JS_GetStringChars(str);
- srclen = JS_GetStringLength(str);
-
- if(!sandbox)
- {
- sandbox = JS_NewObject(subcx, NULL, NULL, NULL);
- if(!sandbox || !JS_InitStandardClasses(subcx, sandbox)) goto done;
- }
-
- if(srclen == 0)
- {
- *rval = OBJECT_TO_JSVAL(sandbox);
- }
- else
- {
- JS_EvaluateUCScript(subcx, sandbox, src, srclen, NULL, 0, rval);
- }
-
- ret = JS_TRUE;
-
-done:
- FINISH_REQUEST(subcx);
- JS_DestroyContext(subcx);
- return ret;
-}
-
-static JSBool
-gc(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JS_GC(cx);
- return JS_TRUE;
-}
-
-static JSBool
-print(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- uintN i;
- char *bytes;
-
- for(i = 0; i < argc; i++)
- {
- bytes = enc_string(cx, argv[i], NULL);
- if(!bytes) return JS_FALSE;
-
- fprintf(stdout, "%s%s", i ? " " : "", bytes);
- JS_free(cx, bytes);
- }
-
- fputc('\n', stdout);
- fflush(stdout);
- return JS_TRUE;
-}
-
-static JSBool
-quit(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
-{
- JS_ConvertArguments(cx, argc, argv, "/ i", &gExitCode);
- return JS_FALSE;
-}
-
-static char*
-readfp(JSContext* cx, FILE* fp, size_t* buflen)
-{
- char* bytes = NULL;
- char* tmp = NULL;
- size_t used = 0;
- size_t byteslen = 256;
- size_t readlen = 0;
-
- bytes = JS_malloc(cx, byteslen);
- if(bytes == NULL) return NULL;
-
- while((readlen = js_fgets(bytes+used, byteslen-used, stdin)) > 0)
- {
- used += readlen;
-
- if(bytes[used-1] == '\n')
- {
- bytes[used-1] = '\0';
- break;
- }
-
- // Double our buffer and read more.
- byteslen *= 2;
- tmp = JS_realloc(cx, bytes, byteslen);
- if(!tmp)
- {
- JS_free(cx, bytes);
- return NULL;
- }
- bytes = tmp;
- }
-
- *buflen = used;
- return bytes;
-}
-
-static JSBool
-readline(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval) {
- jschar *chars;
- JSString *str;
- char* bytes;
- char* tmp;
- size_t byteslen;
-
- /* GC Occasionally */
- JS_MaybeGC(cx);
-
- bytes = readfp(cx, stdin, &byteslen);
- if(!bytes) return JS_FALSE;
-
- /* Treat the empty string specially */
- if(byteslen == 0)
- {
- *rval = JS_GetEmptyStringValue(cx);
- JS_free(cx, bytes);
- return JS_TRUE;
- }
-
- /* Shrink the buffer to the real size */
- tmp = JS_realloc(cx, bytes, byteslen);
- if(!tmp)
- {
- JS_free(cx, bytes);
- return JS_FALSE;
- }
- bytes = tmp;
-
- str = dec_string(cx, bytes, byteslen);
- JS_free(cx, bytes);
-
- if(!str) return JS_FALSE;
-
- *rval = STRING_TO_JSVAL(str);
-
- return JS_TRUE;
-}
-
-static JSBool
-seal(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval) {
- JSObject *target;
- JSBool deep = JS_FALSE;
-
- if (!JS_ConvertArguments(cx, argc, argv, "o/b", &target, &deep))
- return JS_FALSE;
- if (!target)
- return JS_TRUE;
- return JS_SealObject(cx, target, deep);
-}
-
-static void
-execute_script(JSContext *cx, JSObject *obj, const char *filename) {
- FILE *file;
- JSScript *script;
- jsval result;
-
- if(!filename || strcmp(filename, "-") == 0)
- {
- file = stdin;
- }
- else
- {
- file = fopen(filename, "r");
- if (!file)
- {
- fprintf(stderr, "could not open script file %s\n", filename);
- gExitCode = 1;
- return;
- }
- }
-
- script = JS_CompileFileHandle(cx, obj, filename, file);
- if(script)
- {
- JS_ExecuteScript(cx, obj, script, &result);
- JS_DestroyScript(cx, script);
- }
-}
-
-static void
-printerror(JSContext *cx, const char *mesg, JSErrorReport *report)
-{
- if(!report || !JSREPORT_IS_WARNING(report->flags))
- {
- fprintf(stderr, "%s\n", mesg);
- }
-}
-
-static JSFunctionSpec global_functions[] = {
- {"evalcx", evalcx, 0, 0, 0},
- {"gc", gc, 0, 0, 0},
- {"print", print, 0, 0, 0},
- {"quit", quit, 0, 0, 0},
- {"readline", readline, 0, 0, 0},
- {"seal", seal, 0, 0, 0},
- {0, 0, 0, 0, 0}
-};
-
-static JSClass global_class = {
- "GlobalClass",
- JSCLASS_GLOBAL_FLAGS,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_PropertyStub,
- JS_EnumerateStub,
- JS_ResolveStub,
- JS_ConvertStub,
- JS_FinalizeStub,
- JSCLASS_NO_OPTIONAL_MEMBERS
-};
-
-int
-main(int argc, const char * argv[])
-{
- JSRuntime* rt = NULL;
- JSContext* cx = NULL;
- JSObject* global = NULL;
- JSFunctionSpec* sp = NULL;
- int i = 0;
-
- rt = JS_NewRuntime(64L * 1024L * 1024L);
- if (!rt) return 1;
-
- cx = JS_NewContext(rt, 8L * 1024L);
- if (!cx) return 1;
-
- JS_SetErrorReporter(cx, printerror);
- JS_ToggleOptions(cx, JSOPTION_XML);
-
- SETUP_REQUEST(cx);
-
- global = JS_NewObject(cx, &global_class, NULL, NULL);
- if (!global) return 1;
- if (!JS_InitStandardClasses(cx, global)) return 1;
-
- for(sp = global_functions; sp->name != NULL; sp++)
- {
- if(!JS_DefineFunction(cx, global,
- sp->name, sp->call, sp->nargs, sp->flags))
- {
- fprintf(stderr, "Failed to create function: %s\n", sp->name);
- return 1;
- }
- }
-
- if(!install_http(cx, global))
- {
- return 1;
- }
-
- JS_SetGlobalObject(cx, global);
-
- if(argc > 2)
- {
- fprintf(stderr, "incorrect number of arguments\n\n");
- fprintf(stderr, "usage: %s <scriptfile>\n", argv[0]);
- return 2;
- }
-
- if(argc < 2)
- {
- execute_script(cx, global, NULL);
- }
- else
- {
- execute_script(cx, global, argv[1]);
- }
-
- FINISH_REQUEST(cx);
-
- JS_DestroyContext(cx);
- JS_DestroyRuntime(rt);
- JS_ShutDown();
-
- return gExitCode;
-}
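
readfp() above reads a line of unbounded length by doubling its buffer whenever js_fgets() fills it without hitting a newline. A standalone sketch of the same loop in plain C, with fgets() and realloc() standing in for the JS allocator wrappers:

```c
// Sketch of readfp()'s doubling-buffer line reader, plain C only.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char*
read_line(FILE* fp, size_t* lenp)
{
    size_t cap = 256;
    size_t used = 0;
    char* buf = malloc(cap);
    char* tmp = NULL;

    if(!buf) return NULL;

    while(fgets(buf + used, (int)(cap - used), fp))
    {
        used += strlen(buf + used);
        if(used > 0 && buf[used-1] == '\n')
        {
            buf[--used] = '\0'; // strip the newline, as readfp does
            break;
        }

        // Line longer than the buffer: double and read more.
        cap *= 2;
        tmp = realloc(buf, cap);
        if(!tmp)
        {
            free(buf);
            return NULL;
        }
        buf = tmp;
    }

    *lenp = used;
    return buf;
}

int
main(void)
{
    size_t len = 0;
    char* line = read_line(stdin, &len);
    if(!line) return 1;
    printf("read %lu bytes: %s\n", (unsigned long) len, line);
    free(line);
    return 0;
}
```
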
diff --git a/1.1.x/src/couchdb/priv/couch_js/utf8.c b/1.1.x/src/couchdb/priv/couch_js/utf8.c
deleted file mode 100644
index 699a6fee..00000000
--- a/1.1.x/src/couchdb/priv/couch_js/utf8.c
+++ /dev/null
@@ -1,286 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include <jsapi.h>
-
-static int
-enc_char(uint8 *utf8Buffer, uint32 ucs4Char)
-{
- int utf8Length = 1;
-
- if (ucs4Char < 0x80)
- {
- *utf8Buffer = (uint8)ucs4Char;
- }
- else
- {
- int i;
- uint32 a = ucs4Char >> 11;
- utf8Length = 2;
- while(a)
- {
- a >>= 5;
- utf8Length++;
- }
- i = utf8Length;
- while(--i)
- {
- utf8Buffer[i] = (uint8)((ucs4Char & 0x3F) | 0x80);
- ucs4Char >>= 6;
- }
- *utf8Buffer = (uint8)(0x100 - (1 << (8-utf8Length)) + ucs4Char);
- }
-
- return utf8Length;
-}
-
-static JSBool
-enc_charbuf(const jschar* src, size_t srclen, char* dst, size_t* dstlenp)
-{
- size_t i;
- size_t utf8Len;
- size_t dstlen = *dstlenp;
- size_t origDstlen = dstlen;
- jschar c;
- jschar c2;
- uint32 v;
- uint8 utf8buf[6];
-
- if(!dst)
- {
- dstlen = origDstlen = (size_t) -1;
- }
-
- while(srclen)
- {
- c = *src++;
- srclen--;
-
- if((c >= 0xDC00) && (c <= 0xDFFF)) goto bad_surrogate;
-
- if(c < 0xD800 || c > 0xDBFF)
- {
- v = c;
- }
- else
- {
- if(srclen < 1) goto buffer_too_small;
- c2 = *src++;
- srclen--;
- if ((c2 < 0xDC00) || (c2 > 0xDFFF))
- {
- c = c2;
- goto bad_surrogate;
- }
- v = ((c - 0xD800) << 10) + (c2 - 0xDC00) + 0x10000;
- }
- if(v < 0x0080)
- {
- /* no encoding necessary - performance hack */
- if(!dstlen) goto buffer_too_small;
- if(dst) *dst++ = (char) v;
- utf8Len = 1;
- }
- else
- {
- utf8Len = enc_char(utf8buf, v);
- if(utf8Len > dstlen) goto buffer_too_small;
- if(dst)
- {
- for (i = 0; i < utf8Len; i++)
- {
- *dst++ = (char) utf8buf[i];
- }
- }
- }
- dstlen -= utf8Len;
- }
-
- *dstlenp = (origDstlen - dstlen);
- return JS_TRUE;
-
-bad_surrogate:
- *dstlenp = (origDstlen - dstlen);
- return JS_FALSE;
-
-buffer_too_small:
- *dstlenp = (origDstlen - dstlen);
- return JS_FALSE;
-}
-
-char*
-enc_string(JSContext* cx, jsval arg, size_t* buflen)
-{
- JSString* str = NULL;
- jschar* src = NULL;
- char* bytes = NULL;
- size_t srclen = 0;
- size_t byteslen = 0;
-
- str = JS_ValueToString(cx, arg);
- if(!str) goto error;
-
- src = JS_GetStringChars(str);
- srclen = JS_GetStringLength(str);
-
- if(!enc_charbuf(src, srclen, NULL, &byteslen)) goto error;
-
- bytes = JS_malloc(cx, (byteslen) + 1);
- if(!bytes) goto error;
- bytes[byteslen] = 0;
-
- if(!enc_charbuf(src, srclen, bytes, &byteslen)) goto error;
-
- if(buflen) *buflen = byteslen;
- goto success;
-
-error:
- if(bytes != NULL) JS_free(cx, bytes);
- bytes = NULL;
-
-success:
- return bytes;
-}
-
-static uint32
-dec_char(const uint8 *utf8Buffer, int utf8Length)
-{
- uint32 ucs4Char;
- uint32 minucs4Char;
-
- /* from Unicode 3.1, non-shortest form is illegal */
- static const uint32 minucs4Table[] = {
- 0x00000080, 0x00000800, 0x00010000, 0x00200000, 0x04000000
- };
-
- if (utf8Length == 1)
- {
- ucs4Char = *utf8Buffer;
- }
- else
- {
- ucs4Char = *utf8Buffer++ & ((1<<(7-utf8Length))-1);
- minucs4Char = minucs4Table[utf8Length-2];
- while(--utf8Length)
- {
- ucs4Char = ucs4Char<<6 | (*utf8Buffer++ & 0x3F);
- }
- if(ucs4Char < minucs4Char || ucs4Char == 0xFFFE || ucs4Char == 0xFFFF)
- {
- ucs4Char = 0xFFFD;
- }
- }
-
- return ucs4Char;
-}
-
-static JSBool
-dec_charbuf(const char *src, size_t srclen, jschar *dst, size_t *dstlenp)
-{
- uint32 v;
- size_t offset = 0;
- size_t j;
- size_t n;
- size_t dstlen = *dstlenp;
- size_t origDstlen = dstlen;
-
- if(!dst) dstlen = origDstlen = (size_t) -1;
-
- while(srclen)
- {
- v = (uint8) *src;
- n = 1;
-
- if(v & 0x80)
- {
- while(v & (0x80 >> n))
- {
- n++;
- }
-
- if(n > srclen) goto buffer_too_small;
- if(n == 1 || n > 6) goto bad_character;
-
- for(j = 1; j < n; j++)
- {
- if((src[j] & 0xC0) != 0x80) goto bad_character;
- }
-
- v = dec_char((const uint8 *) src, n);
- if(v >= 0x10000)
- {
- v -= 0x10000;
-
- if(v > 0xFFFFF || dstlen < 2)
- {
- *dstlenp = (origDstlen - dstlen);
- return JS_FALSE;
- }
-
- if(dstlen < 2) goto buffer_too_small;
-
- if(dst)
- {
- *dst++ = (jschar)((v >> 10) + 0xD800);
- v = (jschar)((v & 0x3FF) + 0xDC00);
- }
- dstlen--;
- }
- }
-
- if(!dstlen) goto buffer_too_small;
- if(dst) *dst++ = (jschar) v;
-
- dstlen--;
- offset += n;
- src += n;
- srclen -= n;
- }
-
- *dstlenp = (origDstlen - dstlen);
- return JS_TRUE;
-
-bad_character:
- *dstlenp = (origDstlen - dstlen);
- return JS_FALSE;
-
-buffer_too_small:
- *dstlenp = (origDstlen - dstlen);
- return JS_FALSE;
-}
-
-JSString*
-dec_string(JSContext* cx, const char* bytes, size_t byteslen)
-{
- JSString* str = NULL;
- jschar* chars = NULL;
- size_t charslen;
-
- if(!dec_charbuf(bytes, byteslen, NULL, &charslen)) goto error;
-
- chars = JS_malloc(cx, (charslen + 1) * sizeof(jschar));
- if(!chars) return NULL;
- chars[charslen] = 0;
-
- if(!dec_charbuf(bytes, byteslen, chars, &charslen)) goto error;
-
- str = JS_NewUCString(cx, chars, charslen - 1);
- if(!str) goto error;
-
- goto success;
-
-error:
- if(chars != NULL) JS_free(cx, chars);
- str = NULL;
-
-success:
- return str;
-} \ No newline at end of file
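
enc_char() above derives the sequence length with a shift loop and then fills continuation bytes from the low six bits upward. For code points in range, the arithmetic matches the explicit RFC 3629 byte layout; a standalone sketch (a hypothetical helper, with no validation of surrogates or out-of-range values):

```c
// Explicit form of the UTF-8 byte layout that enc_char() computes.
#include <stdio.h>

static int
encode_utf8(unsigned char* out, unsigned int cp)
{
    if(cp < 0x80)
    {
        out[0] = (unsigned char) cp;
        return 1;
    }
    if(cp < 0x800)
    {
        out[0] = (unsigned char) (0xC0 | (cp >> 6));
        out[1] = (unsigned char) (0x80 | (cp & 0x3F));
        return 2;
    }
    if(cp < 0x10000)
    {
        out[0] = (unsigned char) (0xE0 | (cp >> 12));
        out[1] = (unsigned char) (0x80 | ((cp >> 6) & 0x3F));
        out[2] = (unsigned char) (0x80 | (cp & 0x3F));
        return 3;
    }
    out[0] = (unsigned char) (0xF0 | (cp >> 18));
    out[1] = (unsigned char) (0x80 | ((cp >> 12) & 0x3F));
    out[2] = (unsigned char) (0x80 | ((cp >> 6) & 0x3F));
    out[3] = (unsigned char) (0x80 | (cp & 0x3F));
    return 4;
}

int
main(void)
{
    unsigned char buf[4];
    int i;
    int n = encode_utf8(buf, 0x20AC); // U+20AC EURO SIGN -> E2 82 AC
    for(i = 0; i < n; i++) printf("%02X ", buf[i]);
    printf("\n");
    return 0;
}
```
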
diff --git a/1.1.x/src/couchdb/priv/couch_js/utf8.h b/1.1.x/src/couchdb/priv/couch_js/utf8.h
deleted file mode 100644
index 00f6b736..00000000
--- a/1.1.x/src/couchdb/priv/couch_js/utf8.h
+++ /dev/null
@@ -1,19 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#ifndef COUCH_JS_UTF_8_H
-#define COUCH_JS_UTF_8_H
-
-char* enc_string(JSContext* cx, jsval arg, size_t* buflen);
-JSString* dec_string(JSContext* cx, const char* buf, size_t buflen);
-
-#endif \ No newline at end of file
diff --git a/1.1.x/src/couchdb/priv/icu_driver/couch_icu_driver.c b/1.1.x/src/couchdb/priv/icu_driver/couch_icu_driver.c
deleted file mode 100644
index 1afe8eac..00000000
--- a/1.1.x/src/couchdb/priv/icu_driver/couch_icu_driver.c
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
-
-Licensed under the Apache License, Version 2.0 (the "License"); you may not use
-this file except in compliance with the License. You may obtain a copy of the
-License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software distributed
-under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. See the License for the
-specific language governing permissions and limitations under the License.
-
-*/
-
-// This file is the C port driver for Erlang. It provides a low-overhead
-// means of calling into C code; however, coding errors in this module can
-// crash the entire Erlang server.
-
-#ifdef DARWIN
-#define U_HIDE_DRAFT_API 1
-#define U_DISABLE_RENAMING 1
-#endif
-
-#include "erl_driver.h"
-#include "unicode/ucol.h"
-#include "unicode/ucasemap.h"
-#ifndef WIN32
-#include <string.h> // for memcpy
-#endif
-
-typedef struct {
- ErlDrvPort port;
- UCollator* collNoCase;
- UCollator* coll;
-} couch_drv_data;
-
-static void couch_drv_stop(ErlDrvData data)
-{
- couch_drv_data* pData = (couch_drv_data*)data;
- if (pData->coll) {
- ucol_close(pData->coll);
- }
- if (pData->collNoCase) {
- ucol_close(pData->collNoCase);
- }
- driver_free((char*)pData);
-}
-
-static ErlDrvData couch_drv_start(ErlDrvPort port, char *buff)
-{
- UErrorCode status = U_ZERO_ERROR;
- couch_drv_data* pData = (couch_drv_data*)driver_alloc(sizeof(couch_drv_data));
-
- if (pData == NULL)
- return ERL_DRV_ERROR_GENERAL;
-
- pData->port = port;
-
- pData->coll = ucol_open("", &status);
- if (U_FAILURE(status)) {
- couch_drv_stop((ErlDrvData)pData);
- return ERL_DRV_ERROR_GENERAL;
- }
-
- pData->collNoCase = ucol_open("", &status);
- if (U_FAILURE(status)) {
- couch_drv_stop((ErlDrvData)pData);
- return ERL_DRV_ERROR_GENERAL;
- }
-
- ucol_setAttribute(pData->collNoCase, UCOL_STRENGTH, UCOL_PRIMARY, &status);
- if (U_FAILURE(status)) {
- couch_drv_stop((ErlDrvData)pData);
- return ERL_DRV_ERROR_GENERAL;
- }
-
- return (ErlDrvData)pData;
-}
-
-static int return_control_result(void* pLocalResult, int localLen, char **ppRetBuf, int returnLen)
-{
- if (*ppRetBuf == NULL || localLen > returnLen) {
- *ppRetBuf = (char*)driver_alloc_binary(localLen);
- if(*ppRetBuf == NULL) {
- return -1;
- }
- }
- memcpy(*ppRetBuf, pLocalResult, localLen);
- return localLen;
-}
-
-static int couch_drv_control(ErlDrvData drv_data, unsigned int command, char *pBuf,
- int bufLen, char **rbuf, int rlen)
-{
-
- couch_drv_data* pData = (couch_drv_data*)drv_data;
- switch(command) {
- case 0: // COLLATE
- case 1: // COLLATE_NO_CASE:
- {
- UErrorCode status = U_ZERO_ERROR;
- int collResult;
- char response;
- UCharIterator iterA;
- UCharIterator iterB;
- int32_t length;
-
- // Two strings sit consecutively in the buffer. Each begins with a
- // 32-bit integer byte length; the actual string bytes follow.
-
- // first 32bits are the length
- memcpy(&length, pBuf, sizeof(length));
- pBuf += sizeof(length);
-
- // point the iterator at it.
- uiter_setUTF8(&iterA, pBuf, length);
-
- pBuf += length; // now on to string b
-
- // first 32bits are the length
- memcpy(&length, pBuf, sizeof(length));
- pBuf += sizeof(length);
-
- // point the iterator at it.
- uiter_setUTF8(&iterB, pBuf, length);
-
- if (command == 0) // COLLATE
- collResult = ucol_strcollIter(pData->coll, &iterA, &iterB, &status);
- else // COLLATE_NO_CASE
- collResult = ucol_strcollIter(pData->collNoCase, &iterA, &iterB, &status);
-
- if (collResult < 0)
- response = 0; //lt
- else if (collResult > 0)
- response = 2; //gt
- else
- response = 1; //eq
-
- return return_control_result(&response, sizeof(response), rbuf, rlen);
- }
-
- default:
- return -1;
- }
-}
-
-ErlDrvEntry couch_driver_entry = {
- NULL, /* F_PTR init, N/A */
- couch_drv_start, /* L_PTR start, called when port is opened */
- couch_drv_stop, /* F_PTR stop, called when port is closed */
- NULL, /* F_PTR output, called when erlang has sent */
- NULL, /* F_PTR ready_input, called when input descriptor ready */
- NULL, /* F_PTR ready_output, called when output descriptor ready */
- "couch_icu_driver", /* char *driver_name, the argument to open_port */
- NULL, /* F_PTR finish, called when unloaded */
- NULL, /* Not used */
- couch_drv_control, /* F_PTR control, port_command callback */
- NULL, /* F_PTR timeout, reserved */
- NULL, /* F_PTR outputv, reserved */
- NULL, /* F_PTR ready_async */
- NULL, /* F_PTR flush */
- NULL, /* F_PTR call */
- NULL, /* F_PTR event */
- ERL_DRV_EXTENDED_MARKER,
- ERL_DRV_EXTENDED_MAJOR_VERSION,
- ERL_DRV_EXTENDED_MINOR_VERSION,
- ERL_DRV_FLAG_USE_PORT_LOCKING,
- NULL, /* Reserved -- Used by emulator internally */
- NULL, /* F_PTR process_exit */
-};
-
-DRIVER_INIT(couch_icu_driver) /* must match name in driver_entry */
-{
- return &couch_driver_entry;
-}
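
couch_drv_control() expects its input buffer to hold two strings back to back, each preceded by a native-endian 32-bit byte length (native-endian because the driver memcpy's straight into an int32_t). A sketch of a packer producing that layout (a hypothetical helper; in CouchDB the Erlang side of the port builds this buffer):

```c
// Packs two strings in the length-prefixed layout couch_drv_control() reads.
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static size_t
pack_pair(char** out, const char* a, const char* b)
{
    int32_t la = (int32_t) strlen(a);
    int32_t lb = (int32_t) strlen(b);
    size_t total = sizeof(la) + (size_t) la + sizeof(lb) + (size_t) lb;
    char* p = malloc(total);
    char* q = p;

    if(!p) return 0;

    memcpy(q, &la, sizeof(la)); q += sizeof(la); // 32-bit length of a
    memcpy(q, a, (size_t) la);  q += la;         // bytes of a
    memcpy(q, &lb, sizeof(lb)); q += sizeof(lb); // 32-bit length of b
    memcpy(q, b, (size_t) lb);                   // bytes of b

    *out = p;
    return total;
}

int
main(void)
{
    char* buf = NULL;
    size_t n = pack_pair(&buf, "apple", "banana");
    printf("packed %lu bytes\n", (unsigned long) n);
    free(buf);
    return 0;
}
```
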
diff --git a/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable.sh b/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable.sh
deleted file mode 100644
index f8d042e3..00000000
--- a/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#! /bin/sh -e
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-# The purpose of this script is to echo an OS-specific command before launching
-# the actual process. This provides a way for Erlang to hard-kill its external
-# processes.
-
-echo "kill -9 $$"
-exec "$@"
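
The script prints a hard-kill command naming its own PID and then execs the real program, so the PID it announced stays valid for Erlang to kill later. A POSIX C sketch of the same trick:

```c
// POSIX sketch of couchspawnkillable.sh: announce our kill command, then
// replace ourselves with the target program so the PID stays valid.
#include <stdio.h>
#include <unistd.h>

int
main(int argc, char** argv)
{
    if(argc < 2) return 1;
    printf("kill -9 %d\n", (int) getpid());
    fflush(stdout);            // the kill line must reach Erlang first
    execvp(argv[1], argv + 1); // only returns on failure
    perror("execvp");
    return 1;
}
```
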
diff --git a/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable_win.c b/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable_win.c
deleted file mode 100644
index 06782315..00000000
--- a/1.1.x/src/couchdb/priv/spawnkillable/couchspawnkillable_win.c
+++ /dev/null
@@ -1,145 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-// Do what the two lines of shell script in couchspawnkillable do...
-// * Create a new suspended process with the same (duplicated) standard
-// handles as us.
-// * Write a line to stdout, consisting of the path to ourselves, plus
-// '--kill {pid}' where {pid} is the PID of the newly created process.
-// * Un-suspend the new process.
-// * Wait for the process to terminate.
-// * Terminate with the child's exit-code.
-
-// Later, couch will call us with --kill and the PID, so we dutifully
-// terminate the specified PID.
-
-#include <stdlib.h>
-#include "windows.h"
-
-char *get_child_cmdline(int argc, char **argv)
-{
- // make a new command-line, but skipping me.
- // XXX - todo - spaces etc in args???
- int i;
- char *p, *cmdline;
- int nchars = 0;
- int nthis = 1;
- for (i=1;i<argc;i++)
- nchars += strlen(argv[i])+1;
- cmdline = p = malloc(nchars+1);
- if (!cmdline)
- return NULL;
- for (i=1;i<argc;i++) {
- nthis = strlen(argv[i]);
- strncpy(p, argv[i], nthis);
- p[nthis] = ' ';
- p += nthis+1;
- }
- // Replace the last space we added above with a '\0'
- cmdline[nchars-1] = '\0';
- return cmdline;
-}
-
-// create the child process, returning 0, or the exit-code we will
-// terminate with.
-int create_child(int argc, char **argv, PROCESS_INFORMATION *pi)
-{
- char buf[1024];
- DWORD dwcreate;
- STARTUPINFO si;
- char *cmdline;
- if (argc < 2)
- return 1;
- cmdline = get_child_cmdline(argc, argv);
- if (!cmdline)
- return 2;
-
- memset(&si, 0, sizeof(si));
- si.cb = sizeof(si);
- // depending on how *our* parent is started, we may or may not have
- // a valid stderr stream - so although we try to duplicate it, only
- // failures to duplicate stdin and stdout are considered fatal.
- if (!DuplicateHandle(GetCurrentProcess(),
- GetStdHandle(STD_INPUT_HANDLE),
- GetCurrentProcess(),
- &si.hStdInput,
- 0,
- TRUE, // inheritable
- DUPLICATE_SAME_ACCESS) ||
- !DuplicateHandle(GetCurrentProcess(),
- GetStdHandle(STD_OUTPUT_HANDLE),
- GetCurrentProcess(),
- &si.hStdOutput,
- 0,
- TRUE, // inheritable
- DUPLICATE_SAME_ACCESS)) {
- return 3;
- }
- DuplicateHandle(GetCurrentProcess(),
- GetStdHandle(STD_ERROR_HANDLE),
- GetCurrentProcess(),
- &si.hStdError,
- 0,
- TRUE, // inheritable
- DUPLICATE_SAME_ACCESS);
-
- si.dwFlags = STARTF_USESTDHANDLES;
- dwcreate = CREATE_SUSPENDED;
- if (!CreateProcess( NULL, cmdline,
- NULL,
- NULL,
- TRUE, // inherit handles
- dwcreate,
- NULL, // environ
- NULL, // cwd
- &si,
- pi))
- return 4;
- return 0;
-}
-
-// and here we go...
-int main(int argc, char **argv)
-{
- char out_buf[1024];
- int rc;
- DWORD cbwritten;
- DWORD exitcode;
- PROCESS_INFORMATION pi;
- if (argc==3 && strcmp(argv[1], "--kill")==0) {
- HANDLE h = OpenProcess(PROCESS_TERMINATE, 0, atoi(argv[2]));
- if (!h)
- return 1;
- if (!TerminateProcess(h, 0))
- return 2;
- CloseHandle(h);
- return 0;
- }
- // spawn the new suspended process
- rc = create_child(argc, argv, &pi);
- if (rc)
- return rc;
- // Write the 'terminate' command, which includes this PID, back to couch.
- // *sob* - what about spaces etc?
- sprintf_s(out_buf, sizeof(out_buf), "%s --kill %d\n",
- argv[0], pi.dwProcessId);
- WriteFile(GetStdHandle(STD_OUTPUT_HANDLE), out_buf, strlen(out_buf),
- &cbwritten, NULL);
- // Let the child process go...
- ResumeThread(pi.hThread);
- // Wait for the process to terminate so we can reflect the exit code
- // back to couch.
- WaitForSingleObject(pi.hProcess, INFINITE);
- if (!GetExitCodeProcess(pi.hProcess, &exitcode))
- return 6;
- return exitcode;
-}
diff --git a/1.1.x/src/couchdb/priv/stat_descriptions.cfg.in b/1.1.x/src/couchdb/priv/stat_descriptions.cfg.in
deleted file mode 100644
index b80d7684..00000000
--- a/1.1.x/src/couchdb/priv/stat_descriptions.cfg.in
+++ /dev/null
@@ -1,50 +0,0 @@
-%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-%% use this file except in compliance with the License. You may obtain a copy of
-%% the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-%% License for the specific language governing permissions and limitations under
-%% the License.
-
-% Style guide for descriptions: Start with a lowercase letter & do not add
-% a trailing full-stop / period
-% Please keep this in alphabetical order
-
-{couchdb, database_writes, "number of times a database was changed"}.
-{couchdb, database_reads, "number of times a document was read from a database"}.
-{couchdb, open_databases, "number of open databases"}.
-{couchdb, open_os_files, "number of file descriptors CouchDB has open"}.
-{couchdb, request_time, "length of a request inside CouchDB without MochiWeb"}.
-{couchdb, auth_cache_hits, "number of authentication cache hits"}.
-{couchdb, auth_cache_misses, "number of authentication cache misses"}.
-
-{httpd, bulk_requests, "number of bulk requests"}.
-{httpd, requests, "number of HTTP requests"}.
-{httpd, temporary_view_reads, "number of temporary view reads"}.
-{httpd, view_reads, "number of view reads"}.
-{httpd, clients_requesting_changes, "number of clients for continuous _changes"}.
-
-{httpd_request_methods, 'COPY', "number of HTTP COPY requests"}.
-{httpd_request_methods, 'DELETE', "number of HTTP DELETE requests"}.
-{httpd_request_methods, 'GET', "number of HTTP GET requests"}.
-{httpd_request_methods, 'HEAD', "number of HTTP HEAD requests"}.
-{httpd_request_methods, 'POST', "number of HTTP POST requests"}.
-{httpd_request_methods, 'PUT', "number of HTTP PUT requests"}.
-
-{httpd_status_codes, '200', "number of HTTP 200 OK responses"}.
-{httpd_status_codes, '201', "number of HTTP 201 Created responses"}.
-{httpd_status_codes, '202', "number of HTTP 202 Accepted responses"}.
-{httpd_status_codes, '301', "number of HTTP 301 Moved Permanently responses"}.
-{httpd_status_codes, '304', "number of HTTP 304 Not Modified responses"}.
-{httpd_status_codes, '400', "number of HTTP 400 Bad Request responses"}.
-{httpd_status_codes, '401', "number of HTTP 401 Unauthorized responses"}.
-{httpd_status_codes, '403', "number of HTTP 403 Forbidden responses"}.
-{httpd_status_codes, '404', "number of HTTP 404 Not Found responses"}.
-{httpd_status_codes, '405', "number of HTTP 405 Method Not Allowed responses"}.
-{httpd_status_codes, '409', "number of HTTP 409 Conflict responses"}.
-{httpd_status_codes, '412', "number of HTTP 412 Precondition Failed responses"}.
-{httpd_status_codes, '500', "number of HTTP 500 Internal Server Error responses"}.
diff --git a/1.1.x/src/erlang-oauth/Makefile.am b/1.1.x/src/erlang-oauth/Makefile.am
deleted file mode 100644
index 48b76482..00000000
--- a/1.1.x/src/erlang-oauth/Makefile.am
+++ /dev/null
@@ -1,50 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy
-## of the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-oauthebindir = $(localerlanglibdir)/erlang-oauth/ebin
-
-oauth_file_collection = \
- oauth.app.in \
- oauth.erl \
- oauth_hmac_sha1.erl \
- oauth_http.erl \
- oauth_plaintext.erl \
- oauth_rsa_sha1.erl \
- oauth_unix.erl \
- oauth_uri.erl
-
-# Removed oauth_rsa_sha1.beam until we require R12B5 or
-# we add a ./configure option to enable it.
-
-oauthebin_make_generated_file_list = \
- oauth.app \
- oauth.beam \
- oauth_hmac_sha1.beam \
- oauth_http.beam \
- oauth_plaintext.beam \
- oauth_unix.beam \
- oauth_uri.beam
-
-oauthebin_DATA = \
- $(oauthebin_make_generated_file_list)
-
-EXTRA_DIST = \
- $(oauth_file_collection)
-
-CLEANFILES = \
- $(oauthebin_make_generated_file_list)
-
-%.app: %.app.in
- cp $< $@
-
-%.beam: %.erl
- $(ERLC) $(ERLC_FLAGS) $<
diff --git a/1.1.x/src/erlang-oauth/oauth.app.in b/1.1.x/src/erlang-oauth/oauth.app.in
deleted file mode 100644
index 6357b9b0..00000000
--- a/1.1.x/src/erlang-oauth/oauth.app.in
+++ /dev/null
@@ -1,20 +0,0 @@
-{application, oauth, [
- {description, "Erlang OAuth implementation"},
- {vsn, "dev"},
- {modules, [
- oauth,
- oauth_hmac_sha1,
- oauth_http,
- oauth_plaintext,
- oauth_rsa_sha1,
- oauth_unix,
- oauth_uri
- ]},
- {registered, []},
- {applications, [
- kernel,
- stdlib,
- crypto,
- inets
- ]}
-]}.
diff --git a/1.1.x/src/erlang-oauth/oauth.erl b/1.1.x/src/erlang-oauth/oauth.erl
deleted file mode 100644
index 866655c9..00000000
--- a/1.1.x/src/erlang-oauth/oauth.erl
+++ /dev/null
@@ -1,107 +0,0 @@
--module(oauth).
-
--export(
- [ get/5
- , header/1
- , post/5
- , signature/5
- , signature_base_string/3
- , signed_params/6
- , token/1
- , token_secret/1
- , uri/2
- , verify/6
- ]).
-
-
-get(URL, ExtraParams, Consumer, Token, TokenSecret) ->
- SignedParams = signed_params("GET", URL, ExtraParams, Consumer, Token, TokenSecret),
- oauth_http:get(uri(URL, SignedParams)).
-
-post(URL, ExtraParams, Consumer, Token, TokenSecret) ->
- SignedParams = signed_params("POST", URL, ExtraParams, Consumer, Token, TokenSecret),
- oauth_http:post(URL, oauth_uri:params_to_string(SignedParams)).
-
-uri(Base, []) ->
- Base;
-uri(Base, Params) ->
- lists:concat([Base, "?", oauth_uri:params_to_string(Params)]).
-
-header(Params) ->
- {"Authorization", "OAuth " ++ oauth_uri:params_to_header_string(Params)}.
-
-token(Params) ->
- proplists:get_value("oauth_token", Params).
-
-token_secret(Params) ->
- proplists:get_value("oauth_token_secret", Params).
-
-verify(Signature, HttpMethod, URL, Params, Consumer, TokenSecret) ->
- case signature_method(Consumer) of
- plaintext ->
- oauth_plaintext:verify(Signature, consumer_secret(Consumer), TokenSecret);
- hmac_sha1 ->
- BaseString = signature_base_string(HttpMethod, URL, Params),
- oauth_hmac_sha1:verify(Signature, BaseString, consumer_secret(Consumer), TokenSecret);
- rsa_sha1 ->
- BaseString = signature_base_string(HttpMethod, URL, Params),
- oauth_rsa_sha1:verify(Signature, BaseString, consumer_secret(Consumer))
- end.
-
-signed_params(HttpMethod, URL, ExtraParams, Consumer, Token, TokenSecret) ->
- Params = token_param(Token, params(Consumer, ExtraParams)),
- [{"oauth_signature", signature(HttpMethod, URL, Params, Consumer, TokenSecret)}|Params].
-
-signature(HttpMethod, URL, Params, Consumer, TokenSecret) ->
- case signature_method(Consumer) of
- plaintext ->
- oauth_plaintext:signature(consumer_secret(Consumer), TokenSecret);
- hmac_sha1 ->
- BaseString = signature_base_string(HttpMethod, URL, Params),
- oauth_hmac_sha1:signature(BaseString, consumer_secret(Consumer), TokenSecret);
- rsa_sha1 ->
- BaseString = signature_base_string(HttpMethod, URL, Params),
- oauth_rsa_sha1:signature(BaseString, consumer_secret(Consumer))
- end.
-
-signature_base_string(HttpMethod, URL, Params) ->
- NormalizedURL = oauth_uri:normalize(URL),
- NormalizedParams = oauth_uri:params_to_string(lists:sort(Params)),
- oauth_uri:calate("&", [HttpMethod, NormalizedURL, NormalizedParams]).
-
-token_param("", Params) ->
- Params;
-token_param(Token, Params) ->
- [{"oauth_token", Token}|Params].
-
-params(Consumer, Params) ->
- Nonce = base64:encode_to_string(crypto:rand_bytes(32)), % cf. ruby-oauth
- params(Consumer, oauth_unix:timestamp(), Nonce, Params).
-
-params(Consumer, Timestamp, Nonce, Params) ->
- [ {"oauth_version", "1.0"}
- , {"oauth_nonce", Nonce}
- , {"oauth_timestamp", integer_to_list(Timestamp)}
- , {"oauth_signature_method", signature_method_string(Consumer)}
- , {"oauth_consumer_key", consumer_key(Consumer)}
- | Params
- ].
-
-signature_method_string(Consumer) ->
- case signature_method(Consumer) of
- plaintext ->
- "PLAINTEXT";
- hmac_sha1 ->
- "HMAC-SHA1";
- rsa_sha1 ->
- "RSA-SHA1"
- end.
-
-signature_method(_Consumer={_, _, Method}) ->
- Method.
-
-consumer_secret(_Consumer={_, Secret, _}) ->
- Secret.
-
-consumer_key(_Consumer={Key, _, _}) ->
- Key.
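
For a concrete sense of what signature_base_string/3 produces: with placeholder values (method GET, URL http://example.com/, consumer key "key", nonce "abc", timestamp 1305630000, HMAC-SHA1, no extra params and no token), the sorted "k=v&k=v" parameter string is percent-encoded a second time when the three components are joined with "&":

```
GET&http%3A%2F%2Fexample.com%2F&oauth_consumer_key%3Dkey%26oauth_nonce%3Dabc%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1305630000%26oauth_version%3D1.0
```
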
diff --git a/1.1.x/src/erlang-oauth/oauth_hmac_sha1.erl b/1.1.x/src/erlang-oauth/oauth_hmac_sha1.erl
deleted file mode 100644
index 79d59f37..00000000
--- a/1.1.x/src/erlang-oauth/oauth_hmac_sha1.erl
+++ /dev/null
@@ -1,11 +0,0 @@
--module(oauth_hmac_sha1).
-
--export([signature/3, verify/4]).
-
-
-signature(BaseString, CS, TS) ->
- Key = oauth_uri:calate("&", [CS, TS]),
- base64:encode_to_string(crypto:sha_mac(Key, BaseString)).
-
-verify(Signature, BaseString, CS, TS) ->
- couch_util:verify(signature(BaseString, CS, TS), Signature).
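
The key is the consumer secret and token secret joined with "&" (each percent-encoded by calate/2), and the signature is the base64 of the HMAC-SHA1 over the base string. A C sketch of the same computation using OpenSSL's one-shot HMAC() and EVP_EncodeBlock() — OpenSSL is an assumption here (the module itself uses crypto:sha_mac/2), and the key and base string are placeholders:

```c
// HMAC-SHA1 + base64 signature step, sketched with OpenSSL.
#include <stdio.h>
#include <string.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>

int
main(void)
{
    const char* key = "consumersecret&tokensecret";              // placeholder
    const char* base_string = "GET&http%3A%2F%2Fexample.com%2F&"; // placeholder
    unsigned char mac[EVP_MAX_MD_SIZE];
    unsigned int maclen = 0;
    unsigned char b64[EVP_MAX_MD_SIZE * 2];

    HMAC(EVP_sha1(), key, (int) strlen(key),
         (const unsigned char*) base_string, strlen(base_string),
         mac, &maclen);

    EVP_EncodeBlock(b64, mac, (int) maclen); // base64, NUL-terminated
    printf("oauth_signature=%s\n", b64);
    return 0;
}
```
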
diff --git a/1.1.x/src/erlang-oauth/oauth_http.erl b/1.1.x/src/erlang-oauth/oauth_http.erl
deleted file mode 100644
index bf5a4bac..00000000
--- a/1.1.x/src/erlang-oauth/oauth_http.erl
+++ /dev/null
@@ -1,22 +0,0 @@
--module(oauth_http).
-
--export([get/1, post/2, response_params/1, response_body/1, response_code/1]).
-
-
-get(URL) ->
- request(get, {URL, []}).
-
-post(URL, Data) ->
- request(post, {URL, [], "application/x-www-form-urlencoded", Data}).
-
-request(Method, Request) ->
- http:request(Method, Request, [{autoredirect, false}], []).
-
-response_params(Response) ->
- oauth_uri:params_from_string(response_body(Response)).
-
-response_body({{_, _, _}, _, Body}) ->
- Body.
-
-response_code({{_, Code, _}, _, _}) ->
- Code.
diff --git a/1.1.x/src/erlang-oauth/oauth_plaintext.erl b/1.1.x/src/erlang-oauth/oauth_plaintext.erl
deleted file mode 100644
index 41a1e9b2..00000000
--- a/1.1.x/src/erlang-oauth/oauth_plaintext.erl
+++ /dev/null
@@ -1,10 +0,0 @@
--module(oauth_plaintext).
-
--export([signature/2, verify/3]).
-
-
-signature(CS, TS) ->
- oauth_uri:calate("&", [CS, TS]).
-
-verify(Signature, CS, TS) ->
- couch_util:verify(signature(CS, TS), Signature).
diff --git a/1.1.x/src/erlang-oauth/oauth_rsa_sha1.erl b/1.1.x/src/erlang-oauth/oauth_rsa_sha1.erl
deleted file mode 100644
index 6f4828e0..00000000
--- a/1.1.x/src/erlang-oauth/oauth_rsa_sha1.erl
+++ /dev/null
@@ -1,30 +0,0 @@
--module(oauth_rsa_sha1).
-
--export([signature/2, verify/3]).
-
--include_lib("public_key/include/public_key.hrl").
-
-
-signature(BaseString, PrivateKeyPath) ->
- {ok, [Info]} = public_key:pem_to_der(PrivateKeyPath),
- {ok, PrivateKey} = public_key:decode_private_key(Info),
- base64:encode_to_string(public_key:sign(list_to_binary(BaseString), PrivateKey)).
-
-verify(Signature, BaseString, PublicKey) ->
- public_key:verify_signature(to_binary(BaseString), sha, base64:decode(Signature), public_key(PublicKey)).
-
-to_binary(Term) when is_list(Term) ->
- list_to_binary(Term);
-to_binary(Term) when is_binary(Term) ->
- Term.
-
-public_key(Path) when is_list(Path) ->
- {ok, [{cert, DerCert, not_encrypted}]} = public_key:pem_to_der(Path),
- {ok, Cert} = public_key:pkix_decode_cert(DerCert, otp),
- public_key(Cert);
-public_key(#'OTPCertificate'{tbsCertificate=Cert}) ->
- public_key(Cert);
-public_key(#'OTPTBSCertificate'{subjectPublicKeyInfo=Info}) ->
- public_key(Info);
-public_key(#'OTPSubjectPublicKeyInfo'{subjectPublicKey=Key}) ->
- Key.
diff --git a/1.1.x/src/erlang-oauth/oauth_unix.erl b/1.1.x/src/erlang-oauth/oauth_unix.erl
deleted file mode 100644
index 73ca3143..00000000
--- a/1.1.x/src/erlang-oauth/oauth_unix.erl
+++ /dev/null
@@ -1,16 +0,0 @@
--module(oauth_unix).
-
--export([timestamp/0]).
-
-
-timestamp() ->
- timestamp(calendar:universal_time()).
-
-timestamp(DateTime) ->
- seconds(DateTime) - epoch().
-
-epoch() ->
- seconds({{1970,1,1},{00,00,00}}).
-
-seconds(DateTime) ->
- calendar:datetime_to_gregorian_seconds(DateTime).
diff --git a/1.1.x/src/erlang-oauth/oauth_uri.erl b/1.1.x/src/erlang-oauth/oauth_uri.erl
deleted file mode 100644
index 3bdc9076..00000000
--- a/1.1.x/src/erlang-oauth/oauth_uri.erl
+++ /dev/null
@@ -1,88 +0,0 @@
--module(oauth_uri).
-
--export([normalize/1, calate/2, encode/1]).
--export([params_from_string/1, params_to_string/1,
- params_from_header_string/1, params_to_header_string/1]).
-
--import(lists, [concat/1]).
-
--define(is_uppercase_alpha(C), C >= $A, C =< $Z).
--define(is_lowercase_alpha(C), C >= $a, C =< $z).
--define(is_alpha(C), ?is_uppercase_alpha(C); ?is_lowercase_alpha(C)).
--define(is_digit(C), C >= $0, C =< $9).
--define(is_alphanumeric(C), ?is_alpha(C); ?is_digit(C)).
--define(is_unreserved(C), ?is_alphanumeric(C); C =:= $-; C =:= $_; C =:= $.; C =:= $~).
--define(is_hex(C), ?is_digit(C); C >= $A, C =< $F).
-
-
-normalize(URI) ->
- case http_uri:parse(URI) of
- {Scheme, UserInfo, Host, Port, Path, _Query} ->
- normalize(Scheme, UserInfo, string:to_lower(Host), Port, [Path]);
- Else ->
- Else
- end.
-
-normalize(http, UserInfo, Host, 80, Acc) ->
- normalize(http, UserInfo, [Host|Acc]);
-normalize(https, UserInfo, Host, 443, Acc) ->
- normalize(https, UserInfo, [Host|Acc]);
-normalize(Scheme, UserInfo, Host, Port, Acc) ->
- normalize(Scheme, UserInfo, [Host, ":", Port|Acc]).
-
-normalize(Scheme, [], Acc) ->
- concat([Scheme, "://"|Acc]);
-normalize(Scheme, UserInfo, Acc) ->
- concat([Scheme, "://", UserInfo, "@"|Acc]).
-
-params_to_header_string(Params) ->
- intercalate(", ", [concat([encode(K), "=\"", encode(V), "\""]) || {K, V} <- Params]).
-
-params_from_header_string(String) ->
- [param_from_header_string(Param) || Param <- re:split(String, ",\\s*", [{return, list}]), Param =/= ""].
-
-param_from_header_string(Param) ->
- [Key, QuotedValue] = string:tokens(Param, "="),
- Value = string:substr(QuotedValue, 2, length(QuotedValue) - 2),
- {decode(Key), decode(Value)}.
-
-params_from_string(Params) ->
- [param_from_string(Param) || Param <- string:tokens(Params, "&")].
-
-param_from_string(Param) ->
- list_to_tuple([decode(Value) || Value <- string:tokens(Param, "=")]).
-
-params_to_string(Params) ->
- intercalate("&", [calate("=", [K, V]) || {K, V} <- Params]).
-
-calate(Sep, Xs) ->
- intercalate(Sep, [encode(X) || X <- Xs]).
-
-intercalate(Sep, Xs) ->
- concat(intersperse(Sep, Xs)).
-
-intersperse(_, []) -> [];
-intersperse(_, [X]) -> [X];
-intersperse(Sep, [X|Xs]) ->
- [X, Sep|intersperse(Sep, Xs)].
-
-decode(Chars) ->
- decode(Chars, []).
-
-decode([], Decoded) ->
- lists:reverse(Decoded);
-decode([$%,A,B|Etc], Decoded) when ?is_hex(A), ?is_hex(B) ->
- decode(Etc, [erlang:list_to_integer([A,B], 16)|Decoded]);
-decode([C|Etc], Decoded) when ?is_unreserved(C) ->
- decode(Etc, [C|Decoded]).
-
-encode(Chars) ->
- encode(Chars, []).
-
-encode([], Encoded) ->
- lists:flatten(lists:reverse(Encoded));
-encode([C|Etc], Encoded) when ?is_unreserved(C) ->
- encode(Etc, [C|Encoded]);
-encode([C|Etc], Encoded) ->
- Value = io_lib:format("%~2.2.0s", [erlang:integer_to_list(C, 16)]),
- encode(Etc, [Value|Encoded]).
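
encode/1 lets bytes in the RFC 3986 unreserved set through untouched and turns everything else into an uppercase %XX escape. The same rule in C (a hypothetical helper; the caller must size the output buffer at three bytes per input byte, plus one):

```c
// Percent-encoding rule of oauth_uri:encode/1, sketched in C.
#include <ctype.h>
#include <stdio.h>

static int
is_unreserved(unsigned char c)
{
    return isalnum(c) || c == '-' || c == '_' || c == '.' || c == '~';
}

static void
pct_encode(const char* in, char* out)
{
    static const char hex[] = "0123456789ABCDEF";
    for(; *in; in++)
    {
        unsigned char c = (unsigned char) *in;
        if(is_unreserved(c))
        {
            *out++ = (char) c;
        }
        else
        {
            *out++ = '%';
            *out++ = hex[c >> 4];
            *out++ = hex[c & 0x0F];
        }
    }
    *out = '\0';
}

int
main(void)
{
    char buf[128];
    pct_encode("a b&c=d", buf); // -> "a%20b%26c%3Dd"
    printf("%s\n", buf);
    return 0;
}
```
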
diff --git a/1.1.x/src/etap/Makefile.am b/1.1.x/src/etap/Makefile.am
deleted file mode 100644
index 732347bf..00000000
--- a/1.1.x/src/etap/Makefile.am
+++ /dev/null
@@ -1,44 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy
-## of the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-etapebindir = $(localerlanglibdir)/etap/ebin
-
-etap_file_collection = \
- etap.erl \
- etap_application.erl \
- etap_can.erl \
- etap_exception.erl \
- etap_process.erl \
- etap_report.erl \
- etap_request.erl \
- etap_string.erl \
- etap_web.erl
-
-etapebin_make_generated_file_list = \
- etap.beam \
- etap_application.beam \
- etap_can.beam \
- etap_exception.beam \
- etap_process.beam \
- etap_report.beam \
- etap_request.beam \
- etap_string.beam \
- etap_web.beam
-
-etapebin_DATA = $(etapebin_make_generated_file_list)
-
-EXTRA_DIST = $(etap_file_collection)
-
-CLEANFILES = $(etapebin_make_generated_file_list)
-
-%.beam: %.erl
- $(ERLC) $(ERLC_FLAGS) $<
diff --git a/1.1.x/src/etap/etap.erl b/1.1.x/src/etap/etap.erl
deleted file mode 100644
index 5ad5dba3..00000000
--- a/1.1.x/src/etap/etap.erl
+++ /dev/null
@@ -1,416 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @author Nick Gerakines <nick@gerakines.net> [http://socklabs.com/]
-%% @author Jeremy Wall <jeremy@marzhillstudios.com>
-%% @version 0.3.4
-%% @copyright 2007-2008 Jeremy Wall, 2008-2009 Nick Gerakines
-%% @reference http://testanything.org/wiki/index.php/Main_Page
-%% @reference http://en.wikipedia.org/wiki/Test_Anything_Protocol
-%% @todo Finish implementing the skip directive.
-%% @todo Document the messages handled by this receive loop.
-%% @todo Explain in documentation why we use a process to handle test input.
-%% @doc etap is a TAP testing module for Erlang components and applications.
-%% This module allows developers to test their software using the TAP method.
-%%
-%% <blockquote cite="http://en.wikipedia.org/wiki/Test_Anything_Protocol"><p>
-%% TAP, the Test Anything Protocol, is a simple text-based interface between
-%% testing modules in a test harness. TAP started life as part of the test
-%% harness for Perl but now has implementations in C/C++, Python, PHP, Perl
-%% and probably others by the time you read this.
-%% </p></blockquote>
-%%
-%% The testing process begins by defining a plan using etap:plan/1, running
-%% a number of etap tests and then calling etap:end_tests/0. Please refer to
-%% the Erlang modules in the t directory of this project for example tests.
--module(etap).
--export([
- ensure_test_server/0, start_etap_server/0, test_server/1,
- diag/1, diag/2, plan/1, end_tests/0, not_ok/2, ok/2, is/3, isnt/3,
- any/3, none/3, fun_is/3, is_greater/3, skip/1, skip/2,
- ensure_coverage_starts/0, ensure_coverage_ends/0, coverage_report/0,
- datetime/1, skip/3, bail/0, bail/1
-]).
--record(test_state, {planned = 0, count = 0, pass = 0, fail = 0, skip = 0, skip_reason = ""}).
--vsn("0.3.4").
-
-%% @spec plan(N) -> Result
-%% N = unknown | skip | {skip, string()} | integer()
-%% Result = ok
-%% @doc Create a test plan and bootstrap the test server.
-plan(unknown) ->
- ensure_coverage_starts(),
- ensure_test_server(),
- etap_server ! {self(), plan, unknown},
- ok;
-plan(skip) ->
- io:format("1..0 # skip~n");
-plan({skip, Reason}) ->
- io:format("1..0 # skip ~s~n", [Reason]);
-plan(N) when is_integer(N), N > 0 ->
- ensure_coverage_starts(),
- ensure_test_server(),
- etap_server ! {self(), plan, N},
- ok.
-
-%% @spec end_tests() -> ok
-%% @doc End the current test plan and output test results.
-%% @todo This should probably be done in the test_server process.
-end_tests() ->
- ensure_coverage_ends(),
- etap_server ! {self(), state},
- State = receive X -> X end,
- if
- State#test_state.planned == -1 ->
- io:format("1..~p~n", [State#test_state.count]);
- true ->
- ok
- end,
- case whereis(etap_server) of
- undefined -> ok;
- _ -> etap_server ! done, ok
- end.
-
-%% @private
-ensure_coverage_starts() ->
- case os:getenv("COVER") of
- false -> ok;
- _ ->
- BeamDir = case os:getenv("COVER_BIN") of false -> "ebin"; X -> X end,
- cover:compile_beam_directory(BeamDir)
- end.
-
-%% @private
-%% @doc Attempts to write out any collected coverage data to the cover/
-%% directory. This function should not be called externally, but it could be.
-ensure_coverage_ends() ->
- case os:getenv("COVER") of
- false -> ok;
- _ ->
- filelib:ensure_dir("cover/"),
- Name = lists:flatten([
- io_lib:format("~.16b", [X]) || X <- binary_to_list(erlang:md5(
- term_to_binary({make_ref(), now()})
- ))
- ]),
- cover:export("cover/" ++ Name ++ ".coverdata")
- end.
-
-%% @spec coverage_report() -> ok
-%% @doc Use the cover module's coverage report builder to create code coverage
-%% reports from recently created coverdata files.
-coverage_report() ->
- [cover:import(File) || File <- filelib:wildcard("cover/*.coverdata")],
- lists:foreach(
- fun(Mod) ->
- cover:analyse_to_file(Mod, atom_to_list(Mod) ++ "_coverage.txt", [])
- end,
- cover:imported_modules()
- ),
- ok.
-
-bail() ->
- bail("").
-
-bail(Reason) ->
-    etap_server ! {self(), diag, "Bail out! " ++ Reason},
-    ensure_coverage_ends(),
-    etap_server ! done,
-    ok.
-
-
-%% @spec diag(S) -> ok
-%% S = string()
-%% @doc Print a debug/status message related to the test suite.
-diag(S) -> etap_server ! {self(), diag, "# " ++ S}, ok.
-
-%% @spec diag(Format, Data) -> ok
-%% Format = atom() | string() | binary()
-%% Data = [term()]
-%% @doc Print a debug/status message related to the test suite.
-%% Function arguments are passed through io_lib:format/2.
-diag(Format, Data) -> diag(io_lib:format(Format, Data)).
-
-%% @spec ok(Expr, Desc) -> Result
-%% Expr = true | false
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that a statement is true.
-ok(Expr, Desc) -> mk_tap(Expr == true, Desc).
-
-%% @spec not_ok(Expr, Desc) -> Result
-%% Expr = true | false
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that a statement is false.
-not_ok(Expr, Desc) -> mk_tap(Expr == false, Desc).
-
-%% @spec is(Got, Expected, Desc) -> Result
-%% Got = any()
-%% Expected = any()
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that two values are the same.
-is(Got, Expected, Desc) ->
- case mk_tap(Got == Expected, Desc) of
- false ->
- etap_server ! {self(), diag, " ---"},
- etap_server ! {self(), diag, io_lib:format(" description: ~p", [Desc])},
- etap_server ! {self(), diag, io_lib:format(" found: ~p", [Got])},
- etap_server ! {self(), diag, io_lib:format(" wanted: ~p", [Expected])},
- etap_server ! {self(), diag, " ..."},
- false;
- true -> true
- end.
-
-%% @spec isnt(Got, Expected, Desc) -> Result
-%% Got = any()
-%% Expected = any()
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that two values are not the same.
-isnt(Got, Expected, Desc) -> mk_tap(Got /= Expected, Desc).
-
-%% @spec is_greater(ValueA, ValueB, Desc) -> Result
-%%      ValueA = integer()
-%%      ValueB = integer()
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that an integer is greater than another.
-is_greater(ValueA, ValueB, Desc) when is_integer(ValueA), is_integer(ValueB) ->
- mk_tap(ValueA > ValueB, Desc).
-
-%% @spec any(Got, Items, Desc) -> Result
-%% Got = any()
-%% Items = [any()]
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that an item is in a list.
-any(Got, Items, Desc) ->
- is(lists:member(Got, Items), true, Desc).
-
-%% @spec none(Got, Items, Desc) -> Result
-%% Got = any()
-%% Items = [any()]
-%% Desc = string()
-%% Result = true | false
-%% @doc Assert that an item is not in a list.
-none(Got, Items, Desc) ->
- is(lists:member(Got, Items), false, Desc).
-
-%% @spec fun_is(Fun, Expected, Desc) -> Result
-%% Fun = function()
-%% Expected = any()
-%% Desc = string()
-%% Result = true | false
-%% @doc Use an anonymous function to assert a pattern match.
-fun_is(Fun, Expected, Desc) when is_function(Fun) ->
- is(Fun(Expected), true, Desc).
-
-%% @equiv skip(TestFun, "")
-skip(TestFun) when is_function(TestFun) ->
- skip(TestFun, "").
-
-%% @spec skip(TestFun, Reason) -> ok
-%% TestFun = function()
-%% Reason = string()
-%% @doc Skip a test.
-skip(TestFun, Reason) when is_function(TestFun), is_list(Reason) ->
- begin_skip(Reason),
- catch TestFun(),
- end_skip(),
- ok.
-
-%% @spec skip(Q, TestFun, Reason) -> ok
-%% Q = true | false | function()
-%% TestFun = function()
-%% Reason = string()
-%% @doc Skips a test conditionally. The first argument to this function can
-%% either be the 'true' or 'false' atoms or a function that returns 'true' or
-%% 'false'.
-skip(QFun, TestFun, Reason) when is_function(QFun), is_function(TestFun), is_list(Reason) ->
- case QFun() of
- true -> begin_skip(Reason), TestFun(), end_skip();
- _ -> TestFun()
- end,
- ok;
-
-skip(Q, TestFun, Reason) when is_function(TestFun), is_list(Reason), Q == true ->
- begin_skip(Reason),
- TestFun(),
- end_skip(),
- ok;
-
-skip(_, TestFun, Reason) when is_function(TestFun), is_list(Reason) ->
- TestFun(),
- ok.
-
-%% @private
-begin_skip(Reason) ->
- etap_server ! {self(), begin_skip, Reason}.
-
-%% @private
-end_skip() ->
- etap_server ! {self(), end_skip}.
-
-% ---
-% Internal / Private functions
-
-%% @private
-%% @doc Start the etap_server process if it is not running already.
-ensure_test_server() ->
- case whereis(etap_server) of
- undefined ->
- proc_lib:start(?MODULE, start_etap_server,[]);
- _ ->
- diag("The test server is already running.")
- end.
-
-%% @private
-%% @doc Start the etap_server loop and register the process under the
-%% etap_server name.
-start_etap_server() ->
- catch register(etap_server, self()),
- proc_lib:init_ack(ok),
- etap:test_server(#test_state{
- planned = 0,
- count = 0,
- pass = 0,
- fail = 0,
- skip = 0,
- skip_reason = ""
- }).
-
-
-%% @private
-%% @doc The main etap_server receive/run loop. The etap_server receive loop
-%% responds to a number of messages pertaining to the failure or passing of
-%% tests. It is also used to initiate the testing process with the
-%% {_, plan, _} message that clears the current test state.
-test_server(State) ->
- NewState = receive
- {_From, plan, unknown} ->
- io:format("# Current time local ~s~n", [datetime(erlang:localtime())]),
- io:format("# Using etap version ~p~n", [ proplists:get_value(vsn, proplists:get_value(attributes, etap:module_info())) ]),
- State#test_state{
- planned = -1,
- count = 0,
- pass = 0,
- fail = 0,
- skip = 0,
- skip_reason = ""
- };
- {_From, plan, N} ->
- io:format("# Current time local ~s~n", [datetime(erlang:localtime())]),
- io:format("# Using etap version ~p~n", [ proplists:get_value(vsn, proplists:get_value(attributes, etap:module_info())) ]),
- io:format("1..~p~n", [N]),
- State#test_state{
- planned = N,
- count = 0,
- pass = 0,
- fail = 0,
- skip = 0,
- skip_reason = ""
- };
- {_From, begin_skip, Reason} ->
- State#test_state{
- skip = 1,
- skip_reason = Reason
- };
- {_From, end_skip} ->
- State#test_state{
- skip = 0,
- skip_reason = ""
- };
- {_From, pass, Desc} ->
- FullMessage = skip_diag(
- " - " ++ Desc,
- State#test_state.skip,
- State#test_state.skip_reason
- ),
- io:format("ok ~p ~s~n", [State#test_state.count + 1, FullMessage]),
- State#test_state{
- count = State#test_state.count + 1,
- pass = State#test_state.pass + 1
- };
-
- {_From, fail, Desc} ->
- FullMessage = skip_diag(
- " - " ++ Desc,
- State#test_state.skip,
- State#test_state.skip_reason
- ),
- io:format("not ok ~p ~s~n", [State#test_state.count + 1, FullMessage]),
- State#test_state{
- count = State#test_state.count + 1,
- fail = State#test_state.fail + 1
- };
- {From, state} ->
- From ! State,
- State;
- {_From, diag, Message} ->
- io:format("~s~n", [Message]),
- State;
- {From, count} ->
- From ! State#test_state.count,
- State;
- {From, is_skip} ->
- From ! State#test_state.skip,
- State;
- done ->
- exit(normal)
- end,
- test_server(NewState).
-
-%% @private
-%% @doc Process the result of a test and send it to the etap_server process.
-mk_tap(Result, Desc) ->
- IsSkip = lib:sendw(etap_server, is_skip),
- case [IsSkip, Result] of
- [_, true] ->
- etap_server ! {self(), pass, Desc},
- true;
- [1, _] ->
- etap_server ! {self(), pass, Desc},
- true;
- _ ->
- etap_server ! {self(), fail, Desc},
- false
- end.
-
-%% @private
-%% @doc Format a date/time string.
-datetime(DateTime) ->
- {{Year, Month, Day}, {Hour, Min, Sec}} = DateTime,
- io_lib:format("~4.10.0B-~2.10.0B-~2.10.0B ~2.10.0B:~2.10.0B:~2.10.0B", [Year, Month, Day, Hour, Min, Sec]).
-
-%% @private
-%% @doc Craft an output message taking skip/todo into consideration.
-skip_diag(Message, 0, _) ->
- Message;
-skip_diag(_Message, 1, "") ->
- " # SKIP";
-skip_diag(_Message, 1, Reason) ->
- " # SKIP : " ++ Reason.
diff --git a/1.1.x/src/etap/etap_application.erl b/1.1.x/src/etap/etap_application.erl
deleted file mode 100644
index 98b52751..00000000
--- a/1.1.x/src/etap/etap_application.erl
+++ /dev/null
@@ -1,72 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @author Nick Gerakines <nick@gerakines.net> [http://socklabs.com/]
-%% @copyright 2008 Nick Gerakines
-%% @reference http://testanything.org/wiki/index.php/Main_Page
-%% @reference http://en.wikipedia.org/wiki/Test_Anything_Protocol
-%% @todo Explain in documentation why we use a process to handle test input.
-%% @todo Add test to verify the number of members in a pg2 group.
-%% @doc Provide test functionality to the application and related behaviors.
--module(etap_application).
--export([
- start_ok/2, ensure_loaded/3, load_ok/2,
- pg2_group_exists/2, pg2_group_doesntexist/2
-]).
-
-%% @spec load_ok(atom(), string()) -> true | false
-%% @doc Assert that an application can be loaded successfully.
-load_ok(AppName, Desc) ->
- etap:ok(application:load(AppName) == ok, Desc).
-
-%% @spec start_ok(atom(), string()) -> true | false
-%% @doc Assert that an application can be started successfully.
-start_ok(AppName, Desc) ->
- etap:ok(application:start(AppName) == ok, Desc).
-
-%% @spec ensure_loaded(atom(), string(), string()) -> true | false
-%% @doc Assert that an application has been loaded successfully.
-ensure_loaded(AppName, AppVsn, Desc) ->
-    %% etap:any/3 tests membership of a value, not a predicate, so use
-    %% lists:any/2 to scan the loaded applications.
-    etap:ok(
-        lists:any(
-            fun({Name, _, Vsn}) -> {Name, Vsn} == {AppName, AppVsn} end,
-            application:loaded_applications()
-        ),
-        Desc
-    ).
-
-%% @spec pg2_group_exists(string(), string()) -> true | false
-%% @doc Assert that a pg2 group exists.
-pg2_group_exists(GroupName, Desc) ->
-    %% etap:any/3 checks list membership, so pass the group name itself.
-    etap:any(GroupName, pg2:which_groups(), Desc).
-
-%% @spec pg2_group_doesntexist(string(), string()) -> true | false
-%% @doc Assert that a pg2 group does not exist.
-pg2_group_doesntexist(GroupName, Desc) ->
-    %% As above, etap:none/3 checks list membership directly.
-    etap:none(GroupName, pg2:which_groups(), Desc).
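A hypothetical use of the assertions above (sasl is just an example of a
loadable, startable application):

    etap:plan(2),
    etap_application:load_ok(sasl, "sasl loads"),
    etap_application:start_ok(sasl, "sasl starts"),
    etap:end_tests().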
diff --git a/1.1.x/src/etap/etap_can.erl b/1.1.x/src/etap/etap_can.erl
deleted file mode 100644
index 552b7174..00000000
--- a/1.1.x/src/etap/etap_can.erl
+++ /dev/null
@@ -1,79 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @reference http://testanything.org/wiki/index.php/Main_Page
-%% @reference http://en.wikipedia.org/wiki/Test_Anything_Protocol
-%% @doc Provide test functionality for modules.
--module(etap_can).
-
--export([
- loaded_ok/2, can_ok/2, can_ok/3,
- has_attrib/2, is_attrib/3, is_behaviour/2
-]).
-
-%% @spec loaded_ok(atom(), string()) -> true | false
-%% @doc Assert that a module has been loaded successfully.
-loaded_ok(M, Desc) when is_atom(M) ->
- etap:fun_is(fun({module, _}) -> true; (_) -> false end, code:load_file(M), Desc).
-
-%% @spec can_ok(atom(), atom()) -> true | false
-%% @doc Assert that a module exports a given function.
-can_ok(M, F) when is_atom(M), is_atom(F) ->
-    Matches = [X || {X, _} <- M:module_info(exports), X == F],
-    %% compare the match count, not the list itself, against zero
-    etap:ok(length(Matches) > 0, lists:concat([M, " can ", F])).
-
-%% @spec can_ok(atom(), atom(), integer()) -> true | false
-%% @doc Assert that a module exports a given function with a given arity.
-can_ok(M, F, A) when is_atom(M), is_atom(F), is_number(A) ->
-    Matches = [X || X <- M:module_info(exports), X == {F, A}],
-    etap:ok(length(Matches) > 0, lists:concat([M, " can ", F, "/", A])).
-
-%% @spec has_attrib(M, A) -> true | false
-%% M = atom()
-%% A = atom()
-%% @doc Asserts that a module has a given attribute.
-has_attrib(M, A) when is_atom(M), is_atom(A) ->
- etap:isnt(
- proplists:get_value(A, M:module_info(attributes), 'asdlkjasdlkads'),
- 'asdlkjasdlkads',
- lists:concat([M, " has attribute ", A])
- ).
-
-%% @spec is_attrib(M, A, V) -> true | false
-%% M = atom()
-%% A = atom()
-%% V = any()
-%% @doc Asserts that a module has a given attribute with a given value.
-is_attrib(M, A, V) when is_atom(M) andalso is_atom(A) ->
- etap:is(
- proplists:get_value(A, M:module_info(attributes)),
- [V],
- lists:concat([M, "'s ", A, " is ", V])
- ).
-
-%% @spec is_behaviour(M, B) -> true | false
-%%      M = atom()
-%%      B = atom()
-%% @doc Asserts that a given module has a specific behaviour.
-is_behaviour(M, B) when is_atom(M) andalso is_atom(B) ->
- is_attrib(M, behaviour, B).
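A sketch exercising the module assertions above against a stdlib module; the
choice of lists is illustrative:

    etap:plan(3),
    etap_can:loaded_ok(lists, "lists module loads"),
    etap_can:can_ok(lists, reverse),
    etap_can:can_ok(lists, reverse, 1),
    etap:end_tests().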
diff --git a/1.1.x/src/etap/etap_exception.erl b/1.1.x/src/etap/etap_exception.erl
deleted file mode 100644
index ba660727..00000000
--- a/1.1.x/src/etap/etap_exception.erl
+++ /dev/null
@@ -1,66 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @reference http://testanything.org/wiki/index.php/Main_Page
-%% @reference http://en.wikipedia.org/wiki/Test_Anything_Protocol
-%% @doc Adds exception based testing to the etap suite.
--module(etap_exception).
-
--export([dies_ok/2, lives_ok/2, throws_ok/3]).
-
-% ---
-% External / Public functions
-
-%% @doc Assert that an exception is raised when running a given function.
-dies_ok(F, Desc) ->
- case (catch F()) of
- {'EXIT', _} -> etap:ok(true, Desc);
- _ -> etap:ok(false, Desc)
- end.
-
-%% @doc Assert that an exception is not raised when running a given function.
-lives_ok(F, Desc) ->
- etap:is(try_this(F), success, Desc).
-
-%% @doc Assert that the exception thrown by a function matches the given exception.
-throws_ok(F, Exception, Desc) ->
- try F() of
- _ -> etap:ok(nok, Desc)
- catch
- _:E ->
- etap:is(E, Exception, Desc)
- end.
-
-% ---
-% Internal / Private functions
-
-%% @private
-%% @doc Run a function and catch any exceptions.
-try_this(F) when is_function(F, 0) ->
- try F() of
- _ -> success
- catch
- throw:E -> {throw, E};
- error:E -> {error, E};
- exit:E -> {exit, E}
- end.
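A sketch of the three exception assertions above; the funs are illustrative:

    etap:plan(3),
    etap_exception:dies_ok(fun() -> erlang:error(badarg) end, "error dies"),
    etap_exception:lives_ok(fun() -> ok end, "plain fun lives"),
    etap_exception:throws_ok(fun() -> throw(oops) end, oops, "throw matches"),
    etap:end_tests().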
diff --git a/1.1.x/src/etap/etap_process.erl b/1.1.x/src/etap/etap_process.erl
deleted file mode 100644
index 69f5ba00..00000000
--- a/1.1.x/src/etap/etap_process.erl
+++ /dev/null
@@ -1,42 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @doc Adds process/pid testing to the etap suite.
--module(etap_process).
-
--export([is_pid/2, is_alive/2, is_mfa/3]).
-
-% ---
-% External / Public functions
-
-%% @doc Assert that a given variable is a pid.
-is_pid(Pid, Desc) when is_pid(Pid) -> etap:ok(true, Desc);
-is_pid(_, Desc) -> etap:ok(false, Desc).
-
-%% @doc Assert that a given process/pid is alive.
-is_alive(Pid, Desc) ->
- etap:ok(erlang:is_process_alive(Pid), Desc).
-
-%% @doc Assert that the current function of a pid is a given {M, F, A} tuple.
-is_mfa(Pid, MFA, Desc) ->
- etap:is({current_function, MFA}, erlang:process_info(Pid, current_function), Desc).
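A sketch of the process assertions above, using a throwaway process:

    etap:plan(2),
    Pid = spawn(fun() -> receive stop -> ok end end),
    etap_process:is_pid(Pid, "spawn returns a pid"),
    etap_process:is_alive(Pid, "process is alive"),
    Pid ! stop,
    etap:end_tests().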
diff --git a/1.1.x/src/etap/etap_report.erl b/1.1.x/src/etap/etap_report.erl
deleted file mode 100644
index 6d692fb6..00000000
--- a/1.1.x/src/etap/etap_report.erl
+++ /dev/null
@@ -1,343 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @doc A module for creating nice looking code coverage reports.
--module(etap_report).
--export([create/0]).
-
-%% @spec create() -> ok
-%% @doc Create html code coverage reports for each module that code coverage
-%% data exists for.
-create() ->
- [cover:import(File) || File <- filelib:wildcard("cover/*.coverdata")],
- Modules = lists:foldl(
- fun(Module, Acc) ->
- [{Module, file_report(Module)} | Acc]
- end,
- [],
- cover:imported_modules()
- ),
- index(Modules).
-
-%% @private
-index(Modules) ->
- {ok, IndexFD} = file:open("cover/index.html", [write]),
- io:format(IndexFD, "<html><head><style>
- table.percent_graph { height: 12px; border:1px solid #E2E6EF; empty-cells: show; }
- table.percent_graph td.covered { height: 10px; background: #00f000; }
- table.percent_graph td.uncovered { height: 10px; background: #e00000; }
- .odd { background-color: #ddd; }
- .even { background-color: #fff; }
- </style></head>", []),
- io:format(IndexFD, "<body>", []),
- lists:foldl(
- fun({Module, {Good, Bad, Source}}, LastRow) ->
- case {Good + Bad, Source} of
- {0, _} -> LastRow;
- {_, none} -> LastRow;
- _ ->
- CovPer = round((Good / (Good + Bad)) * 100),
- UnCovPer = round((Bad / (Good + Bad)) * 100),
- RowClass = case LastRow of 1 -> "odd"; _ -> "even" end,
- io:format(IndexFD, "<div class=\"~s\">", [RowClass]),
- io:format(IndexFD, "<a href=\"~s\">~s</a>", [atom_to_list(Module) ++ "_report.html", atom_to_list(Module)]),
- io:format(IndexFD, "
- <table cellspacing='0' cellpadding='0' align='right'>
- <tr>
- <td><tt>~p%</tt>&nbsp;</td><td>
- <table cellspacing='0' class='percent_graph' cellpadding='0' width='100'>
- <tr><td class='covered' width='~p' /><td class='uncovered' width='~p' /></tr>
- </table>
- </td>
- </tr>
- </table>
- ", [CovPer, CovPer, UnCovPer]),
- io:format(IndexFD, "</div>", []),
- case LastRow of
- 1 -> 0;
- 0 -> 1
- end
- end
- end,
- 0,
- lists:sort(Modules)
- ),
- {TotalGood, TotalBad} = lists:foldl(
- fun({_, {Good, Bad, Source}}, {TGood, TBad}) ->
- case Source of none -> {TGood, TBad}; _ -> {TGood + Good, TBad + Bad} end
- end,
- {0, 0},
- Modules
- ),
- io:format(IndexFD, "<p>Generated on ~s.</p>~n", [etap:datetime({date(), time()})]),
- case TotalGood + TotalBad of
- 0 -> ok;
- _ ->
- TotalCovPer = round((TotalGood / (TotalGood + TotalBad)) * 100),
- TotalUnCovPer = round((TotalBad / (TotalGood + TotalBad)) * 100),
- io:format(IndexFD, "<div>", []),
- io:format(IndexFD, "Total
- <table cellspacing='0' cellpadding='0' align='right'>
- <tr>
- <td><tt>~p%</tt>&nbsp;</td><td>
- <table cellspacing='0' class='percent_graph' cellpadding='0' width='100'>
- <tr><td class='covered' width='~p' /><td class='uncovered' width='~p' /></tr>
- </table>
- </td>
- </tr>
- </table>
- ", [TotalCovPer, TotalCovPer, TotalUnCovPer]),
- io:format(IndexFD, "</div>", [])
- end,
- io:format(IndexFD, "</body></html>", []),
- file:close(IndexFD),
- ok.
-
-%% @private
-file_report(Module) ->
- {ok, Data} = cover:analyse(Module, calls, line),
- Source = find_source(Module),
- {Good, Bad} = collect_coverage(Data, {0, 0}),
- case {Source, Good + Bad} of
- {none, _} -> ok;
- {_, 0} -> ok;
- _ ->
- {ok, SourceFD} = file:open(Source, [read]),
- {ok, WriteFD} = file:open("cover/" ++ atom_to_list(Module) ++ "_report.html", [write]),
- io:format(WriteFD, "~s", [header(Module, Good, Bad)]),
- output_lines(Data, WriteFD, SourceFD, 1),
- io:format(WriteFD, "~s", [footer()]),
- file:close(WriteFD),
- file:close(SourceFD),
- ok
- end,
- {Good, Bad, Source}.
-
-%% @private
-collect_coverage([], Acc) -> Acc;
-collect_coverage([{{_, _}, 0} | Data], {Good, Bad}) ->
- collect_coverage(Data, {Good, Bad + 1});
-collect_coverage([_ | Data], {Good, Bad}) ->
- collect_coverage(Data, {Good + 1, Bad}).
-
-%% @private
-output_lines(Data, WriteFD, SourceFD, LineNumber) ->
- {Match, NextData} = datas_match(Data, LineNumber),
- case io:get_line(SourceFD, '') of
- eof -> ok;
- Line = "%% @todo" ++ _ ->
- io:format(WriteFD, "~s", [out_line(LineNumber, highlight, Line)]),
- output_lines(NextData, WriteFD, SourceFD, LineNumber + 1);
- Line = "% " ++ _ ->
- io:format(WriteFD, "~s", [out_line(LineNumber, none, Line)]),
- output_lines(NextData, WriteFD, SourceFD, LineNumber + 1);
- Line ->
- case Match of
- {true, CC} ->
- io:format(WriteFD, "~s", [out_line(LineNumber, CC, Line)]),
- output_lines(NextData, WriteFD, SourceFD, LineNumber + 1);
- false ->
- io:format(WriteFD, "~s", [out_line(LineNumber, none, Line)]),
- output_lines(NextData, WriteFD, SourceFD, LineNumber + 1)
- end
- end.
-
-%% @private
-out_line(Number, none, Line) ->
- PadNu = string:right(integer_to_list(Number), 5, $.),
- io_lib:format("<span class=\"marked\"><a name=\"line~p\"></a>~s ~s</span>", [Number, PadNu, Line]);
-out_line(Number, highlight, Line) ->
- PadNu = string:right(integer_to_list(Number), 5, $.),
- io_lib:format("<span class=\"highlight\"><a name=\"line~p\"></a>~s ~s</span>", [Number, PadNu, Line]);
-out_line(Number, 0, Line) ->
- PadNu = string:right(integer_to_list(Number), 5, $.),
- io_lib:format("<span class=\"uncovered\"><a name=\"line~p\"></a>~s ~s</span>", [Number, PadNu, Line]);
-out_line(Number, _, Line) ->
- PadNu = string:right(integer_to_list(Number), 5, $.),
- io_lib:format("<span class=\"covered\"><a name=\"line~p\"></a>~s ~s</span>", [Number, PadNu, Line]).
-
-%% @private
-datas_match([], _) -> {false, []};
-datas_match([{{_, Line}, CC} | Datas], LineNumber) when Line == LineNumber -> {{true, CC}, Datas};
-datas_match(Data, _) -> {false, Data}.
-
-%% @private
-find_source(Module) when is_atom(Module) ->
- Root = filename:rootname(Module),
- Dir = filename:dirname(Root),
- XDir = case os:getenv("SRC") of false -> "src"; X -> X end,
- find_source([
- filename:join([Dir, Root ++ ".erl"]),
- filename:join([Dir, "..", "src", Root ++ ".erl"]),
- filename:join([Dir, "src", Root ++ ".erl"]),
- filename:join([Dir, "elibs", Root ++ ".erl"]),
- filename:join([Dir, "..", "elibs", Root ++ ".erl"]),
- filename:join([Dir, XDir, Root ++ ".erl"])
- ]);
-find_source([]) -> none;
-find_source([Test | Tests]) ->
- case filelib:is_file(Test) of
- true -> Test;
- false -> find_source(Tests)
- end.
-
-%% @private
-header(Module, Good, Bad) ->
- io:format("Good ~p~n", [Good]),
- io:format("Bad ~p~n", [Bad]),
- CovPer = round((Good / (Good + Bad)) * 100),
- UnCovPer = round((Bad / (Good + Bad)) * 100),
- io:format("CovPer ~p~n", [CovPer]),
- io_lib:format("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">
- <html lang='en' xml:lang='en' xmlns='http://www.w3.org/1999/xhtml'>
- <head>
- <title>~s - C0 code coverage information</title>
- <style type='text/css'>body { background-color: rgb(240, 240, 245); }</style>
- <style type='text/css'>span.marked0 {
- background-color: rgb(185, 210, 200);
- display: block;
- }
- span.marked { display: block; background-color: #ffffff; }
- span.highlight { display: block; background-color: #fff9d7; }
- span.covered { display: block; background-color: #f7f7f7 ; }
- span.uncovered { display: block; background-color: #ffebe8 ; }
- span.overview {
- border-bottom: 1px solid #E2E6EF;
- }
- div.overview {
- border-bottom: 1px solid #E2E6EF;
- }
- body {
- font-family: verdana, arial, helvetica;
- }
- div.footer {
- font-size: 68%;
- margin-top: 1.5em;
- }
- h1, h2, h3, h4, h5, h6 {
- margin-bottom: 0.5em;
- }
- h5 {
- margin-top: 0.5em;
- }
- .hidden {
- display: none;
- }
- div.separator {
- height: 10px;
- }
- table.percent_graph {
- height: 12px;
- border: 1px solid #E2E6EF;
- empty-cells: show;
- }
- table.percent_graph td.covered {
- height: 10px;
- background: #00f000;
- }
- table.percent_graph td.uncovered {
- height: 10px;
- background: #e00000;
- }
- table.percent_graph td.NA {
- height: 10px;
- background: #eaeaea;
- }
- table.report {
- border-collapse: collapse;
- width: 100%;
- }
- table.report td.heading {
- background: #dcecff;
- border: 1px solid #E2E6EF;
- font-weight: bold;
- text-align: center;
- }
- table.report td.heading:hover {
- background: #c0ffc0;
- }
- table.report td.text {
- border: 1px solid #E2E6EF;
- }
- table.report td.value {
- text-align: right;
- border: 1px solid #E2E6EF;
- }
- table.report tr.light {
- background-color: rgb(240, 240, 245);
- }
- table.report tr.dark {
- background-color: rgb(230, 230, 235);
- }
- </style>
- </head>
- <body>
- <h3>C0 code coverage information</h3>
- <p>Generated on ~s with <a href='http://github.com/ngerakines/etap'>etap 0.3.4</a>.
- </p>
- <table class='report'>
- <thead>
- <tr>
- <td class='heading'>Name</td>
- <td class='heading'>Total lines</td>
- <td class='heading'>Lines of code</td>
- <td class='heading'>Total coverage</td>
- <td class='heading'>Code coverage</td>
- </tr>
- </thead>
- <tbody>
- <tr class='light'>
-
- <td>
- <a href='~s'>~s</a>
- </td>
- <td class='value'>
- <tt>??</tt>
- </td>
- <td class='value'>
- <tt>??</tt>
- </td>
- <td class='value'>
- <tt>??</tt>
- </td>
- <td>
- <table cellspacing='0' cellpadding='0' align='right'>
- <tr>
- <td><tt>~p%</tt>&nbsp;</td><td>
- <table cellspacing='0' class='percent_graph' cellpadding='0' width='100'>
- <tr><td class='covered' width='~p' /><td class='uncovered' width='~p' /></tr>
- </table>
- </td>
- </tr>
- </table>
- </td>
- </tr>
- </tbody>
- </table><pre>", [Module, etap:datetime({date(), time()}), atom_to_list(Module) ++ "_report.html", Module, CovPer, CovPer, UnCovPer]).
-
-%% @private
-footer() ->
- "</pre><hr /><p>Generated using <a href='http://github.com/ngerakines/etap'>etap 0.3.4</a>.</p>
- </body>
- </html>
- ".
diff --git a/1.1.x/src/etap/etap_request.erl b/1.1.x/src/etap/etap_request.erl
deleted file mode 100644
index 9fd23aca..00000000
--- a/1.1.x/src/etap/etap_request.erl
+++ /dev/null
@@ -1,89 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @doc Provides test functionality against a specific web request. Many of
-%% the exported methods can be used to build your own more complex tests.
--module(etap_request, [Method, Url, InHeaders, InBody, Status, OutHeaders, OutBody]).
-
--export([status_is/2]).
-
--export([
- method/0, url/0, status/0, status_code/0, status_line/0, rheaders/0,
- has_rheader/1, rheader/1, rbody/0, header_is/3, body_is/2,
- body_has_string/2
-]).
-
-% ---
-% Tests
-
-%% @doc Assert that the response status code is the given status code.
-status_is(Code, Desc) ->
- etap:is(status_code(), Code, Desc).
-
-%% @doc Assert that a given response header has the given value.
-header_is(Name, Value, Desc) ->
-    etap:is(rheader(Name), Value, Desc).
-
-%% @doc Assert that the response body is the given value.
-body_is(Value, Desc) ->
-    etap:is(rbody(), Value, Desc).
-
-%% @doc Assert that the response body contains the given string.
-body_has_string(String, Desc) when is_list(OutBody), is_list(String) ->
-    etap_string:contains_ok(OutBody, String, Desc).
-
-% ---
-% Accessor functions
-
-%% @doc Access a request's method.
-method() -> Method.
-
-%% @doc Access a request's URL.
-url() -> Url.
-
-%% @doc Access a request's status.
-status() -> Status.
-
-%% @doc Access a request's status code.
-status_code() ->
- {_, Code, _} = Status,
- Code.
-
-%% @doc Access a request's status line.
-status_line() ->
- {_, _, Line} = Status,
- Line.
-
-%% @doc Access a request's response headers.
-rheaders() -> OutHeaders.
-
-%% @doc Determine whether a specific response header exists.
-has_rheader(Key) ->
- lists:keymember(Key, 1, OutHeaders).
-
-%% @doc Return a specific response header.
-rheader(Key) ->
- case lists:keysearch(Key, 1, OutHeaders) of
- false -> undefined;
- {value, {Key, Value}} -> Value
- end.
-
-%% @doc Access the request's response body.
-rbody() -> OutBody.
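etap_request is a parameterized module (a pre-R16 Erlang feature), so
instances come from etap_request:new/7, normally via etap_web:build_request/4.
A hand-built sketch with illustrative values:

    Req = etap_request:new(get, "http://example.com/", [], [],
                           {"HTTP/1.1", 200, "OK"},
                           [{"content-type", "text/html"}], "<html/>"),
    Req:status_is(200, "got a 200"),
    Req:header_is("content-type", "text/html", "content type matches").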
diff --git a/1.1.x/src/etap/etap_string.erl b/1.1.x/src/etap/etap_string.erl
deleted file mode 100644
index 67aa3d54..00000000
--- a/1.1.x/src/etap/etap_string.erl
+++ /dev/null
@@ -1,47 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @author Nick Gerakines <nick@gerakines.net> [http://socklabs.com/]
-%% @copyright 2008 Nick Gerakines
-%% @doc Provide testing functionality for strings.
--module(etap_string).
-
--export([contains_ok/3, is_before/4]).
-
-%% @spec contains_ok(string(), string(), string()) -> true | false
-%% @doc Assert that a string is contained in another string.
-contains_ok(Source, String, Desc) ->
- etap:isnt(
- string:str(Source, String),
- 0,
- Desc
- ).
-
-%% @spec is_before(string(), string(), string(), string()) -> true | false
-%% @doc Assert that a string comes before another string within a larger body.
-is_before(Source, StringA, StringB, Desc) ->
- etap:is_greater(
- string:str(Source, StringB),
- string:str(Source, StringA),
- Desc
- ).
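A sketch of the two string assertions above:

    etap:plan(2),
    etap_string:contains_ok("hello world", "world", "substring present"),
    etap_string:is_before("hello world", "hello", "world",
                          "hello precedes world"),
    etap:end_tests().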
diff --git a/1.1.x/src/etap/etap_web.erl b/1.1.x/src/etap/etap_web.erl
deleted file mode 100644
index fb7aee16..00000000
--- a/1.1.x/src/etap/etap_web.erl
+++ /dev/null
@@ -1,65 +0,0 @@
-%% Copyright (c) 2008-2009 Nick Gerakines <nick@gerakines.net>
-%%
-%% Permission is hereby granted, free of charge, to any person
-%% obtaining a copy of this software and associated documentation
-%% files (the "Software"), to deal in the Software without
-%% restriction, including without limitation the rights to use,
-%% copy, modify, merge, publish, distribute, sublicense, and/or sell
-%% copies of the Software, and to permit persons to whom the
-%% Software is furnished to do so, subject to the following
-%% conditions:
-%%
-%% The above copyright notice and this permission notice shall be
-%% included in all copies or substantial portions of the Software.
-%%
-%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-%% OTHER DEALINGS IN THE SOFTWARE.
-%%
-%% @author Nick Gerakines <nick@gerakines.net> [http://socklabs.com/]
-%% @copyright 2008 Nick Gerakines
-%% @todo Support cookies.
-%% @doc Provide testing functionality for web requests.
--module(etap_web).
-
--export([simple_200/2, simple_404/2, build_request/4]).
-
-%% @doc Fetch a url and verify that it returned a 200 status.
-simple_200(Url, Desc) ->
- Request = build_request(get, Url, [], []),
- Request:status_is(200, Desc).
-
-%% @doc Fetch a url and verify that it returned a 404 status.
-simple_404(Url, Desc) ->
- Request = build_request(get, Url, [], []),
- Request:status_is(404, Desc).
-
-%% @doc Create and return a request structure.
-build_request(Method, Url, Headers, Body)
- when Method==options;Method==get;Method==head;Method==delete;Method==trace ->
- try http:request(Method, {Url, Headers}, [{autoredirect, false}], []) of
- {ok, {OutStatus, OutHeaders, OutBody}} ->
- etap_request:new(Method, Url, Headers, Body, OutStatus, OutHeaders, OutBody);
- _ -> error
- catch
- _:_ -> error
- end;
-
-%% @doc Create and return a request structure.
-build_request(Method, Url, Headers, Body) when Method == post; Method == put ->
- ContentType = case lists:keysearch("Content-Type", 1, Headers) of
- {value, {"Content-Type", X}} -> X;
- _ -> []
- end,
- try http:request(Method, {Url, Headers, ContentType, Body}, [{autoredirect, false}], []) of
- {ok, {OutStatus, OutHeaders, OutBody}} ->
- etap_request:new(Method, Url, Headers, Body, OutStatus, OutHeaders, OutBody);
- _ -> error
- catch
- _:_ -> error
- end.
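A sketch of a whole-request check built on the helpers above. It assumes the
inets application (which provides the legacy http:request/4 used here) is
started and that the URL, shown here as a local CouchDB, is reachable:

    application:start(inets),
    etap:plan(1),
    etap_web:simple_200("http://localhost:5984/", "root URL responds"),
    etap:end_tests().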
diff --git a/1.1.x/src/ibrowse/Makefile.am b/1.1.x/src/ibrowse/Makefile.am
deleted file mode 100644
index 869bd107..00000000
--- a/1.1.x/src/ibrowse/Makefile.am
+++ /dev/null
@@ -1,49 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-ibrowseebindir = $(localerlanglibdir)/ibrowse-2.2.0/ebin
-
-ibrowse_file_collection = \
- ibrowse.app.in \
- ibrowse.erl \
- ibrowse_app.erl \
- ibrowse_http_client.erl \
- ibrowse_lb.erl \
- ibrowse_lib.erl \
- ibrowse_sup.erl \
- ibrowse_test.erl
-
-ibrowseebin_make_generated_file_list = \
- ibrowse.app \
- ibrowse.beam \
- ibrowse_app.beam \
- ibrowse_http_client.beam \
- ibrowse_lb.beam \
- ibrowse_lib.beam \
- ibrowse_sup.beam \
- ibrowse_test.beam
-
-ibrowseebin_DATA = \
- $(ibrowseebin_make_generated_file_list)
-
-EXTRA_DIST = \
- $(ibrowse_file_collection) \
- ibrowse.hrl
-
-CLEANFILES = \
- $(ibrowseebin_make_generated_file_list)
-
-%.app: %.app.in
- cp $< $@
-
-%.beam: %.erl
- $(ERLC) $(ERLC_FLAGS) $<
diff --git a/1.1.x/src/ibrowse/ibrowse.app.in b/1.1.x/src/ibrowse/ibrowse.app.in
deleted file mode 100644
index af46d8a5..00000000
--- a/1.1.x/src/ibrowse/ibrowse.app.in
+++ /dev/null
@@ -1,13 +0,0 @@
-{application, ibrowse,
- [{description, "HTTP client application"},
- {vsn, "2.2.0"},
- {modules, [ ibrowse,
- ibrowse_http_client,
- ibrowse_app,
- ibrowse_sup,
- ibrowse_lib,
- ibrowse_lb ]},
- {registered, []},
- {applications, [kernel,stdlib,sasl]},
- {env, []},
- {mod, {ibrowse_app, []}}]}.
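Given the mod entry above, starting the application boots ibrowse_app, which
in turn starts ibrowse_sup and the named ibrowse process. A minimal session
might look like this (the URL is illustrative):

    application:start(sasl),
    application:start(ibrowse),
    {ok, Status, _Headers, _Body} =
        ibrowse:send_req("http://localhost:5984/", [], get).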
diff --git a/1.1.x/src/ibrowse/ibrowse.erl b/1.1.x/src/ibrowse/ibrowse.erl
deleted file mode 100644
index f70f92f1..00000000
--- a/1.1.x/src/ibrowse/ibrowse.erl
+++ /dev/null
@@ -1,863 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% File : ibrowse.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description : Load balancer process for HTTP client connections.
-%%%
-%%% Created : 11 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%%-------------------------------------------------------------------
-%% @author Chandrashekhar Mullaparthi <chandrashekhar dot mullaparthi at gmail dot com>
-%% @copyright 2005-2011 Chandrashekhar Mullaparthi
-%% @version 2.1.3
-%% @doc The ibrowse application implements an HTTP/1.1 client in Erlang. This
-%% module implements the API of the HTTP client. There is one named
-%% process called 'ibrowse' which assists in load balancing and in
-%% maintaining configuration. There is one load-balancing process per
-%% unique webserver, and one process to handle each TCP connection to a
-%% webserver (implemented in the module ibrowse_http_client). Multiple
-%% connections to a webserver are set up based on the settings for each
-%% webserver. The ibrowse process also determines which connection to
-%% pipeline a certain request on. The functions to call are send_req/3,
-%% send_req/4, send_req/5 and send_req/6.
-%%
-%% <p>Here are a few sample invocations.</p>
-%%
-%% <code>
-%% ibrowse:send_req("http://intranet/messenger/", [], get).
-%% <br/><br/>
-%%
-%% ibrowse:send_req("http://www.google.com/", [], get, [],
-%% [{proxy_user, "XXXXX"},
-%% {proxy_password, "XXXXX"},
-%% {proxy_host, "proxy"},
-%% {proxy_port, 8080}], 1000).
-%% <br/><br/>
-%%
-%% ibrowse:send_req("http://www.erlang.org/download/otp_src_R10B-3.tar.gz", [], get, [],
-%% [{proxy_user, "XXXXX"},
-%% {proxy_password, "XXXXX"},
-%% {proxy_host, "proxy"},
-%% {proxy_port, 8080},
-%% {save_response_to_file, true}], 1000).
-%% <br/><br/>
-%%
-%% ibrowse:send_req("http://www.erlang.org", [], head).
-%%
-%% <br/><br/>
-%% ibrowse:send_req("http://www.sun.com", [], options).
-%%
-%% <br/><br/>
-%% ibrowse:send_req("http://www.bbc.co.uk", [], trace).
-%%
-%% <br/><br/>
-%% ibrowse:send_req("http://www.google.com", [], get, [],
-%% [{stream_to, self()}]).
-%% </code>
-%%
-
--module(ibrowse).
--behaviour(gen_server).
-%%--------------------------------------------------------------------
-%% Include files
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% External exports
--export([start_link/0, start/0, stop/0]).
-
-%% gen_server callbacks
--export([init/1, handle_call/3, handle_cast/2, handle_info/2,
- terminate/2, code_change/3]).
-
-%% API interface
--export([
- rescan_config/0,
- rescan_config/1,
- get_config_value/1,
- get_config_value/2,
- spawn_worker_process/1,
- spawn_worker_process/2,
- spawn_link_worker_process/1,
- spawn_link_worker_process/2,
- stop_worker_process/1,
- send_req/3,
- send_req/4,
- send_req/5,
- send_req/6,
- send_req_direct/4,
- send_req_direct/5,
- send_req_direct/6,
- send_req_direct/7,
- stream_next/1,
- stream_close/1,
- set_max_sessions/3,
- set_max_pipeline_size/3,
- set_dest/3,
- trace_on/0,
- trace_off/0,
- trace_on/2,
- trace_off/2,
- all_trace_off/0,
- show_dest_status/0,
- show_dest_status/2
- ]).
-
--ifdef(debug).
--compile(export_all).
--endif.
-
--import(ibrowse_lib, [
- parse_url/1,
- get_value/3,
- do_trace/2
- ]).
-
--record(state, {trace = false}).
-
--include("ibrowse.hrl").
--include_lib("stdlib/include/ms_transform.hrl").
-
--define(DEF_MAX_SESSIONS,10).
--define(DEF_MAX_PIPELINE_SIZE,10).
-
-%%====================================================================
-%% External functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Function: start_link/0
-%% Description: Starts the server
-%%--------------------------------------------------------------------
-%% @doc Starts the ibrowse process linked to the calling process. Usually invoked by the supervisor ibrowse_sup
-%% @spec start_link() -> {ok, pid()}
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-%% @doc Starts the ibrowse process without linking. Useful when testing using the shell
-start() ->
- gen_server:start({local, ?MODULE}, ?MODULE, [], [{debug, []}]).
-
-%% @doc Stop the ibrowse process. Useful when testing using the shell.
-stop() ->
- catch gen_server:call(ibrowse, stop).
-
-%% @doc This is the basic function to send an HTTP request.
-%% The Status return value indicates the HTTP status code returned by the webserver
-%% @spec send_req(Url::string(), Headers::headerList(), Method::method()) -> response()
-%% headerList() = [{header(), value()}]
-%% header() = atom() | string()
-%% value() = term()
-%% method() = get | post | head | options | put | delete | trace | mkcol | propfind | proppatch | lock | unlock | move | copy
-%% Status = string()
-%% ResponseHeaders = [respHeader()]
-%% respHeader() = {headerName(), headerValue()}
-%% headerName() = string()
-%% headerValue() = string()
-%% response() = {ok, Status, ResponseHeaders, ResponseBody} | {ibrowse_req_id, req_id() } | {error, Reason}
-%% req_id() = term()
-%% ResponseBody = string() | {file, Filename}
-%% Reason = term()
-send_req(Url, Headers, Method) ->
- send_req(Url, Headers, Method, [], []).
-
-%% @doc Same as send_req/3.
-%% If a list is specified for the body it has to be a flat list. The body can also be a fun/0 or a fun/1. <br/>
-%% If fun/0, the connection handling process will repeatedly call the fun until it returns an error or eof. <pre>Fun() = {ok, Data} | eof</pre><br/>
-%% If fun/1, the connection handling process will repeatedly call the fun with the supplied state until it returns an error or eof. <pre>Fun(State) = {ok, Data} | {ok, Data, NewState} | eof</pre>
-%% @spec send_req(Url, Headers, Method::method(), Body::body()) -> response()
-%% body() = [] | string() | binary() | fun_arity_0() | {fun_arity_1(), initial_state()}
-%% initial_state() = term()
-send_req(Url, Headers, Method, Body) ->
- send_req(Url, Headers, Method, Body, []).
-
-%% @doc Same as send_req/4.
-%% For a description of SSL Options, look in the <a href="http://www.erlang.org/doc/apps/ssl/index.html">ssl</a> manpage. If the
-%% HTTP Version to use is not specified, the default is 1.1.
-%% <br/>
-%% <ul>
-%% <li>The <code>host_header</code> option is useful in the case where ibrowse is
-%% connecting to a component such as <a
-%% href="http://www.stunnel.org">stunnel</a> which then sets up a
-%% secure connection to a webserver. In this case, the URL supplied to
-%% ibrowse must have the stunnel host/port details, but that won't
-%% make sense to the destination webserver. This option can then be
-%% used to specify what should go in the <code>Host</code> header in
-%% the request.</li>
-%% <li>The <code>stream_to</code> option can be used to have the HTTP
-%% response streamed to a process as messages as data arrives on the
-%% socket. If the calling process wishes to control the rate at which
-%% data is received from the server, the option <code>{stream_to,
-%% {process(), once}}</code> can be specified. The calling process
-%% will have to invoke <code>ibrowse:stream_next(Request_id)</code> to
-%% receive the next packet.</li>
-%%
-%% <li>When both the options <code>save_response_to_file</code> and <code>stream_to</code>
-%% are specified, the former takes precedence.</li>
-%%
-%% <li>For the <code>save_response_to_file</code> option, the response body is saved to
-%% file only if the status code is in the 200-299 range. If not, the response body is returned
-%% as a string.</li>
-%% <li>Whenever an error occurs in the processing of a request, ibrowse will return as much
-%% information as it has, such as HTTP Status Code and HTTP Headers. When this happens, the response
-%% is of the form <code>{error, {Reason, {stat_code, StatusCode}, HTTP_headers}}</code></li>
-%%
-%% <li>The <code>inactivity_timeout</code> option is useful when
-%% dealing with large response bodies and/or slow links. In these
-%% cases, it might be hard to estimate how long a request will take to
-%% complete. In such cases, the client might want to timeout if no
-%% data has been received on the link for a certain time interval.
-%%
-%% This value is also used to close connections which are not in use for
-%% the specified timeout value.
-%% </li>
-%%
-%% <li>
-%% The <code>connect_timeout</code> option is to specify how long the
-%% client process should wait for connection establishment. This is
-%% useful in scenarios where connections to servers are usually setup
-%% very fast, but responses might take much longer compared to
-%% connection setup. In such cases, it is better for the calling
-%% process to timeout faster if there is a problem (DNS lookup
-%% delays/failures, network routing issues, etc). The total timeout
-%% value specified for the request will be enforced. To illustrate using
-%% an example:
-%% <code>
-%% ibrowse:send_req("http://www.example.com/cgi-bin/request", [], get, [], [{connect_timeout, 100}], 1000).
-%% </code>
-%% In the above invocation, if the connection isn't established within
-%% 100 milliseconds, the request will fail with
-%% <code>{error, conn_failed}</code>.<br/>
-%% If connection setup succeeds, the total time allowed for the
-%% request to complete will be 1000 milliseconds minus the time taken
-%% for connection setup.
-%% </li>
-%%
-%% <li> The <code>socket_options</code> option can be used to set
-%% specific options on the socket. The <code>{active, true | false | once}</code>
-%% and <code>{packet_type, Packet_type}</code> will be filtered out by ibrowse. </li>
-%%
-%% <li> The <code>headers_as_is</code> option is to enable the caller
-%% to send headers exactly as specified in the request without ibrowse
-%% adding some of its own. Required for some picky servers apparently. </li>
-%%
-%% <li>The <code>give_raw_headers</code> option is to enable the
-%% caller to get access to the raw status line and raw unparsed
-%% headers. Not quite sure why someone would want this, but one of my
-%% users asked for it, so here it is. </li>
-%%
-%% <li> The <code>preserve_chunked_encoding</code> option enables the caller
-%% to receive the raw data stream when the Transfer-Encoding of the server
-%% response is Chunked.
-%% </li>
-%% </ul>
-%%
-%% @spec send_req(Url::string(), Headers::headerList(), Method::method(), Body::body(), Options::optionList()) -> response()
-%% optionList() = [option()]
-%% option() = {max_sessions, integer()} |
-%% {response_format,response_format()}|
-%% {stream_chunk_size, integer()} |
-%% {max_pipeline_size, integer()} |
-%% {trace, boolean()} |
-%% {is_ssl, boolean()} |
-%% {ssl_options, [SSLOpt]} |
-%% {pool_name, atom()} |
-%% {proxy_host, string()} |
-%% {proxy_port, integer()} |
-%% {proxy_user, string()} |
-%% {proxy_password, string()} |
-%% {use_absolute_uri, boolean()} |
-%% {basic_auth, {username(), password()}} |
-%% {cookie, string()} |
-%% {content_length, integer()} |
-%% {content_type, string()} |
-%% {save_response_to_file, srtf()} |
-%% {stream_to, stream_to()} |
-%% {http_vsn, {MajorVsn, MinorVsn}} |
-%% {host_header, string()} |
-%% {inactivity_timeout, integer()} |
-%% {connect_timeout, integer()} |
-%% {socket_options, Sock_opts} |
-%% {transfer_encoding, {chunked, ChunkSize}} |
-%% {headers_as_is, boolean()} |
-%% {give_raw_headers, boolean()} |
-%% {preserve_chunked_encoding,boolean()}
-%%
-%% stream_to() = process() | {process(), once}
-%% process() = pid() | atom()
-%% username() = string()
-%% password() = string()
-%% SSLOpt = term()
-%% Sock_opts = [Sock_opt]
-%% Sock_opt = term()
-%% ChunkSize = integer()
-%% srtf() = boolean() | filename()
-%% filename() = string()
-%% response_format() = list | binary
-send_req(Url, Headers, Method, Body, Options) ->
- send_req(Url, Headers, Method, Body, Options, 30000).
-
-%% @doc Same as send_req/5.
-%% All timeout values are in milliseconds.
-%% @spec send_req(Url, Headers::headerList(), Method::method(), Body::body(), Options::optionList(), Timeout) -> response()
-%% Timeout = integer() | infinity
-send_req(Url, Headers, Method, Body, Options, Timeout) ->
- case catch parse_url(Url) of
- #url{host = Host,
- port = Port,
- protocol = Protocol} = Parsed_url ->
- Lb_pid = case ets:lookup(ibrowse_lb, {Host, Port}) of
- [] ->
- get_lb_pid(Parsed_url);
- [#lb_pid{pid = Lb_pid_1}] ->
- Lb_pid_1
- end,
- Max_sessions = get_max_sessions(Host, Port, Options),
- Max_pipeline_size = get_max_pipeline_size(Host, Port, Options),
- Options_1 = merge_options(Host, Port, Options),
- {SSLOptions, IsSSL} =
- case (Protocol == https) orelse
- get_value(is_ssl, Options_1, false) of
- false -> {[], false};
- true -> {get_value(ssl_options, Options_1, []), true}
- end,
- try_routing_request(Lb_pid, Parsed_url,
- Max_sessions,
- Max_pipeline_size,
- {SSLOptions, IsSSL},
- Headers, Method, Body, Options_1, Timeout, 0);
- Err ->
- {error, {url_parsing_failed, Err}}
- end.
-
-try_routing_request(Lb_pid, Parsed_url,
- Max_sessions,
- Max_pipeline_size,
- {SSLOptions, IsSSL},
- Headers, Method, Body, Options_1, Timeout, Try_count) when Try_count < 3 ->
- case ibrowse_lb:spawn_connection(Lb_pid, Parsed_url,
- Max_sessions,
- Max_pipeline_size,
- {SSLOptions, IsSSL}) of
- {ok, Conn_Pid} ->
- case do_send_req(Conn_Pid, Parsed_url, Headers,
- Method, Body, Options_1, Timeout) of
- {error, sel_conn_closed} ->
- try_routing_request(Lb_pid, Parsed_url,
- Max_sessions,
- Max_pipeline_size,
- {SSLOptions, IsSSL},
- Headers, Method, Body, Options_1, Timeout, Try_count + 1);
- Res ->
- Res
- end;
- Err ->
- Err
- end;
-try_routing_request(_, _, _, _, _, _, _, _, _, _, _) ->
- {error, retry_later}.
-
-merge_options(Host, Port, Options) ->
- Config_options = get_config_value({options, Host, Port}, []),
- lists:foldl(
- fun({Key, Val}, Acc) ->
- case lists:keysearch(Key, 1, Options) of
- false ->
- [{Key, Val} | Acc];
- _ ->
- Acc
- end
- end, Options, Config_options).
-
-get_lb_pid(Url) ->
- gen_server:call(?MODULE, {get_lb_pid, Url}).
-
-get_max_sessions(Host, Port, Options) ->
- get_value(max_sessions, Options,
- get_config_value({max_sessions, Host, Port},
- default_max_sessions())).
-
-get_max_pipeline_size(Host, Port, Options) ->
- get_value(max_pipeline_size, Options,
- get_config_value({max_pipeline_size, Host, Port},
- default_max_pipeline_size())).
-
-default_max_sessions() ->
- safe_get_env(ibrowse, default_max_sessions, ?DEF_MAX_SESSIONS).
-
-default_max_pipeline_size() ->
- safe_get_env(ibrowse, default_max_pipeline_size, ?DEF_MAX_PIPELINE_SIZE).
-
-safe_get_env(App, Key, Def_val) ->
- case application:get_env(App, Key) of
- undefined ->
- Def_val;
- {ok, Val} ->
- Val
- end.
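-
-%% Illustrative sketch (not part of the original module): the defaults
-%% above can be overridden via the application environment, e.g.
-%%
-%%   application:set_env(ibrowse, default_max_sessions, 20),
-%%   application:set_env(ibrowse, default_max_pipeline_size, 20).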
-
-%% @doc Deprecated. Use set_max_sessions/3 and set_max_pipeline_size/3
-%% to achieve the same effect.
-set_dest(Host, Port, [{max_sessions, Max} | T]) ->
- set_max_sessions(Host, Port, Max),
- set_dest(Host, Port, T);
-set_dest(Host, Port, [{max_pipeline_size, Max} | T]) ->
- set_max_pipeline_size(Host, Port, Max),
- set_dest(Host, Port, T);
-set_dest(Host, Port, [{trace, Bool} | T]) when Bool == true; Bool == false ->
-    %% Forward the given Bool rather than a hard-coded true, so that
-    %% {trace, false} can actually turn tracing off.
-    ibrowse ! {trace, Bool, Host, Port},
- set_dest(Host, Port, T);
-set_dest(_Host, _Port, [H | _]) ->
- exit({invalid_option, H});
-set_dest(_, _, []) ->
- ok.
-
-%% @doc Set the maximum number of connections allowed to a specific Host:Port.
-%% @spec set_max_sessions(Host::string(), Port::integer(), Max::integer()) -> ok
-set_max_sessions(Host, Port, Max) when is_integer(Max), Max > 0 ->
- gen_server:call(?MODULE, {set_config_value, {max_sessions, Host, Port}, Max}).
-
-%% @doc Set the maximum pipeline size for each connection to a specific Host:Port.
-%% @spec set_max_pipeline_size(Host::string(), Port::integer(), Max::integer()) -> ok
-set_max_pipeline_size(Host, Port, Max) when is_integer(Max), Max > 0 ->
- gen_server:call(?MODULE, {set_config_value, {max_pipeline_size, Host, Port}, Max}).
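-
-%% Illustrative usage (hypothetical host; not part of the original module):
-%%
-%%   ok = ibrowse:set_max_sessions("www.example.com", 80, 20),
-%%   ok = ibrowse:set_max_pipeline_size("www.example.com", 80, 10).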
-
-do_send_req(Conn_Pid, Parsed_url, Headers, Method, Body, Options, Timeout) ->
- case catch ibrowse_http_client:send_req(Conn_Pid, Parsed_url,
- Headers, Method, ensure_bin(Body),
- Options, Timeout) of
- {'EXIT', {timeout, _}} ->
- {error, req_timedout};
- {'EXIT', {noproc, {gen_server, call, [Conn_Pid, _, _]}}} ->
- {error, sel_conn_closed};
- {error, connection_closed} ->
- {error, sel_conn_closed};
- {'EXIT', Reason} ->
- {error, {'EXIT', Reason}};
-        %% Note: fresh variable names are needed in this pattern; reusing
-        %% the bound Headers/Body arguments would match against the request
-        %% values instead of binding the response values.
-        {ok, St_code, Resp_headers, Resp_body} = Ret when is_binary(Resp_body) ->
-            case get_value(response_format, Options, list) of
-                list ->
-                    {ok, St_code, Resp_headers, binary_to_list(Resp_body)};
-                binary ->
-                    Ret
-            end;
- Ret ->
- Ret
- end.
-
-ensure_bin(L) when is_list(L) -> list_to_binary(L);
-ensure_bin(B) when is_binary(B) -> B;
-ensure_bin(Fun) when is_function(Fun) -> Fun;
-ensure_bin({Fun}) when is_function(Fun) -> Fun;
-ensure_bin({Fun, _} = Body) when is_function(Fun) -> Body.
-
-%% @doc Creates an HTTP client process to the specified Host:Port which
-%% is not part of the load balancing pool. This is useful in cases
-%% where some requests to a webserver might take a long time whereas
-%% some might take a very short time. To avoid getting these quick
-%% requests stuck in the pipeline behind time-consuming requests, use
-%% this function to get a handle to a connection process. <br/>
-%% <b>Note:</b> Calling this function only creates a worker process. No connection
-%% is set up. The connection attempt is made only when the first
-%% request is sent via any of the send_req_direct/4,5,6,7 functions.<br/>
-%% <b>Note:</b> It is the responsibility of the calling process to control
-%% pipeline size on such connections.
-%%
-%% @spec spawn_worker_process(Url::string()) -> {ok, pid()}
-spawn_worker_process(Url) ->
- ibrowse_http_client:start(Url).
-
-%% @doc Same as spawn_worker_process/1 but takes as input a Host and Port
-%% instead of a URL.
-%% @spec spawn_worker_process(Host::string(), Port::integer()) -> {ok, pid()}
-spawn_worker_process(Host, Port) ->
- ibrowse_http_client:start({Host, Port}).
-
-%% @doc Same as spawn_worker_process/1 except that the calling process
-%% is linked to the worker process which is spawned.
-%% @spec spawn_link_worker_process(Url::string()) -> {ok, pid()}
-spawn_link_worker_process(Url) ->
- ibrowse_http_client:start_link(Url).
-
-%% @doc Same as spawn_worker_process/2 except that the calling process
-%% is linked to the worker process which is spawned.
-%% @spec spawn_link_worker_process(Host::string(), Port::integer()) -> {ok, pid()}
-spawn_link_worker_process(Host, Port) ->
- ibrowse_http_client:start_link({Host, Port}).
-
-%% @doc Terminate a worker process spawned using
-%% spawn_worker_process/2 or spawn_link_worker_process/2. Requests in
-%% progress will get the error response <pre>{error, closing_on_request}</pre>
-%% @spec stop_worker_process(Conn_pid::pid()) -> ok
-stop_worker_process(Conn_pid) ->
- ibrowse_http_client:stop(Conn_pid).
-
-%% @doc Same as send_req/3 except that the first argument is the PID
-%% returned by spawn_worker_process/2 or spawn_link_worker_process/2
-send_req_direct(Conn_pid, Url, Headers, Method) ->
- send_req_direct(Conn_pid, Url, Headers, Method, [], []).
-
-%% @doc Same as send_req/4 except that the first argument is the PID
-%% returned by spawn_worker_process/2 or spawn_link_worker_process/2
-send_req_direct(Conn_pid, Url, Headers, Method, Body) ->
- send_req_direct(Conn_pid, Url, Headers, Method, Body, []).
-
-%% @doc Same as send_req/5 except that the first argument is the PID
-%% returned by spawn_worker_process/2 or spawn_link_worker_process/2
-send_req_direct(Conn_pid, Url, Headers, Method, Body, Options) ->
- send_req_direct(Conn_pid, Url, Headers, Method, Body, Options, 30000).
-
-%% @doc Same as send_req/6 except that the first argument is the PID
-%% returned by spawn_worker_process/2 or spawn_link_worker_process/2
-send_req_direct(Conn_pid, Url, Headers, Method, Body, Options, Timeout) ->
- case catch parse_url(Url) of
- #url{host = Host,
- port = Port} = Parsed_url ->
- Options_1 = merge_options(Host, Port, Options),
- case do_send_req(Conn_pid, Parsed_url, Headers, Method, Body, Options_1, Timeout) of
- {error, {'EXIT', {noproc, _}}} ->
- {error, worker_is_dead};
- Ret ->
- Ret
- end;
- Err ->
- {error, {url_parsing_failed, Err}}
- end.
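-
-%% Illustrative sketch tying the worker API together (hypothetical URL;
-%% not part of the original module):
-%%
-%%   {ok, Conn_pid} = ibrowse:spawn_worker_process("http://www.example.com/"),
-%%   {ok, "200", _Hdrs, _Body} =
-%%       ibrowse:send_req_direct(Conn_pid, "http://www.example.com/", [], get),
-%%   ok = ibrowse:stop_worker_process(Conn_pid).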
-
-%% @doc Tell ibrowse to stream the next chunk of data to the
-%% caller. Should be used in conjunction with the
-%% <code>stream_to</code> option
-%% @spec stream_next(Req_id :: req_id()) -> ok | {error, unknown_req_id}
-stream_next(Req_id) ->
- case ets:lookup(ibrowse_stream, {req_id_pid, Req_id}) of
- [] ->
- {error, unknown_req_id};
- [{_, Pid}] ->
- catch Pid ! {stream_next, Req_id},
- ok
- end.
-
-%% @doc Tell ibrowse to close the connection associated with the
-%% specified stream. Should be used in conjunction with the
-%% <code>stream_to</code> option. Note that all requests in progress on
-%% the connection which is serving this Req_id will be aborted, and an
-%% error returned.
-%% @spec stream_close(Req_id :: req_id()) -> ok | {error, unknown_req_id}
-stream_close(Req_id) ->
- case ets:lookup(ibrowse_stream, {req_id_pid, Req_id}) of
- [] ->
- {error, unknown_req_id};
- [{_, Pid}] ->
- catch Pid ! {stream_close, Req_id},
- ok
- end.
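-
-%% Illustrative streaming sketch (hypothetical URL; not part of the
-%% original module). With {stream_to, {self(), once}} the caller pulls
-%% each chunk explicitly via stream_next/1:
-%%
-%%   {ibrowse_req_id, Req_id} =
-%%       ibrowse:send_req("http://www.example.com/", [], get, [],
-%%                        [{stream_to, {self(), once}}]),
-%%   receive {ibrowse_async_headers, Req_id, _Status, _Hdrs} -> ok end,
-%%   ok = ibrowse:stream_next(Req_id),
-%%   receive {ibrowse_async_response, Req_id, _Data} -> ok end.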
-
-%% @doc Turn tracing on for the ibrowse process
-trace_on() ->
- ibrowse ! {trace, true}.
-%% @doc Turn tracing off for the ibrowse process
-trace_off() ->
- ibrowse ! {trace, false}.
-
-%% @doc Turn tracing on for all connections to the specified HTTP
-%% server. Host is whatever is specified as the domain name in the URL
-%% @spec trace_on(Host, Port) -> ok
-%% Host = string()
-%% Port = integer()
-trace_on(Host, Port) ->
- ibrowse ! {trace, true, Host, Port},
- ok.
-
-%% @doc Turn tracing OFF for all connections to the specified HTTP
-%% server.
-%% @spec trace_off(Host, Port) -> ok
-trace_off(Host, Port) ->
- ibrowse ! {trace, false, Host, Port},
- ok.
-
-%% @doc Turn Off ALL tracing
-%% @spec all_trace_off() -> ok
-all_trace_off() ->
- ibrowse ! all_trace_off,
- ok.
-
-%% @doc Shows some internal information about load balancing. Info
-%% about workers spawned using spawn_worker_process/2 or
-%% spawn_link_worker_process/2 is not included.
-show_dest_status() ->
- Dests = lists:filter(fun({lb_pid, {Host, Port}, _}) when is_list(Host),
- is_integer(Port) ->
- true;
- (_) ->
- false
- end, ets:tab2list(ibrowse_lb)),
- All_ets = ets:all(),
- io:format("~-40.40s | ~-5.5s | ~-10.10s | ~s~n",
- ["Server:port", "ETS", "Num conns", "LB Pid"]),
- io:format("~80.80.=s~n", [""]),
- lists:foreach(fun({lb_pid, {Host, Port}, Lb_pid}) ->
- case lists:dropwhile(
- fun(Tid) ->
- ets:info(Tid, owner) /= Lb_pid
- end, All_ets) of
- [] ->
-                              io:format("~-40.40s | ~-5.5s | ~-10.10s | ~s~n",
- [Host ++ ":" ++ integer_to_list(Port),
- "",
- "",
- io_lib:format("~p", [Lb_pid])]
- );
- [Tid | _] ->
- catch (
- begin
- Size = ets:info(Tid, size),
-                                  io:format("~-40.40s | ~-5.5s | ~-10.10s | ~s~n",
- [Host ++ ":" ++ integer_to_list(Port),
- io_lib:format("~p", [Tid]),
- integer_to_list(Size),
- io_lib:format("~p", [Lb_pid])]
- )
- end
- )
- end
- end, Dests).
-
-%% @doc Shows some internal information about load balancing to a
-%% specified Host:Port. Info about workers spawned using
-%% spawn_worker_process/2 or spawn_link_worker_process/2 is not
-%% included.
-show_dest_status(Host, Port) ->
- case ets:lookup(ibrowse_lb, {Host, Port}) of
- [] ->
- no_active_processes;
- [#lb_pid{pid = Lb_pid}] ->
- io:format("Load Balancer Pid : ~p~n", [Lb_pid]),
- io:format("LB process msg q size : ~p~n", [(catch process_info(Lb_pid, message_queue_len))]),
- case lists:dropwhile(
- fun(Tid) ->
- ets:info(Tid, owner) /= Lb_pid
- end, ets:all()) of
- [] ->
- io:format("Couldn't locate ETS table for ~p~n", [Lb_pid]);
- [Tid | _] ->
- First = ets:first(Tid),
- Last = ets:last(Tid),
- Size = ets:info(Tid, size),
- io:format("LB ETS table id : ~p~n", [Tid]),
- io:format("Num Connections : ~p~n", [Size]),
- case Size of
- 0 ->
- ok;
- _ ->
- {First_p_sz, _} = First,
- {Last_p_sz, _} = Last,
- io:format("Smallest pipeline : ~1000.p~n", [First_p_sz]),
- io:format("Largest pipeline : ~1000.p~n", [Last_p_sz])
- end
- end
- end.
-
-%% @doc Clear current configuration for ibrowse and load from the file
-%% ibrowse.conf in the IBROWSE_EBIN/../priv directory. Current
-%% configuration is cleared only if the ibrowse.conf file is readable
-%% using file:consult/1
-rescan_config() ->
- gen_server:call(?MODULE, rescan_config).
-
-%% @doc Clear current configuration for ibrowse and load from the
-%% specified file. Current configuration is cleared only if the specified
-%% file is readable using file:consult/1
-rescan_config(File) when is_list(File) ->
- gen_server:call(?MODULE, {rescan_config, File}).
-
-%%====================================================================
-%% Server functions
-%%====================================================================
-
-%%--------------------------------------------------------------------
-%% Function: init/1
-%% Description: Initiates the server
-%% Returns: {ok, State} |
-%% {ok, State, Timeout} |
-%% ignore |
-%% {stop, Reason}
-%%--------------------------------------------------------------------
-init(_) ->
-    process_flag(trap_exit, true),
-    State = #state{},
-    put(my_trace_flag, State#state.trace),
-    put(ibrowse_trace_token, "ibrowse"),
-    ibrowse_lb = ets:new(ibrowse_lb, [named_table, public, {keypos, 2}]),
-    ibrowse_conf = ets:new(ibrowse_conf, [named_table, protected, {keypos, 2}]),
-    ibrowse_stream = ets:new(ibrowse_stream, [named_table, public]),
-    import_config(),
-    {ok, State}.
-
-import_config() ->
- case code:priv_dir(ibrowse) of
- {error, _} ->
- ok;
- PrivDir ->
- Filename = filename:join(PrivDir, "ibrowse.conf"),
- import_config(Filename)
- end.
-
-import_config(Filename) ->
- case file:consult(Filename) of
- {ok, Terms} ->
- ets:delete_all_objects(ibrowse_conf),
- Fun = fun({dest, Host, Port, MaxSess, MaxPipe, Options})
- when is_list(Host), is_integer(Port),
- is_integer(MaxSess), MaxSess > 0,
- is_integer(MaxPipe), MaxPipe > 0, is_list(Options) ->
- I = [{{max_sessions, Host, Port}, MaxSess},
- {{max_pipeline_size, Host, Port}, MaxPipe},
- {{options, Host, Port}, Options}],
- lists:foreach(
- fun({X, Y}) ->
- ets:insert(ibrowse_conf,
- #ibrowse_conf{key = X,
- value = Y})
- end, I);
- ({K, V}) ->
- ets:insert(ibrowse_conf,
- #ibrowse_conf{key = K,
- value = V});
- (X) ->
- io:format("Skipping unrecognised term: ~p~n", [X])
- end,
- lists:foreach(Fun, Terms);
- _Err ->
- ok
- end.
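-
-%% Illustrative ibrowse.conf contents matching the clauses above
-%% (hypothetical destination; not part of the original module):
-%%
-%%   {dest, "www.example.com", 80, 10, 5, [{response_format, binary}]}.
-%%   {max_headers_size, 65536}.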
-
-%% @doc Internal export
-get_config_value(Key) ->
- [#ibrowse_conf{value = V}] = ets:lookup(ibrowse_conf, Key),
- V.
-
-%% @doc Internal export
-get_config_value(Key, DefVal) ->
- case ets:lookup(ibrowse_conf, Key) of
- [] ->
- DefVal;
- [#ibrowse_conf{value = V}] ->
- V
- end.
-
-set_config_value(Key, Val) ->
- ets:insert(ibrowse_conf, #ibrowse_conf{key = Key, value = Val}).
-%%--------------------------------------------------------------------
-%% Function: handle_call/3
-%% Description: Handling call messages
-%% Returns: {reply, Reply, State} |
-%% {reply, Reply, State, Timeout} |
-%% {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, Reply, State} | (terminate/2 is called)
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_call({get_lb_pid, #url{host = Host, port = Port} = Url}, _From, State) ->
- Pid = do_get_connection(Url, ets:lookup(ibrowse_lb, {Host, Port})),
- {reply, Pid, State};
-
-handle_call(stop, _From, State) ->
- do_trace("IBROWSE shutting down~n", []),
- ets:foldl(fun(#lb_pid{pid = Pid}, Acc) ->
- ibrowse_lb:stop(Pid),
- Acc
- end, [], ibrowse_lb),
- {stop, normal, ok, State};
-
-handle_call({set_config_value, Key, Val}, _From, State) ->
- set_config_value(Key, Val),
- {reply, ok, State};
-
-handle_call(rescan_config, _From, State) ->
- Ret = (catch import_config()),
- {reply, Ret, State};
-
-handle_call({rescan_config, File}, _From, State) ->
- Ret = (catch import_config(File)),
- {reply, Ret, State};
-
-handle_call(Request, _From, State) ->
- Reply = {unknown_request, Request},
- {reply, Reply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_cast/2
-%% Description: Handling cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_info/2
-%% Description: Handling all non call/cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_info(all_trace_off, State) ->
- Mspec = [{{ibrowse_conf,{trace,'$1','$2'},true},[],[{{'$1','$2'}}]}],
- Trace_on_dests = ets:select(ibrowse_conf, Mspec),
- Fun = fun(#lb_pid{host_port = {H, P}, pid = Pid}, _) ->
- case lists:member({H, P}, Trace_on_dests) of
- false ->
- ok;
- true ->
- catch Pid ! {trace, false}
- end;
- (_, Acc) ->
- Acc
- end,
- ets:foldl(Fun, undefined, ibrowse_lb),
- ets:select_delete(ibrowse_conf, [{{ibrowse_conf,{trace,'$1','$2'},true},[],['true']}]),
- {noreply, State};
-
-handle_info({trace, Bool}, State) ->
- put(my_trace_flag, Bool),
- {noreply, State};
-
-handle_info({trace, Bool, Host, Port}, State) ->
- Fun = fun(#lb_pid{host_port = {H, P}, pid = Pid}, _)
- when H == Host,
- P == Port ->
- catch Pid ! {trace, Bool};
- (_, Acc) ->
- Acc
- end,
- ets:foldl(Fun, undefined, ibrowse_lb),
- ets:insert(ibrowse_conf, #ibrowse_conf{key = {trace, Host, Port},
- value = Bool}),
- {noreply, State};
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: terminate/2
-%% Description: Shutdown the server
-%% Returns: any (ignored by gen_server)
-%%--------------------------------------------------------------------
-terminate(_Reason, _State) ->
- ok.
-
-%%--------------------------------------------------------------------
-%% Func: code_change/3
-%% Purpose: Convert process state when code is changed
-%% Returns: {ok, NewState}
-%%--------------------------------------------------------------------
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%%--------------------------------------------------------------------
-%%% Internal functions
-%%--------------------------------------------------------------------
-do_get_connection(#url{host = Host, port = Port}, []) ->
- {ok, Pid} = ibrowse_lb:start_link([Host, Port]),
- ets:insert(ibrowse_lb, #lb_pid{host_port = {Host, Port}, pid = Pid}),
- Pid;
-do_get_connection(_Url, [#lb_pid{pid = Pid}]) ->
- Pid.
diff --git a/1.1.x/src/ibrowse/ibrowse.hrl b/1.1.x/src/ibrowse/ibrowse.hrl
deleted file mode 100644
index 18dde827..00000000
--- a/1.1.x/src/ibrowse/ibrowse.hrl
+++ /dev/null
@@ -1,21 +0,0 @@
--ifndef(IBROWSE_HRL).
--define(IBROWSE_HRL, "ibrowse.hrl").
-
--record(url, {
- abspath,
- host,
- port,
- username,
- password,
- path,
- protocol,
- host_type % 'hostname', 'ipv4_address' or 'ipv6_address'
-}).
-
--record(lb_pid, {host_port, pid}).
-
--record(client_conn, {key, cur_pipeline_size = 0, reqs_served = 0}).
-
--record(ibrowse_conf, {key, value}).
-
--endif.
diff --git a/1.1.x/src/ibrowse/ibrowse_app.erl b/1.1.x/src/ibrowse/ibrowse_app.erl
deleted file mode 100644
index d3a0f7bb..00000000
--- a/1.1.x/src/ibrowse/ibrowse_app.erl
+++ /dev/null
@@ -1,63 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% File : ibrowse_app.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
-%%%
-%%% Created : 15 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%%-------------------------------------------------------------------
--module(ibrowse_app).
-
--behaviour(application).
-%%--------------------------------------------------------------------
-%% Include files
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% External exports
-%%--------------------------------------------------------------------
--export([
- start/2,
- stop/1
- ]).
-
-%%--------------------------------------------------------------------
-%% Internal exports
-%%--------------------------------------------------------------------
--export([
- ]).
-
-%%--------------------------------------------------------------------
-%% Macros
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% Records
-%%--------------------------------------------------------------------
-
-%%====================================================================
-%% External functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Func: start/2
-%% Returns: {ok, Pid} |
-%% {ok, Pid, State} |
-%% {error, Reason}
-%%--------------------------------------------------------------------
-start(_Type, _StartArgs) ->
- case ibrowse_sup:start_link() of
- {ok, Pid} ->
- {ok, Pid};
- Error ->
- Error
- end.
-
-%%--------------------------------------------------------------------
-%% Func: stop/1
-%% Returns: any
-%%--------------------------------------------------------------------
-stop(_State) ->
- ok.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/1.1.x/src/ibrowse/ibrowse_http_client.erl b/1.1.x/src/ibrowse/ibrowse_http_client.erl
deleted file mode 100644
index eb2bf315..00000000
--- a/1.1.x/src/ibrowse/ibrowse_http_client.erl
+++ /dev/null
@@ -1,1855 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% File : ibrowse_http_client.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description : The name says it all
-%%%
-%%% Created : 11 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%%-------------------------------------------------------------------
--module(ibrowse_http_client).
--behaviour(gen_server).
-%%--------------------------------------------------------------------
-%% Include files
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% External exports
--export([
- start_link/1,
- start/1,
- stop/1,
- send_req/7
- ]).
-
--ifdef(debug).
--compile(export_all).
--endif.
-
-%% gen_server callbacks
--export([
- init/1,
- handle_call/3,
- handle_cast/2,
- handle_info/2,
- terminate/2,
- code_change/3
- ]).
-
--include("ibrowse.hrl").
--include_lib("kernel/include/inet.hrl").
-
--record(state, {host, port, connect_timeout,
- inactivity_timer_ref,
- use_proxy = false, proxy_auth_digest,
- ssl_options = [], is_ssl = false, socket,
- proxy_tunnel_setup = false,
- tunnel_setup_queue = [],
- reqs=queue:new(), cur_req, status=idle, http_status_code,
- reply_buffer = <<>>, rep_buf_size=0, streamed_size = 0,
- recvd_headers=[],
- status_line, raw_headers,
- is_closing, send_timer, content_length,
- deleted_crlf = false, transfer_encoding,
- chunk_size, chunk_size_buffer = <<>>,
- recvd_chunk_size, interim_reply_sent = false,
- lb_ets_tid, cur_pipeline_size = 0, prev_req_id
- }).
-
--record(request, {url, method, options, from,
- stream_to, caller_controls_socket = false,
- caller_socket_options = [],
- req_id,
- stream_chunk_size,
- save_response_to_file = false,
- tmp_file_name, tmp_file_fd, preserve_chunked_encoding,
- response_format}).
-
--import(ibrowse_lib, [
- get_value/2,
- get_value/3,
- do_trace/2
- ]).
-
--define(DEFAULT_STREAM_CHUNK_SIZE, 1024*1024).
--define(dec2hex(X), erlang:integer_to_list(X, 16)).
-%%====================================================================
-%% External functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Function: start_link/0
-%% Description: Starts the server
-%%--------------------------------------------------------------------
-start(Args) ->
- gen_server:start(?MODULE, Args, []).
-
-start_link(Args) ->
- gen_server:start_link(?MODULE, Args, []).
-
-stop(Conn_pid) ->
- case catch gen_server:call(Conn_pid, stop) of
- {'EXIT', {timeout, _}} ->
- exit(Conn_pid, kill),
- ok;
- _ ->
- ok
- end.
-
-send_req(Conn_Pid, Url, Headers, Method, Body, Options, Timeout) ->
- gen_server:call(
- Conn_Pid,
- {send_req, {Url, Headers, Method, Body, Options, Timeout}}, Timeout).
-
-%%====================================================================
-%% Server functions
-%%====================================================================
-
-%%--------------------------------------------------------------------
-%% Function: init/1
-%% Description: Initiates the server
-%% Returns: {ok, State} |
-%% {ok, State, Timeout} |
-%% ignore |
-%% {stop, Reason}
-%%--------------------------------------------------------------------
-init({Lb_Tid, #url{host = Host, port = Port}, {SSLOptions, Is_ssl}}) ->
- State = #state{host = Host,
- port = Port,
- ssl_options = SSLOptions,
- is_ssl = Is_ssl,
- lb_ets_tid = Lb_Tid},
- put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
- put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
- {ok, State};
-init(Url) when is_list(Url) ->
- case catch ibrowse_lib:parse_url(Url) of
- #url{protocol = Protocol} = Url_rec ->
- init({undefined, Url_rec, {[], Protocol == https}});
- {'EXIT', _} ->
- {error, invalid_url}
- end;
-init({Host, Port}) ->
- State = #state{host = Host,
- port = Port},
- put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
- put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
- {ok, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_call/3
-%% Description: Handling call messages
-%% Returns: {reply, Reply, State} |
-%% {reply, Reply, State, Timeout} |
-%% {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, Reply, State} | (terminate/2 is called)
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-%% Received a request when the remote server has already sent us a
-%% Connection: Close header
-handle_call({send_req, _}, _From, #state{is_closing = true} = State) ->
- {reply, {error, connection_closing}, State};
-
-handle_call({send_req, {Url, Headers, Method, Body, Options, Timeout}},
- From, State) ->
- send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State);
-
-handle_call(stop, _From, State) ->
- do_close(State),
- do_error_reply(State, closing_on_request),
- {stop, normal, ok, State};
-
-handle_call(Request, _From, State) ->
- Reply = {unknown_request, Request},
- {reply, Reply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_cast/2
-%% Description: Handling cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_info/2
-%% Description: Handling all non call/cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_info({tcp, _Sock, Data}, #state{status = Status} = State) ->
-%% io:format("Recvd data: ~p~n", [Data]),
- do_trace("Data recvd in state: ~p. Size: ~p. ~p~n~n", [Status, size(Data), Data]),
- handle_sock_data(Data, State);
-handle_info({ssl, _Sock, Data}, State) ->
- handle_sock_data(Data, State);
-
-handle_info({stream_next, Req_id}, #state{socket = Socket,
- cur_req = #request{req_id = Req_id}} = State) ->
- %% io:format("Client process set {active, once}~n", []),
- do_setopts(Socket, [{active, once}], State),
- {noreply, set_inac_timer(State)};
-
-handle_info({stream_next, _Req_id}, State) ->
- _Cur_req_id = case State#state.cur_req of
- #request{req_id = Cur} ->
- Cur;
- _ ->
- undefined
- end,
-%% io:format("Ignoring stream_next as ~1000.p is not cur req (~1000.p)~n",
-%% [_Req_id, _Cur_req_id]),
- {noreply, State};
-
-handle_info({stream_close, _Req_id}, State) ->
- shutting_down(State),
- do_close(State),
- do_error_reply(State, closing_on_request),
- {stop, normal, State};
-
-handle_info({tcp_closed, _Sock}, State) ->
- do_trace("TCP connection closed by peer!~n", []),
- handle_sock_closed(State),
- {stop, normal, State};
-handle_info({ssl_closed, _Sock}, State) ->
- do_trace("SSL connection closed by peer!~n", []),
- handle_sock_closed(State),
- {stop, normal, State};
-
-handle_info({tcp_error, _Sock, Reason}, State) ->
- do_trace("Error on connection to ~1000.p:~1000.p -> ~1000.p~n",
- [State#state.host, State#state.port, Reason]),
- handle_sock_closed(State),
- {stop, normal, State};
-handle_info({ssl_error, _Sock, Reason}, State) ->
- do_trace("Error on SSL connection to ~1000.p:~1000.p -> ~1000.p~n",
- [State#state.host, State#state.port, Reason]),
- handle_sock_closed(State),
- {stop, normal, State};
-
-handle_info({req_timedout, From}, State) ->
- case lists:keymember(From, #request.from, queue:to_list(State#state.reqs)) of
- false ->
- {noreply, State};
- true ->
- shutting_down(State),
- do_error_reply(State, req_timedout),
- {stop, normal, State}
- end;
-
-handle_info(timeout, State) ->
- do_trace("Inactivity timeout triggered. Shutting down connection~n", []),
- shutting_down(State),
- do_error_reply(State, req_timedout),
- {stop, normal, State};
-
-handle_info({trace, Bool}, State) ->
- put(my_trace_flag, Bool),
- {noreply, State};
-
-handle_info(Info, State) ->
- io:format("Unknown message recvd for ~1000.p:~1000.p -> ~p~n",
- [State#state.host, State#state.port, Info]),
- io:format("Recvd unknown message ~p when in state: ~p~n", [Info, State]),
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: terminate/2
-%% Description: Shutdown the server
-%% Returns: any (ignored by gen_server)
-%%--------------------------------------------------------------------
-terminate(_Reason, State) ->
- do_close(State),
- ok.
-
-%%--------------------------------------------------------------------
-%% Func: code_change/3
-%% Purpose: Convert process state when code is changed
-%% Returns: {ok, NewState}
-%%--------------------------------------------------------------------
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%%--------------------------------------------------------------------
-%%% Internal functions
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% Handles data recvd on the socket
-%%--------------------------------------------------------------------
-handle_sock_data(Data, #state{status=idle}=State) ->
-    do_trace("Data recvd on socket in state idle! ~1000.p~n", [Data]),
- shutting_down(State),
- do_error_reply(State, data_in_status_idle),
- do_close(State),
- {stop, normal, State};
-
-handle_sock_data(Data, #state{status = get_header}=State) ->
- case parse_response(Data, State) of
- {error, _Reason} ->
- shutting_down(State),
- {stop, normal, State};
- #state{socket = Socket, status = Status, cur_req = CurReq} = State_1 ->
- case {Status, CurReq} of
- {get_header, #request{caller_controls_socket = true}} ->
- do_setopts(Socket, [{active, once}], State_1);
- _ ->
- active_once(State_1)
- end,
- {noreply, set_inac_timer(State_1)}
- end;
-
-handle_sock_data(Data, #state{status = get_body,
- socket = Socket,
- content_length = CL,
- http_status_code = StatCode,
- recvd_headers = Headers,
- chunk_size = CSz} = State) ->
- case (CL == undefined) and (CSz == undefined) of
- true ->
- case accumulate_response(Data, State) of
- {error, Reason} ->
- shutting_down(State),
- fail_pipelined_requests(State,
- {error, {Reason, {stat_code, StatCode}, Headers}}),
- {stop, normal, State};
- State_1 ->
- active_once(State_1),
- State_2 = set_inac_timer(State_1),
- {noreply, State_2}
- end;
- _ ->
- case parse_11_response(Data, State) of
- {error, Reason} ->
- shutting_down(State),
- fail_pipelined_requests(State,
- {error, {Reason, {stat_code, StatCode}, Headers}}),
- {stop, normal, State};
- #state{cur_req = #request{caller_controls_socket = Ccs},
- interim_reply_sent = Irs} = State_1 ->
- case Irs of
- true ->
- active_once(State_1);
- false when Ccs == true ->
- do_setopts(Socket, [{active, once}], State);
- false ->
- active_once(State_1)
- end,
- State_2 = State_1#state{interim_reply_sent = false},
- case Ccs of
- true ->
- cancel_timer(State_2#state.inactivity_timer_ref, {eat_message, timeout}),
- {noreply, State_2#state{inactivity_timer_ref = undefined}};
- _ ->
- {noreply, set_inac_timer(State_2)}
- end;
- State_1 ->
- active_once(State_1),
- State_2 = set_inac_timer(State_1),
- {noreply, State_2}
- end
- end.
-
-accumulate_response(Data,
- #state{
- cur_req = #request{save_response_to_file = Srtf,
- tmp_file_fd = undefined} = CurReq,
- http_status_code=[$2 | _]}=State) when Srtf /= false ->
- TmpFilename = make_tmp_filename(Srtf),
- case file:open(TmpFilename, [write, delayed_write, raw]) of
- {ok, Fd} ->
- accumulate_response(Data, State#state{
- cur_req = CurReq#request{
- tmp_file_fd = Fd,
- tmp_file_name = TmpFilename}});
- {error, Reason} ->
- {error, {file_open_error, Reason}}
- end;
-accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
- tmp_file_fd = Fd},
- transfer_encoding=chunked,
- reply_buffer = Reply_buf,
- http_status_code=[$2 | _]
- } = State) when Srtf /= false ->
- case file:write(Fd, [Reply_buf, Data]) of
- ok ->
- State#state{reply_buffer = <<>>};
- {error, Reason} ->
- {error, {file_write_error, Reason}}
- end;
-accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
- tmp_file_fd = Fd},
- reply_buffer = RepBuf,
- http_status_code=[$2 | _]
- } = State) when Srtf /= false ->
- case file:write(Fd, [RepBuf, Data]) of
- ok ->
- State#state{reply_buffer = <<>>};
- {error, Reason} ->
- {error, {file_write_error, Reason}}
- end;
-accumulate_response(Data, #state{reply_buffer = RepBuf,
- rep_buf_size = RepBufSize,
- streamed_size = Streamed_size,
- cur_req = CurReq}=State) ->
- #request{stream_to = StreamTo,
- req_id = ReqId,
- stream_chunk_size = Stream_chunk_size,
- response_format = Response_format,
- caller_controls_socket = Caller_controls_socket} = CurReq,
- RepBuf_1 = <<RepBuf/binary, Data/binary>>,
- New_data_size = RepBufSize - Streamed_size,
- case StreamTo of
- undefined ->
- State#state{reply_buffer = RepBuf_1};
- _ when Caller_controls_socket == true ->
- do_interim_reply(StreamTo, Response_format, ReqId, RepBuf_1),
- State#state{reply_buffer = <<>>,
- interim_reply_sent = true,
- streamed_size = Streamed_size + size(RepBuf_1)};
- _ when New_data_size >= Stream_chunk_size ->
- {Stream_chunk, Rem_data} = split_binary(RepBuf_1, Stream_chunk_size),
- do_interim_reply(StreamTo, Response_format, ReqId, Stream_chunk),
- State_1 = State#state{
- reply_buffer = <<>>,
- interim_reply_sent = true,
- streamed_size = Streamed_size + Stream_chunk_size},
- case Rem_data of
- <<>> ->
- State_1;
- _ ->
- accumulate_response(Rem_data, State_1)
- end;
- _ ->
- State#state{reply_buffer = RepBuf_1}
- end.
-
-make_tmp_filename(true) ->
- DownloadDir = ibrowse:get_config_value(download_dir, filename:absname("./")),
- {A,B,C} = now(),
- filename:join([DownloadDir,
- "ibrowse_tmp_file_"++
- integer_to_list(A) ++
- integer_to_list(B) ++
- integer_to_list(C)]);
-make_tmp_filename(File) when is_list(File) ->
- File.
-
-
-%%--------------------------------------------------------------------
-%% Handles the case when the server closes the socket
-%%--------------------------------------------------------------------
-handle_sock_closed(#state{status=get_header} = State) ->
- shutting_down(State),
- do_error_reply(State, connection_closed);
-
-handle_sock_closed(#state{cur_req=undefined} = State) ->
- shutting_down(State);
-
-%% We check IsClosing because the server could have sent a
-%% Connection: Close header and closed the socket to indicate the end
-%% of the response. There may be pipelined requests which still need a
-%% response.
-handle_sock_closed(#state{reply_buffer = Buf, reqs = Reqs, http_status_code = SC,
- is_closing = IsClosing,
- cur_req = #request{tmp_file_name=TmpFilename,
- tmp_file_fd=Fd} = CurReq,
- status = get_body,
- recvd_headers = Headers,
- status_line = Status_line,
- raw_headers = Raw_headers
- }=State) ->
- #request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format,
- options = Options} = CurReq,
- case IsClosing of
- true ->
- {_, Reqs_1} = queue:out(Reqs),
- Body = case TmpFilename of
- undefined ->
- Buf;
- _ ->
- ok = file:close(Fd),
- {file, TmpFilename}
- end,
- Reply = case get_value(give_raw_headers, Options, false) of
- true ->
- {ok, Status_line, Raw_headers, Body};
- false ->
-                        {ok, SC, Headers, Body} % Body may be {file, TmpFilename}
- end,
- State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
- ok = do_error_reply(State_1#state{reqs = Reqs_1}, connection_closed),
- State_1;
- _ ->
- ok = do_error_reply(State, connection_closed),
- State
- end.
-
-do_connect(Host, Port, Options, #state{is_ssl = true,
- use_proxy = false,
- ssl_options = SSLOptions},
- Timeout) ->
- ssl:connect(Host, Port, get_sock_options(Host, Options, SSLOptions), Timeout);
-do_connect(Host, Port, Options, _State, Timeout) ->
- gen_tcp:connect(Host, Port, get_sock_options(Host, Options, []), Timeout).
-
-get_sock_options(Host, Options, SSLOptions) ->
- Caller_socket_options = get_value(socket_options, Options, []),
- Ipv6Options = case is_ipv6_host(Host) of
- true ->
- [inet6];
- false ->
- []
- end,
- Other_sock_options = filter_sock_options(SSLOptions ++ Caller_socket_options ++ Ipv6Options),
- case lists:keysearch(nodelay, 1, Other_sock_options) of
- false ->
- [{nodelay, true}, binary, {active, false} | Other_sock_options];
- {value, _} ->
- [binary, {active, false} | Other_sock_options]
- end.
-
-is_ipv6_host(Host) ->
- case inet_parse:address(Host) of
- {ok, {_, _, _, _, _, _, _, _}} ->
- true;
- {ok, {_, _, _, _}} ->
- false;
- _ ->
- case inet:gethostbyname(Host) of
- {ok, #hostent{h_addrtype = inet6}} ->
- true;
- _ ->
- false
- end
- end.
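-
-%% Illustrative behaviour (not part of the original module):
-%%   is_ipv6_host("::1")       -> true   (8-tuple from inet_parse)
-%%   is_ipv6_host("127.0.0.1") -> false  (4-tuple from inet_parse)
-%% For hostnames, the result depends on the gethostbyname lookup above.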
-
-%% We don't want the caller to specify certain options
-filter_sock_options(Opts) ->
- lists:filter(fun({active, _}) ->
- false;
- ({packet, _}) ->
- false;
- (list) ->
- false;
- (_) ->
- true
- end, Opts).
-
-do_send(Req, #state{socket = Sock,
-                    is_ssl = true,
-                    use_proxy = true,
-                    proxy_tunnel_setup = Pts}) when Pts /= done ->
-    gen_tcp:send(Sock, Req);
-do_send(Req, #state{socket = Sock, is_ssl = true}) -> ssl:send(Sock, Req);
-do_send(Req, #state{socket = Sock, is_ssl = false}) -> gen_tcp:send(Sock, Req).
-
-%% @spec do_send_body(Sock::socket_descriptor(), Source::source_descriptor(), IsSSL::boolean()) -> ok | error()
-%% source_descriptor() = fun_arity_0 |
-%% {fun_arity_0} |
-%% {fun_arity_1, term()}
-%% error() = term()
-do_send_body(Source, State, TE) when is_function(Source) ->
- do_send_body({Source}, State, TE);
-do_send_body({Source}, State, TE) when is_function(Source) ->
- do_send_body1(Source, Source(), State, TE);
-do_send_body({Source, Source_state}, State, TE) when is_function(Source) ->
- do_send_body1(Source, Source(Source_state), State, TE);
-do_send_body(Body, State, _TE) ->
- do_send(Body, State).
-
-do_send_body1(Source, Resp, State, TE) ->
- case Resp of
- {ok, Data} ->
- do_send(maybe_chunked_encode(Data, TE), State),
- do_send_body({Source}, State, TE);
- {ok, Data, New_source_state} ->
- do_send(maybe_chunked_encode(Data, TE), State),
- do_send_body({Source, New_source_state}, State, TE);
- eof when TE == true ->
- do_send(<<"0\r\n\r\n">>, State),
- ok;
- eof ->
- ok;
- Err ->
- Err
- end.
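-
-%% Illustrative body source for the fun-based clauses above (hypothetical
-%% data; not part of the original module). A {fun/1, State} source must
-%% return {ok, Data, NewState} until it returns eof:
-%%
-%%   Source = fun(0) -> eof;
-%%               (N) -> {ok, <<"chunk">>, N - 1}
-%%            end,
-%%   ibrowse:send_req("http://www.example.com/", [], post,
-%%                    {Source, 3}, [{transfer_encoding, chunked}]).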
-
-maybe_chunked_encode(Data, false) ->
- Data;
-maybe_chunked_encode(Data, true) ->
- [?dec2hex(iolist_size(Data)), "\r\n", Data, "\r\n"].
-
-do_close(#state{socket = undefined}) -> ok;
-do_close(#state{socket = Sock,
- is_ssl = true,
- use_proxy = true,
- proxy_tunnel_setup = Pts
- }) when Pts /= done -> catch gen_tcp:close(Sock);
-do_close(#state{socket = Sock, is_ssl = true}) -> catch ssl:close(Sock);
-do_close(#state{socket = Sock, is_ssl = false}) -> catch gen_tcp:close(Sock).
-
-active_once(#state{cur_req = #request{caller_controls_socket = true}}) ->
- ok;
-active_once(#state{socket = Socket} = State) ->
- do_setopts(Socket, [{active, once}], State).
-
-do_setopts(_Sock, [], _) -> ok;
-do_setopts(Sock, Opts, #state{is_ssl = true,
- use_proxy = true,
- proxy_tunnel_setup = Pts}
- ) when Pts /= done -> inet:setopts(Sock, Opts);
-do_setopts(Sock, Opts, #state{is_ssl = true}) -> ssl:setopts(Sock, Opts);
-do_setopts(Sock, Opts, _) -> inet:setopts(Sock, Opts).
-
-check_ssl_options(Options, State) ->
- case get_value(is_ssl, Options, false) of
- false ->
- State;
- true ->
- State#state{is_ssl=true, ssl_options=get_value(ssl_options, Options)}
- end.
-
-send_req_1(From,
- #url{host = Host,
- port = Port} = Url,
- Headers, Method, Body, Options, Timeout,
- #state{socket = undefined} = State) ->
- {Host_1, Port_1, State_1} =
- case get_value(proxy_host, Options, false) of
- false ->
- {Host, Port, State};
- PHost ->
- ProxyUser = get_value(proxy_user, Options, []),
- ProxyPassword = get_value(proxy_password, Options, []),
- Digest = http_auth_digest(ProxyUser, ProxyPassword),
- {PHost, get_value(proxy_port, Options, 80),
- State#state{use_proxy = true,
- proxy_auth_digest = Digest}}
- end,
- State_2 = check_ssl_options(Options, State_1),
- do_trace("Connecting...~n", []),
- Conn_timeout = get_value(connect_timeout, Options, Timeout),
- case do_connect(Host_1, Port_1, Options, State_2, Conn_timeout) of
- {ok, Sock} ->
- do_trace("Connected! Socket: ~1000.p~n", [Sock]),
- State_3 = State_2#state{socket = Sock,
- connect_timeout = Conn_timeout},
- send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State_3);
- Err ->
- shutting_down(State_2),
- do_trace("Error connecting. Reason: ~1000.p~n", [Err]),
- gen_server:reply(From, {error, {conn_failed, Err}}),
- {stop, normal, State_2}
- end;
-
-%% Send a CONNECT request.
-%% Wait for 200 OK
-%% Upgrade to SSL connection
-%% Then send request
-
-send_req_1(From,
- #url{
- host = Server_host,
- port = Server_port
- } = Url,
- Headers, Method, Body, Options, Timeout,
- #state{
- proxy_tunnel_setup = false,
- use_proxy = true,
- is_ssl = true} = State) ->
- NewReq = #request{
- method = connect,
- preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
- options = Options
- },
- State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
- Pxy_auth_headers = maybe_modify_headers(Url, Method, Options, [], State_1),
- Path = [Server_host, $:, integer_to_list(Server_port)],
- {Req, Body_1} = make_request(connect, Pxy_auth_headers,
- Path, Path,
- [], Options, State_1, undefined),
- TE = is_chunked_encoding_specified(Options),
- trace_request(Req),
- case do_send(Req, State) of
- ok ->
- case do_send_body(Body_1, State_1, TE) of
- ok ->
- trace_request_body(Body_1),
- active_once(State_1),
- Ref = case Timeout of
- infinity ->
- undefined;
- _ ->
- erlang:send_after(Timeout, self(), {req_timedout, From})
- end,
- State_2 = State_1#state{status = get_header,
- cur_req = NewReq,
- send_timer = Ref,
- proxy_tunnel_setup = in_progress,
- tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout}]},
- State_3 = set_inac_timer(State_2),
- {noreply, State_3};
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, {send_failed, Err}}),
- {stop, normal, State_1}
- end;
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, {send_failed, Err}}),
- {stop, normal, State_1}
- end;
-
-send_req_1(From, Url, Headers, Method, Body, Options, Timeout,
- #state{proxy_tunnel_setup = in_progress,
- tunnel_setup_queue = Q} = State) ->
- do_trace("Queued SSL request awaiting tunnel setup: ~n"
- "URL : ~s~n"
- "Method : ~p~n"
- "Headers : ~p~n", [Url, Method, Headers]),
- {noreply, State#state{tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout} | Q]}};
-
-send_req_1(From,
- #url{abspath = AbsPath,
- path = RelPath} = Url,
- Headers, Method, Body, Options, Timeout,
- #state{status = Status,
- socket = Socket} = State) ->
- cancel_timer(State#state.inactivity_timer_ref, {eat_message, timeout}),
- ReqId = make_req_id(),
- Resp_format = get_value(response_format, Options, list),
- Caller_socket_options = get_value(socket_options, Options, []),
- {StreamTo, Caller_controls_socket} =
- case get_value(stream_to, Options, undefined) of
- {Caller, once} when is_pid(Caller) or
- is_atom(Caller) ->
- Async_pid_rec = {{req_id_pid, ReqId}, self()},
- true = ets:insert(ibrowse_stream, Async_pid_rec),
- {Caller, true};
- undefined ->
- {undefined, false};
- Caller when is_pid(Caller) or
- is_atom(Caller) ->
- {Caller, false};
- Stream_to_inv ->
- exit({invalid_option, {stream_to, Stream_to_inv}})
- end,
- SaveResponseToFile = get_value(save_response_to_file, Options, false),
- NewReq = #request{url = Url,
- method = Method,
- stream_to = StreamTo,
- caller_controls_socket = Caller_controls_socket,
- caller_socket_options = Caller_socket_options,
- options = Options,
- req_id = ReqId,
- save_response_to_file = SaveResponseToFile,
- stream_chunk_size = get_stream_chunk_size(Options),
- response_format = Resp_format,
- from = From,
- preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false)
- },
- State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
- Headers_1 = maybe_modify_headers(Url, Method, Options, Headers, State_1),
- {Req, Body_1} = make_request(Method,
- Headers_1,
- AbsPath, RelPath, Body, Options, State_1,
- ReqId),
- trace_request(Req),
- do_setopts(Socket, Caller_socket_options, State_1),
- TE = is_chunked_encoding_specified(Options),
- case do_send(Req, State_1) of
- ok ->
- case do_send_body(Body_1, State_1, TE) of
- ok ->
- trace_request_body(Body_1),
- State_2 = inc_pipeline_counter(State_1),
- active_once(State_2),
- Ref = case Timeout of
- infinity ->
- undefined;
- _ ->
- erlang:send_after(Timeout, self(), {req_timedout, From})
- end,
- State_3 = case Status of
- idle ->
- State_2#state{status = get_header,
- cur_req = NewReq,
- send_timer = Ref};
- _ ->
- State_2#state{send_timer = Ref}
- end,
- case StreamTo of
- undefined ->
- ok;
- _ ->
- gen_server:reply(From, {ibrowse_req_id, ReqId})
- end,
- State_4 = set_inac_timer(State_3),
- {noreply, State_4};
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, {send_failed, Err}}),
- {stop, normal, State_1}
- end;
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, {send_failed, Err}}),
- {stop, normal, State_1}
- end.
-
-maybe_modify_headers(#url{}, connect, _, Headers, State) ->
- add_proxy_auth_headers(State, Headers);
-maybe_modify_headers(#url{host = Host, port = Port} = Url,
- _Method,
- Options, Headers, State) ->
- case get_value(headers_as_is, Options, false) of
- false ->
- Headers_1 = add_auth_headers(Url, Options, Headers, State),
- HostHeaderValue = case lists:keysearch(host_header, 1, Options) of
- false ->
- case Port of
- 80 -> Host;
- 443 -> Host;
- _ -> [Host, ":", integer_to_list(Port)]
- end;
- {value, {_, Host_h_val}} ->
- Host_h_val
- end,
- [{"Host", HostHeaderValue} | Headers_1];
- true ->
- Headers
- end.
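-
-%% For example (illustrative; not part of the original module): a request
-%% to http://www.example.com:8080/ gets "Host: www.example.com:8080",
-%% while the port is omitted for the default ports 80 and 443.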
-
-add_auth_headers(#url{username = User,
- password = UPw},
- Options,
- Headers,
- State) ->
- Headers_1 = case User of
- undefined ->
- case get_value(basic_auth, Options, undefined) of
- undefined ->
- Headers;
- {U,P} ->
- [{"Authorization", ["Basic ", http_auth_digest(U, P)]} | Headers]
- end;
- _ ->
- [{"Authorization", ["Basic ", http_auth_digest(User, UPw)]} | Headers]
- end,
- add_proxy_auth_headers(State, Headers_1).
-
-add_proxy_auth_headers(#state{use_proxy = false}, Headers) ->
- Headers;
-add_proxy_auth_headers(#state{proxy_auth_digest = []}, Headers) ->
- Headers;
-add_proxy_auth_headers(#state{proxy_auth_digest = Auth_digest}, Headers) ->
- [{"Proxy-Authorization", ["Basic ", Auth_digest]} | Headers].
-
-http_auth_digest([], []) ->
- [];
-http_auth_digest(Username, Password) ->
- ibrowse_lib:encode_base64(Username ++ [$: | Password]).
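-
-%% For example (illustrative; not part of the original module):
-%%   http_auth_digest("user", "pass") yields the Base64 of "user:pass",
-%%   i.e. "dXNlcjpwYXNz", used as "Authorization: Basic dXNlcjpwYXNz".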
-
-make_request(Method, Headers, AbsPath, RelPath, Body, Options,
- #state{use_proxy = UseProxy, is_ssl = Is_ssl}, ReqId) ->
- HttpVsn = http_vsn_string(get_value(http_vsn, Options, {1,1})),
- Fun1 = fun({X, Y}) when is_atom(X) ->
- {to_lower(atom_to_list(X)), X, Y};
- ({X, Y}) when is_list(X) ->
- {to_lower(X), X, Y}
- end,
- Headers_0 = [Fun1(X) || X <- Headers],
- Headers_1 =
- case lists:keysearch("content-length", 1, Headers_0) of
- false when (Body =:= [] orelse Body =:= <<>>) andalso
- (Method =:= post orelse Method =:= put) ->
- [{"content-length", "Content-Length", "0"} | Headers_0];
- false when is_binary(Body) orelse is_list(Body) ->
- [{"content-length", "Content-Length", integer_to_list(iolist_size(Body))} | Headers_0];
- _ ->
- %% Content-Length is already specified or Body is a
- %% function or function/state pair
- Headers_0
- end,
- {Headers_2, Body_1} =
- case is_chunked_encoding_specified(Options) of
- false ->
- {[{Y, Z} || {_, Y, Z} <- Headers_1], Body};
- true ->
- Chunk_size_1 = case get_value(transfer_encoding, Options) of
- chunked ->
- 5120;
- {chunked, Chunk_size} ->
- Chunk_size
- end,
- {[{Y, Z} || {X, Y, Z} <- Headers_1,
- X /= "content-length"] ++
- [{"Transfer-Encoding", "chunked"}],
- chunk_request_body(Body, Chunk_size_1)}
- end,
- Headers_3 = case lists:member({include_ibrowse_req_id, true}, Options) of
- true ->
- [{"x-ibrowse-request-id", io_lib:format("~1000.p",[ReqId])} | Headers_2];
- false ->
- Headers_2
- end,
- Headers_4 = cons_headers(Headers_3),
- Uri = case get_value(use_absolute_uri, Options, false) or UseProxy of
- true ->
- case Is_ssl of
- true ->
- RelPath;
- false ->
- AbsPath
- end;
- false ->
- RelPath
- end,
- {[method(Method), " ", Uri, " ", HttpVsn, crnl(), Headers_4, crnl()], Body_1}.
-
-is_chunked_encoding_specified(Options) ->
- case get_value(transfer_encoding, Options, false) of
- false ->
- false;
- {chunked, _} ->
- true;
- chunked ->
- true
- end.
-
-http_vsn_string({0,9}) -> "HTTP/0.9";
-http_vsn_string({1,0}) -> "HTTP/1.0";
-http_vsn_string({1,1}) -> "HTTP/1.1".
-
-cons_headers(Headers) ->
- cons_headers(Headers, []).
-cons_headers([], Acc) ->
- encode_headers(Acc);
-cons_headers([{basic_auth, {U,P}} | T], Acc) ->
- cons_headers(T, [{"Authorization",
- ["Basic ", ibrowse_lib:encode_base64(U++":"++P)]} | Acc]);
-cons_headers([{cookie, Cookie} | T], Acc) ->
- cons_headers(T, [{"Cookie", Cookie} | Acc]);
-cons_headers([{content_length, L} | T], Acc) ->
- cons_headers(T, [{"Content-Length", L} | Acc]);
-cons_headers([{content_type, L} | T], Acc) ->
- cons_headers(T, [{"Content-Type", L} | Acc]);
-cons_headers([H | T], Acc) ->
- cons_headers(T, [H | Acc]).
-
-encode_headers(L) ->
- encode_headers(L, []).
-encode_headers([{http_vsn, _Val} | T], Acc) ->
- encode_headers(T, Acc);
-encode_headers([{Name,Val} | T], Acc) when is_list(Name) ->
- encode_headers(T, [[Name, ": ", fmt_val(Val), crnl()] | Acc]);
-encode_headers([{Name,Val} | T], Acc) when is_atom(Name) ->
- encode_headers(T, [[atom_to_list(Name), ": ", fmt_val(Val), crnl()] | Acc]);
-encode_headers([], Acc) ->
- lists:reverse(Acc).
-
-chunk_request_body(Body, _ChunkSize) when is_tuple(Body) orelse
- is_function(Body) ->
- Body;
-chunk_request_body(Body, ChunkSize) ->
- chunk_request_body(Body, ChunkSize, []).
-
-chunk_request_body(Body, _ChunkSize, Acc) when Body == <<>>; Body == [] ->
- LastChunk = "0\r\n",
- lists:reverse(["\r\n", LastChunk | Acc]);
-chunk_request_body(Body, ChunkSize, Acc) when is_binary(Body),
- size(Body) >= ChunkSize ->
- <<ChunkBody:ChunkSize/binary, Rest/binary>> = Body,
- Chunk = [?dec2hex(ChunkSize),"\r\n",
- ChunkBody, "\r\n"],
- chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
-chunk_request_body(Body, _ChunkSize, Acc) when is_binary(Body) ->
- BodySize = size(Body),
- Chunk = [?dec2hex(BodySize),"\r\n",
- Body, "\r\n"],
- LastChunk = "0\r\n",
- lists:reverse(["\r\n", LastChunk, Chunk | Acc]);
-chunk_request_body(Body, ChunkSize, Acc) when length(Body) >= ChunkSize ->
- {ChunkBody, Rest} = split_list_at(Body, ChunkSize),
- Chunk = [?dec2hex(ChunkSize),"\r\n",
- ChunkBody, "\r\n"],
- chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
-chunk_request_body(Body, _ChunkSize, Acc) when is_list(Body) ->
- BodySize = length(Body),
- Chunk = [?dec2hex(BodySize),"\r\n",
- Body, "\r\n"],
- LastChunk = "0\r\n",
- lists:reverse(["\r\n", LastChunk, Chunk | Acc]).
-
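-%% Worked example of the framing above (illustrative; not part of the
-%% original module): chunk_request_body(<<"hello">>, 4) flattens to the
-%% wire form:
-%%
-%%   4\r\nhell\r\n1\r\no\r\n0\r\n\r\n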
-
-parse_response(_Data, #state{cur_req = undefined}=State) ->
- State#state{status = idle};
-parse_response(Data, #state{reply_buffer = Acc, reqs = Reqs,
- cur_req = CurReq} = State) ->
- #request{from=From, stream_to=StreamTo, req_id=ReqId,
- method=Method, response_format = Resp_format,
- options = Options
- } = CurReq,
- MaxHeaderSize = ibrowse:get_config_value(max_headers_size, infinity),
- case scan_header(Acc, Data) of
- {yes, Headers, Data_1} ->
- do_trace("Recvd Header Data -> ~s~n----~n", [Headers]),
- do_trace("Recvd headers~n--- Headers Begin ---~n~s~n--- Headers End ---~n~n", [Headers]),
- {HttpVsn, StatCode, Headers_1, Status_line, Raw_headers} = parse_headers(Headers),
- do_trace("HttpVsn: ~p StatusCode: ~p Headers_1 -> ~1000.p~n", [HttpVsn, StatCode, Headers_1]),
- LCHeaders = [{to_lower(X), Y} || {X,Y} <- Headers_1],
- ConnClose = to_lower(get_value("connection", LCHeaders, "false")),
- IsClosing = is_connection_closing(HttpVsn, ConnClose),
- case IsClosing of
- true ->
- shutting_down(State);
- false ->
- ok
- end,
- Give_raw_headers = get_value(give_raw_headers, Options, false),
- State_1 = case Give_raw_headers of
- true ->
- State#state{recvd_headers=Headers_1, status=get_body,
- reply_buffer = <<>>,
- status_line = Status_line,
- raw_headers = Raw_headers,
- http_status_code=StatCode, is_closing=IsClosing};
- false ->
- State#state{recvd_headers=Headers_1, status=get_body,
- reply_buffer = <<>>,
- http_status_code=StatCode, is_closing=IsClosing}
- end,
- put(conn_close, ConnClose),
- TransferEncoding = to_lower(get_value("transfer-encoding", LCHeaders, "false")),
- case get_value("content-length", LCHeaders, undefined) of
- _ when Method == connect,
- hd(StatCode) == $2 ->
- cancel_timer(State#state.send_timer),
- {_, Reqs_1} = queue:out(Reqs),
- upgrade_to_ssl(set_cur_request(State#state{reqs = Reqs_1,
- recvd_headers = [],
- status = idle
- }));
- _ when Method == connect ->
- {_, Reqs_1} = queue:out(Reqs),
- do_error_reply(State#state{reqs = Reqs_1},
- {error, proxy_tunnel_failed}),
- {error, proxy_tunnel_failed};
- _ when Method == head ->
- {_, Reqs_1} = queue:out(Reqs),
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
- {ok, StatCode, Headers_1, []}),
- cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
- State_2 = reset_state(State_1_1),
- State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
- parse_response(Data_1, State_3);
- _ when hd(StatCode) =:= $1 ->
- %% No message body is expected. Server may send
- %% one or more 1XX responses before a proper
- %% response.
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- do_trace("Recvd a status code of ~p. Ignoring and waiting for a proper response~n", [StatCode]),
- parse_response(Data_1, State_1#state{recvd_headers = [],
- status = get_header});
- _ when StatCode =:= "204";
- StatCode =:= "304" ->
- %% No message body is expected for these Status Codes.
- %% RFC2616 - Sec 4.4
- {_, Reqs_1} = queue:out(Reqs),
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
- {ok, StatCode, Headers_1, []}),
- cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
- State_2 = reset_state(State_1_1),
- State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
- parse_response(Data_1, State_3);
- _ when TransferEncoding =:= "chunked" ->
- do_trace("Chunked encoding detected...~n",[]),
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- case parse_11_response(Data_1, State_1#state{transfer_encoding=chunked,
- chunk_size=chunk_start,
- reply_buffer = <<>>}) of
- {error, Reason} ->
- fail_pipelined_requests(State_1,
- {error, {Reason,
- {stat_code, StatCode}, Headers_1}}),
- {error, Reason};
- State_2 ->
- State_2
- end;
- undefined when HttpVsn =:= "HTTP/1.0";
- ConnClose =:= "close" ->
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- State_1#state{reply_buffer = Data_1};
- undefined ->
- fail_pipelined_requests(State_1,
- {error, {content_length_undefined,
- {stat_code, StatCode}, Headers}}),
- {error, content_length_undefined};
- V ->
- case catch list_to_integer(V) of
- V_1 when is_integer(V_1), V_1 >= 0 ->
- send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
- do_trace("Recvd Content-Length of ~p~n", [V_1]),
- State_2 = State_1#state{rep_buf_size=0,
- reply_buffer = <<>>,
- content_length=V_1},
- case parse_11_response(Data_1, State_2) of
- {error, Reason} ->
- fail_pipelined_requests(State_1,
- {error, {Reason,
- {stat_code, StatCode}, Headers_1}}),
- {error, Reason};
- State_3 ->
- State_3
- end;
- _ ->
- fail_pipelined_requests(State_1,
- {error, {content_length_undefined,
- {stat_code, StatCode}, Headers}}),
- {error, content_length_undefined}
- end
- end;
- {no, Acc_1} when MaxHeaderSize == infinity ->
- State#state{reply_buffer = Acc_1};
- {no, Acc_1} when size(Acc_1) < MaxHeaderSize ->
- State#state{reply_buffer = Acc_1};
- {no, _Acc_1} ->
- fail_pipelined_requests(State, {error, max_headers_size_exceeded}),
- {error, max_headers_size_exceeded}
- end.
-
-upgrade_to_ssl(#state{socket = Socket,
- connect_timeout = Conn_timeout,
- ssl_options = Ssl_options,
- tunnel_setup_queue = Q} = State) ->
- case ssl:connect(Socket, Ssl_options, Conn_timeout) of
- {ok, Ssl_socket} ->
- do_trace("Upgraded to SSL socket!!~n", []),
- State_1 = State#state{socket = Ssl_socket,
- proxy_tunnel_setup = done},
- send_queued_requests(lists:reverse(Q), State_1);
- Err ->
- do_trace("Upgrade to SSL socket failed. Reson: ~p~n", [Err]),
- do_error_reply(State, {error, {send_failed, Err}}),
- {error, send_failed}
- end.
-
-send_queued_requests([], State) ->
- do_trace("Sent all queued requests via SSL connection~n", []),
- State#state{tunnel_setup_queue = []};
-send_queued_requests([{From, Url, Headers, Method, Body, Options, Timeout} | Q],
- State) ->
- case send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State) of
- {noreply, State_1} ->
- send_queued_requests(Q, State_1);
- Err ->
- do_trace("Error sending queued SSL request: ~n"
- "URL : ~s~n"
- "Method : ~p~n"
- "Headers : ~p~n", [Url, Method, Headers]),
- do_error_reply(State, {error, {send_failed, Err}}),
- {error, send_failed}
- end.
-
-is_connection_closing("HTTP/0.9", _) -> true;
-is_connection_closing(_, "close") -> true;
-is_connection_closing("HTTP/1.0", "false") -> true;
-is_connection_closing(_, _) -> false.
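-
-%% A quick sketch of the expected results (illustrative; the "false"
-%% value mirrors what the caller appears to pass when no Connection
-%% header is present):
-%%   is_connection_closing("HTTP/1.0", "false")      -> true
-%%   is_connection_closing("HTTP/1.1", "close")      -> true
-%%   is_connection_closing("HTTP/1.1", "keep-alive") -> false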
-
-%% This clause determines the chunk size when given data from the beginning of the chunk
-parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = chunk_start,
- chunk_size_buffer = Chunk_sz_buf
- } = State) ->
- case scan_crlf(Chunk_sz_buf, DataRecvd) of
- {yes, ChunkHeader, Data_1} ->
- State_1 = maybe_accumulate_ce_data(State, <<ChunkHeader/binary, $\r, $\n>>),
- ChunkSize = parse_chunk_header(ChunkHeader),
- %%
- %% Do we have to preserve the chunk encoding when
- %% streaming? NO. This should be transparent to the client
- %% process. Chunked encoding only exists so that a server can
- %% efficiently stream a body whose length is not known up front.
- %%
- RemLen = size(Data_1),
- do_trace("Determined chunk size: ~p. Already recvd: ~p~n",
- [ChunkSize, RemLen]),
- parse_11_response(Data_1, State_1#state{chunk_size_buffer = <<>>,
- deleted_crlf = true,
- recvd_chunk_size = 0,
- chunk_size = ChunkSize});
- {no, Data_1} ->
- State#state{chunk_size_buffer = Data_1}
- end;
-
-%% This clause removes the CRLF between two chunks.
-%%
-parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = tbd,
- chunk_size_buffer = Buf
- } = State) ->
- case scan_crlf(Buf, DataRecvd) of
- {yes, _, NextChunk} ->
- State_1 = maybe_accumulate_ce_data(State, <<$\r, $\n>>),
- State_2 = State_1#state{chunk_size = chunk_start,
- chunk_size_buffer = <<>>,
- deleted_crlf = true},
- parse_11_response(NextChunk, State_2);
- {no, Data_1} ->
- State#state{chunk_size_buffer = Data_1}
- end;
-
-%% This clause deals with the end of a chunked transfer. ibrowse does
-%% not support Trailers in the Chunked Transfer encoding. Any trailer
-%% received is silently discarded.
-parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked, chunk_size = 0,
- cur_req = CurReq,
- deleted_crlf = DelCrlf,
- chunk_size_buffer = Trailer,
- reqs = Reqs} = State) ->
- do_trace("Detected end of chunked transfer...~n", []),
- DataRecvd_1 = case DelCrlf of
- false ->
- DataRecvd;
- true ->
- <<$\r, $\n, DataRecvd/binary>>
- end,
- case scan_header(Trailer, DataRecvd_1) of
- {yes, TEHeaders, Rem} ->
- {_, Reqs_1} = queue:out(Reqs),
- State_1 = maybe_accumulate_ce_data(State, <<TEHeaders/binary, $\r, $\n>>),
- State_2 = handle_response(CurReq,
- State_1#state{reqs = Reqs_1}),
- parse_response(Rem, reset_state(State_2));
- {no, Rem} ->
- accumulate_response(<<>>, State#state{chunk_size_buffer = Rem, deleted_crlf = false})
- end;
-
-%% This clause extracts a chunk, given the size.
-parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = CSz,
- recvd_chunk_size = Recvd_csz,
- rep_buf_size = RepBufSz} = State) ->
- NeedBytes = CSz - Recvd_csz,
- DataLen = size(DataRecvd),
- do_trace("Recvd more data: size: ~p. NeedBytes: ~p~n", [DataLen, NeedBytes]),
- case DataLen >= NeedBytes of
- true ->
- {RemChunk, RemData} = split_binary(DataRecvd, NeedBytes),
- do_trace("Recvd another chunk...~p~n", [RemChunk]),
- do_trace("RemData -> ~p~n", [RemData]),
- case accumulate_response(RemChunk, State) of
- {error, Reason} ->
- do_trace("Error accumulating response --> ~p~n", [Reason]),
- {error, Reason};
- #state{} = State_1 ->
- State_2 = State_1#state{chunk_size=tbd},
- parse_11_response(RemData, State_2)
- end;
- false ->
- accumulate_response(DataRecvd,
- State#state{rep_buf_size = RepBufSz + DataLen,
- recvd_chunk_size = Recvd_csz + DataLen})
- end;
-
-%% This clause extracts the body when a Content-Length is specified.
-parse_11_response(DataRecvd,
- #state{content_length=CL, rep_buf_size=RepBufSz,
- reqs=Reqs}=State) ->
- NeedBytes = CL - RepBufSz,
- DataLen = size(DataRecvd),
- case DataLen >= NeedBytes of
- true ->
- {RemBody, Rem} = split_binary(DataRecvd, NeedBytes),
- {_, Reqs_1} = queue:out(Reqs),
- State_1 = accumulate_response(RemBody, State),
- State_2 = handle_response(State_1#state.cur_req, State_1#state{reqs=Reqs_1}),
- State_3 = reset_state(State_2),
- parse_response(Rem, State_3);
- false ->
- accumulate_response(DataRecvd, State#state{rep_buf_size = (RepBufSz+DataLen)})
- end.
-
-maybe_accumulate_ce_data(#state{cur_req = #request{preserve_chunked_encoding = false}} = State, _) ->
- State;
-maybe_accumulate_ce_data(State, Data) ->
- accumulate_response(Data, State).
-
-handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format,
- save_response_to_file = SaveResponseToFile,
- tmp_file_name = TmpFilename,
- tmp_file_fd = Fd,
- options = Options
- },
- #state{http_status_code = SCode,
- status_line = Status_line,
- raw_headers = Raw_headers,
- send_timer = ReqTimer,
- reply_buffer = RepBuf,
- recvd_headers = RespHeaders}=State) when SaveResponseToFile /= false ->
- Body = RepBuf,
- case Fd of
- undefined ->
- ok;
- _ ->
- ok = file:close(Fd)
- end,
- ResponseBody = case TmpFilename of
- undefined ->
- Body;
- _ ->
- {file, TmpFilename}
- end,
- {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(RespHeaders, Raw_headers, Options),
- Reply = case get_value(give_raw_headers, Options, false) of
- true ->
- {ok, Status_line, Raw_headers_1, ResponseBody};
- false ->
- {ok, SCode, Resp_headers_1, ResponseBody}
- end,
- State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
- cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
- set_cur_request(State_1);
-handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format,
- options = Options},
- #state{http_status_code = SCode,
- status_line = Status_line,
- raw_headers = Raw_headers,
- recvd_headers = Resp_headers,
- reply_buffer = RepBuf,
- send_timer = ReqTimer} = State) ->
- Body = RepBuf,
- {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(Resp_headers, Raw_headers, Options),
- Reply = case get_value(give_raw_headers, Options, false) of
- true ->
- {ok, Status_line, Raw_headers_1, Body};
- false ->
- {ok, SCode, Resp_headers_1, Body}
- end,
- State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
- cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
- set_cur_request(State_1).
-
-reset_state(State) ->
- State#state{status = get_header,
- rep_buf_size = 0,
- streamed_size = 0,
- content_length = undefined,
- reply_buffer = <<>>,
- chunk_size_buffer = <<>>,
- recvd_headers = [],
- status_line = undefined,
- raw_headers = undefined,
- deleted_crlf = false,
- http_status_code = undefined,
- chunk_size = undefined,
- transfer_encoding = undefined
- }.
-
-set_cur_request(#state{reqs = Reqs, socket = Socket} = State) ->
- case queue:to_list(Reqs) of
- [] ->
- State#state{cur_req = undefined};
- [#request{caller_controls_socket = Ccs} = NextReq | _] ->
- case Ccs of
- true ->
- do_setopts(Socket, [{active, once}], State);
- _ ->
- ok
- end,
- State#state{cur_req = NextReq}
- end.
-
-parse_headers(Headers) ->
- case scan_crlf(Headers) of
- {yes, StatusLine, T} ->
- parse_headers(StatusLine, T);
- {no, StatusLine} ->
- parse_headers(StatusLine, <<>>)
- end.
-
-parse_headers(StatusLine, Headers) ->
- Headers_1 = parse_headers_1(Headers),
- case parse_status_line(StatusLine) of
- {ok, HttpVsn, StatCode, _Msg} ->
- put(http_prot_vsn, HttpVsn),
- {HttpVsn, StatCode, Headers_1, StatusLine, Headers};
- _ -> %% An HTTP 0.9 response?
- put(http_prot_vsn, "HTTP/0.9"),
- {"HTTP/0.9", undefined, Headers, StatusLine, Headers}
- end.
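-
-%% For example (illustrative):
-%%   parse_headers(<<"HTTP/1.1 200 OK\r\nServer: foo">>)
-%%       -> {"HTTP/1.1", "200", [{"Server", "foo"}],
-%%           <<"HTTP/1.1 200 OK">>, <<"Server: foo">>}
-%% i.e. {Version, Status_code, Parsed_headers, Raw_status_line, Raw_headers}.
-%% The protocol version is also cached in the process dictionary.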
-
-% From RFC 2616
-%
-% HTTP/1.1 header field values can be folded onto multiple lines if
-% the continuation line begins with a space or horizontal tab. All
-% linear white space, including folding, has the same semantics as
-% SP. A recipient MAY replace any linear white space with a single
-% SP before interpreting the field value or forwarding the message
-% downstream.
-parse_headers_1(B) when is_binary(B) ->
- parse_headers_1(binary_to_list(B));
-parse_headers_1(String) ->
- parse_headers_1(String, [], []).
-
-parse_headers_1([$\n, H |T], [$\r | L], Acc) when H =:= 32;
- H =:= $\t ->
- parse_headers_1(lists:dropwhile(fun(X) ->
- is_whitespace(X)
- end, T), [32 | L], Acc);
-parse_headers_1([$\n|T], [$\r | L], Acc) ->
- case parse_header(lists:reverse(L)) of
- invalid ->
- parse_headers_1(T, [], Acc);
- NewHeader ->
- parse_headers_1(T, [], [NewHeader | Acc])
- end;
-parse_headers_1([H|T], L, Acc) ->
- parse_headers_1(T, [H|L], Acc);
-parse_headers_1([], [], Acc) ->
- lists:reverse(Acc);
-parse_headers_1([], L, Acc) ->
- Acc_1 = case parse_header(lists:reverse(L)) of
- invalid ->
- Acc;
- NewHeader ->
- [NewHeader | Acc]
- end,
- lists:reverse(Acc_1).
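-
-%% Folding in action (illustrative): a continuation line beginning with
-%% SP or TAB is joined to its header value with a single space:
-%%   parse_headers_1("Subject: hello\r\n world\r\n")
-%%       -> [{"Subject", "hello world"}]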
-
-parse_status_line(Line) when is_binary(Line) ->
- parse_status_line(binary_to_list(Line));
-parse_status_line(Line) ->
- parse_status_line(Line, get_prot_vsn, [], []).
-parse_status_line([32 | T], get_prot_vsn, ProtVsn, StatCode) ->
- parse_status_line(T, get_status_code, ProtVsn, StatCode);
-parse_status_line([32 | T], get_status_code, ProtVsn, StatCode) ->
- {ok, lists:reverse(ProtVsn), lists:reverse(StatCode), T};
-parse_status_line([], get_status_code, ProtVsn, StatCode) ->
- {ok, lists:reverse(ProtVsn), lists:reverse(StatCode), []};
-parse_status_line([H | T], get_prot_vsn, ProtVsn, StatCode) ->
- parse_status_line(T, get_prot_vsn, [H|ProtVsn], StatCode);
-parse_status_line([H | T], get_status_code, ProtVsn, StatCode) ->
- parse_status_line(T, get_status_code, ProtVsn, [H | StatCode]);
-parse_status_line([], _, _, _) ->
- http_09.
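-
-%% Illustrative results:
-%%   parse_status_line("HTTP/1.1 200 OK") -> {ok, "HTTP/1.1", "200", "OK"}
-%%   parse_status_line("bogus")           -> http_09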
-
-parse_header(L) ->
- parse_header(L, []).
-
-parse_header([$: | V], Acc) ->
- {lists:reverse(Acc), string:strip(V)};
-parse_header([H | T], Acc) ->
- parse_header(T, [H | Acc]);
-parse_header([], _) ->
- invalid.
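-
-%% For example (illustrative): surrounding spaces in the value are
-%% stripped, and a line without a colon is rejected:
-%%   parse_header("Content-Type: text/html") -> {"Content-Type", "text/html"}
-%%   parse_header("malformed line")          -> invalid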
-
-scan_header(Bin) ->
- case get_crlf_crlf_pos(Bin, 0) of
- {yes, Pos} ->
- {Headers, <<_:4/binary, Body/binary>>} = split_binary(Bin, Pos),
- {yes, Headers, Body};
- no ->
- {no, Bin}
- end.
-
-scan_header(Bin1, Bin2) when size(Bin1) < 4 ->
- scan_header(<<Bin1/binary, Bin2/binary>>);
-scan_header(Bin1, <<>>) ->
- scan_header(Bin1);
-scan_header(Bin1, Bin2) ->
- Bin1_already_scanned_size = size(Bin1) - 4,
- <<Headers_prefix:Bin1_already_scanned_size/binary, Rest/binary>> = Bin1,
- Bin_to_scan = <<Rest/binary, Bin2/binary>>,
- case get_crlf_crlf_pos(Bin_to_scan, 0) of
- {yes, Pos} ->
- {Headers_suffix, <<_:4/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
- {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
- no ->
- {no, <<Bin1/binary, Bin2/binary>>}
- end.
-
-get_crlf_crlf_pos(<<$\r, $\n, $\r, $\n, _/binary>>, Pos) -> {yes, Pos};
-get_crlf_crlf_pos(<<_, Rest/binary>>, Pos) -> get_crlf_crlf_pos(Rest, Pos + 1);
-get_crlf_crlf_pos(<<>>, _) -> no.
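-
-%% A minimal sketch of the header/body split (illustrative):
-%%   scan_header(<<"HTTP/1.1 200 OK\r\nA: b\r\n\r\nbody">>)
-%%       -> {yes, <<"HTTP/1.1 200 OK\r\nA: b">>, <<"body">>}
-%%   scan_header(<<"no blank line yet">>)
-%%       -> {no, <<"no blank line yet">>}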
-
-scan_crlf(Bin) ->
- case get_crlf_pos(Bin) of
- {yes, Pos} ->
- {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin, Pos),
- {yes, Prefix, Suffix};
- no ->
- {no, Bin}
- end.
-
-scan_crlf(<<>>, Bin2) ->
- scan_crlf(Bin2);
-scan_crlf(Bin1, Bin2) when size(Bin1) < 2 ->
- scan_crlf(<<Bin1/binary, Bin2/binary>>);
-scan_crlf(Bin1, Bin2) ->
- scan_crlf_1(size(Bin1) - 2, Bin1, Bin2).
-
-scan_crlf_1(Bin1_head_size, Bin1, Bin2) ->
- <<Bin1_head:Bin1_head_size/binary, Bin1_tail/binary>> = Bin1,
- Bin3 = <<Bin1_tail/binary, Bin2/binary>>,
- case get_crlf_pos(Bin3) of
- {yes, Pos} ->
- {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin3, Pos),
- {yes, list_to_binary([Bin1_head, Prefix]), Suffix};
- no ->
- {no, list_to_binary([Bin1, Bin2])}
- end.
-
-get_crlf_pos(Bin) ->
- get_crlf_pos(Bin, 0).
-
-get_crlf_pos(<<$\r, $\n, _/binary>>, Pos) -> {yes, Pos};
-get_crlf_pos(<<_, Rest/binary>>, Pos) -> get_crlf_pos(Rest, Pos + 1);
-get_crlf_pos(<<>>, _) -> no.
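-
-%% Illustrative: the two-argument form exists so that a CRLF straddling
-%% two successive TCP payloads is still found without rescanning Bin1:
-%%   scan_crlf(<<"abc\r\ndef">>)         -> {yes, <<"abc">>, <<"def">>}
-%%   scan_crlf(<<"abc\r">>, <<"\ndef">>) -> {yes, <<"abc">>, <<"def">>}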
-
-fmt_val(L) when is_list(L) -> L;
-fmt_val(I) when is_integer(I) -> integer_to_list(I);
-fmt_val(A) when is_atom(A) -> atom_to_list(A);
-fmt_val(Term) -> io_lib:format("~p", [Term]).
-
-crnl() -> "\r\n".
-
-method(get) -> "GET";
-method(post) -> "POST";
-method(head) -> "HEAD";
-method(options) -> "OPTIONS";
-method(put) -> "PUT";
-method(delete) -> "DELETE";
-method(trace) -> "TRACE";
-method(mkcol) -> "MKCOL";
-method(propfind) -> "PROPFIND";
-method(proppatch) -> "PROPPATCH";
-method(lock) -> "LOCK";
-method(unlock) -> "UNLOCK";
-method(move) -> "MOVE";
-method(copy) -> "COPY";
-method(connect) -> "CONNECT".
-
-%% From RFC 2616
-%%
-% The chunked encoding modifies the body of a message in order to
-% transfer it as a series of chunks, each with its own size indicator,
-% followed by an OPTIONAL trailer containing entity-header
-% fields. This allows dynamically produced content to be transferred
-% along with the information necessary for the recipient to verify
-% that it has received the full message.
-% Chunked-Body = *chunk
-% last-chunk
-% trailer
-% CRLF
-% chunk = chunk-size [ chunk-extension ] CRLF
-% chunk-data CRLF
-% chunk-size = 1*HEX
-% last-chunk = 1*("0") [ chunk-extension ] CRLF
-% chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
-% chunk-ext-name = token
-% chunk-ext-val = token | quoted-string
-% chunk-data = chunk-size(OCTET)
-% trailer = *(entity-header CRLF)
-% The chunk-size field is a string of hex digits indicating the size
-% of the chunk. The chunked encoding is ended by any chunk whose size
-% is zero, followed by the trailer, which is terminated by an empty
-% line.
-%%
-%% The parsing implemented here discards all chunk extensions. It also
-%% strips trailing spaces from the chunk size fields as Apache 1.3.27 was
-%% sending them.
-parse_chunk_header(ChunkHeader) ->
- parse_chunk_header(ChunkHeader, []).
-
-parse_chunk_header(<<$;, _/binary>>, Acc) ->
- hexlist_to_integer(lists:reverse(Acc));
-parse_chunk_header(<<H, T/binary>>, Acc) ->
- case is_whitespace(H) of
- true ->
- parse_chunk_header(T, Acc);
- false ->
- parse_chunk_header(T, [H | Acc])
- end;
-parse_chunk_header(<<>>, Acc) ->
- hexlist_to_integer(lists:reverse(Acc)).
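-
-%% Illustrative chunk-size parses (extensions discarded, as noted above):
-%%   parse_chunk_header(<<"1a">>)          -> 26
-%%   parse_chunk_header(<<"1a; ext=val">>) -> 26
-%%   parse_chunk_header(<<"0">>)           -> 0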
-
-is_whitespace($\s) -> true;
-is_whitespace($\r) -> true;
-is_whitespace($\n) -> true;
-is_whitespace($\t) -> true;
-is_whitespace(_) -> false.
-
-send_async_headers(_ReqId, undefined, _, _State) ->
- ok;
-send_async_headers(ReqId, StreamTo, Give_raw_headers,
- #state{status_line = Status_line, raw_headers = Raw_headers,
- recvd_headers = Headers, http_status_code = StatCode,
- cur_req = #request{options = Opts}
- }) ->
- {Headers_1, Raw_headers_1} = maybe_add_custom_headers(Headers, Raw_headers, Opts),
- case Give_raw_headers of
- false ->
- catch StreamTo ! {ibrowse_async_headers, ReqId, StatCode, Headers_1};
- true ->
- catch StreamTo ! {ibrowse_async_headers, ReqId, Status_line, Raw_headers_1}
- end.
-
-maybe_add_custom_headers(Headers, Raw_headers, Opts) ->
- Custom_headers = get_value(add_custom_headers, Opts, []),
- Headers_1 = Headers ++ Custom_headers,
- Raw_headers_1 = case Custom_headers of
- [_ | _] when is_binary(Raw_headers) ->
- Custom_headers_bin = list_to_binary(string:join([[X, $:, Y] || {X, Y} <- Custom_headers], "\r\n")),
- <<Raw_headers/binary, "\r\n", Custom_headers_bin/binary>>;
- _ ->
- Raw_headers
- end,
- {Headers_1, Raw_headers_1}.
-
-format_response_data(Resp_format, Body) ->
- case Resp_format of
- list when is_list(Body) ->
- flatten(Body);
- list when is_binary(Body) ->
- binary_to_list(Body);
- binary when is_list(Body) ->
- list_to_binary(Body);
- _ ->
- %% This is to cater for sending messages such as
- %% {chunk_start, _}, chunk_end etc
- Body
- end.
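-
-%% For example (illustrative):
-%%   format_response_data(list, <<"abc">>)           -> "abc"
-%%   format_response_data(binary, "abc")             -> <<"abc">>
-%%   format_response_data(binary, {chunk_start, 26}) -> {chunk_start, 26}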
-
-do_reply(State, From, undefined, _, Resp_format, {ok, St_code, Headers, Body}) ->
- Msg_1 = {ok, St_code, Headers, format_response_data(Resp_format, Body)},
- gen_server:reply(From, Msg_1),
- dec_pipeline_counter(State);
-do_reply(State, From, undefined, _, _, Msg) ->
- gen_server:reply(From, Msg),
- dec_pipeline_counter(State);
-do_reply(#state{prev_req_id = Prev_req_id} = State,
- _From, StreamTo, ReqId, Resp_format, {ok, _, _, Body}) ->
- State_1 = dec_pipeline_counter(State),
- case Body of
- [] ->
- ok;
- _ ->
- Body_1 = format_response_data(Resp_format, Body),
- catch StreamTo ! {ibrowse_async_response, ReqId, Body_1}
- end,
- catch StreamTo ! {ibrowse_async_response_end, ReqId},
- %% We don't want to delete the Req-id to Pid mapping straightaway
- %% as the client may send a stream_next message just while we are
- %% sending back this ibrowse_async_response_end message. If we
- %% deleted this mapping straightaway, the caller will see a
- %% {error, unknown_req_id} when it calls ibrowse:stream_next/1. To
- %% get around this, we store the req id, and clear it after the
- %% next request. If there are weird combinations of stream,
- %% stream_once and sync requests on the same connection, it will
- %% take a while for the req_id-pid mapping to get cleared, but it
- %% should do no harm.
- ets:delete(ibrowse_stream, {req_id_pid, Prev_req_id}),
- State_1#state{prev_req_id = ReqId};
-do_reply(State, _From, StreamTo, ReqId, Resp_format, Msg) ->
- State_1 = dec_pipeline_counter(State),
- Msg_1 = format_response_data(Resp_format, Msg),
- catch StreamTo ! {ibrowse_async_response, ReqId, Msg_1},
- State_1.
-
-do_interim_reply(undefined, _, _ReqId, _Msg) ->
- ok;
-do_interim_reply(StreamTo, Response_format, ReqId, Msg) ->
- Msg_1 = format_response_data(Response_format, Msg),
- catch StreamTo ! {ibrowse_async_response, ReqId, Msg_1}.
-
-do_error_reply(#state{reqs = Reqs, tunnel_setup_queue = Tun_q} = State, Err) ->
- ReqList = queue:to_list(Reqs),
- lists:foreach(fun(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format}) ->
- ets:delete(ibrowse_stream, {req_id_pid, ReqId}),
- do_reply(State, From, StreamTo, ReqId, Resp_format, {error, Err})
- end, ReqList),
- lists:foreach(
- fun({From, _Url, _Headers, _Method, _Body, _Options, _Timeout}) ->
- do_reply(State, From, undefined, undefined, undefined, Err)
- end, Tun_q).
-
-fail_pipelined_requests(#state{reqs = Reqs, cur_req = CurReq} = State, Reply) ->
- {_, Reqs_1} = queue:out(Reqs),
- #request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format} = CurReq,
- State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
- do_error_reply(State_1#state{reqs = Reqs_1}, previous_request_failed).
-
-split_list_at(List, N) ->
- split_list_at(List, N, []).
-
-split_list_at([], _, Acc) ->
- {lists:reverse(Acc), []};
-split_list_at(List2, 0, List1) ->
- {lists:reverse(List1), List2};
-split_list_at([H | List2], N, List1) ->
- split_list_at(List2, N-1, [H | List1]).
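-
-%% For example (illustrative):
-%%   split_list_at([1, 2, 3, 4, 5], 2) -> {[1, 2], [3, 4, 5]}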
-
-hexlist_to_integer(List) ->
- hexlist_to_integer(lists:reverse(List), 1, 0).
-
-hexlist_to_integer([H | T], Multiplier, Acc) ->
- hexlist_to_integer(T, Multiplier*16, Multiplier*to_ascii(H) + Acc);
-hexlist_to_integer([], _, Acc) ->
- Acc.
-
-to_ascii($A) -> 10;
-to_ascii($a) -> 10;
-to_ascii($B) -> 11;
-to_ascii($b) -> 11;
-to_ascii($C) -> 12;
-to_ascii($c) -> 12;
-to_ascii($D) -> 13;
-to_ascii($d) -> 13;
-to_ascii($E) -> 14;
-to_ascii($e) -> 14;
-to_ascii($F) -> 15;
-to_ascii($f) -> 15;
-to_ascii($1) -> 1;
-to_ascii($2) -> 2;
-to_ascii($3) -> 3;
-to_ascii($4) -> 4;
-to_ascii($5) -> 5;
-to_ascii($6) -> 6;
-to_ascii($7) -> 7;
-to_ascii($8) -> 8;
-to_ascii($9) -> 9;
-to_ascii($0) -> 0.
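-
-%% Despite its name, to_ascii/1 maps a hex digit to its numeric value.
-%% Illustrative results for the pair of functions:
-%%   hexlist_to_integer("ff") -> 255
-%%   hexlist_to_integer("1A") -> 26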
-
-cancel_timer(undefined) -> ok;
-cancel_timer(Ref) -> _ = erlang:cancel_timer(Ref),
- ok.
-
-cancel_timer(Ref, {eat_message, Msg}) ->
- cancel_timer(Ref),
- receive
- Msg ->
- ok
- after 0 ->
- ok
- end.
-
-make_req_id() ->
- now().
-
-to_lower(Str) ->
- to_lower(Str, []).
-to_lower([H|T], Acc) when H >= $A, H =< $Z ->
- to_lower(T, [H+32|Acc]);
-to_lower([H|T], Acc) ->
- to_lower(T, [H|Acc]);
-to_lower([], Acc) ->
- lists:reverse(Acc).
-
-shutting_down(#state{lb_ets_tid = undefined}) ->
- ok;
-shutting_down(#state{lb_ets_tid = Tid,
- cur_pipeline_size = Sz}) ->
- catch ets:delete(Tid, {Sz, self()}).
-
-inc_pipeline_counter(#state{is_closing = true} = State) ->
- State;
-inc_pipeline_counter(#state{cur_pipeline_size = Pipe_sz} = State) ->
- State#state{cur_pipeline_size = Pipe_sz + 1}.
-
-dec_pipeline_counter(#state{is_closing = true} = State) ->
- State;
-dec_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
- State;
-dec_pipeline_counter(#state{cur_pipeline_size = Pipe_sz,
- lb_ets_tid = Tid} = State) ->
- ets:delete(Tid, {Pipe_sz, self()}),
- ets:insert(Tid, {{Pipe_sz - 1, self()}, []}),
- State#state{cur_pipeline_size = Pipe_sz - 1}.
-
-flatten([H | _] = L) when is_integer(H) ->
- L;
-flatten([H | _] = L) when is_list(H) ->
- lists:flatten(L);
-flatten([]) ->
- [].
-
-get_stream_chunk_size(Options) ->
- case lists:keysearch(stream_chunk_size, 1, Options) of
- {value, {_, V}} when V > 0 ->
- V;
- _ ->
- ?DEFAULT_STREAM_CHUNK_SIZE
- end.
-
-set_inac_timer(State) ->
- cancel_timer(State#state.inactivity_timer_ref),
- set_inac_timer(State#state{inactivity_timer_ref = undefined},
- get_inac_timeout(State)).
-
-set_inac_timer(State, Timeout) when is_integer(Timeout) ->
- Ref = erlang:send_after(Timeout, self(), timeout),
- State#state{inactivity_timer_ref = Ref};
-set_inac_timer(State, _) ->
- State.
-
-get_inac_timeout(#state{cur_req = #request{options = Opts}}) ->
- get_value(inactivity_timeout, Opts, infinity);
-get_inac_timeout(#state{cur_req = undefined}) ->
- case ibrowse:get_config_value(inactivity_timeout, undefined) of
- Val when is_integer(Val) ->
- Val;
- _ ->
- case application:get_env(ibrowse, inactivity_timeout) of
- {ok, Val} when is_integer(Val), Val > 0 ->
- Val;
- _ ->
- 10000
- end
- end.
-
-trace_request(Req) ->
- case get(my_trace_flag) of
- true ->
- %% Avoid the binary operations if trace is not on...
- NReq = to_binary(Req),
- do_trace("Sending request: ~n"
- "--- Request Begin ---~n~s~n"
- "--- Request End ---~n", [NReq]);
- _ -> ok
- end.
-
-trace_request_body(Body) ->
- case get(my_trace_flag) of
- true ->
- %% Avoid the binary operations if trace is not on...
- NBody = to_binary(Body),
- case size(NBody) > 1024 of
- true ->
- ok;
- false ->
- do_trace("Sending request body: ~n"
- "--- Request Body Begin ---~n~s~n"
- "--- Request Body End ---~n", [NBody])
- end;
- false ->
- ok
- end.
-
-to_binary(X) when is_list(X) -> list_to_binary(X);
-to_binary(X) when is_binary(X) -> X.
diff --git a/1.1.x/src/ibrowse/ibrowse_lb.erl b/1.1.x/src/ibrowse/ibrowse_lb.erl
deleted file mode 100644
index 0e001d48..00000000
--- a/1.1.x/src/ibrowse/ibrowse_lb.erl
+++ /dev/null
@@ -1,235 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% File : ibrowse_lb.erl
-%%% Author : chandru <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
-%%%
-%%% Created : 6 Mar 2008 by chandru <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%%-------------------------------------------------------------------
--module(ibrowse_lb).
--author(chandru).
--behaviour(gen_server).
-%%--------------------------------------------------------------------
-%% Include files
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% External exports
--export([
- start_link/1,
- spawn_connection/5,
- stop/1
- ]).
-
-%% gen_server callbacks
--export([
- init/1,
- handle_call/3,
- handle_cast/2,
- handle_info/2,
- terminate/2,
- code_change/3
- ]).
-
--record(state, {parent_pid,
- ets_tid,
- host,
- port,
- max_sessions,
- max_pipeline_size,
- num_cur_sessions = 0}).
-
--include("ibrowse.hrl").
-
-%%====================================================================
-%% External functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Function: start_link/1
-%% Description: Starts the server
-%%--------------------------------------------------------------------
-start_link(Args) ->
- gen_server:start_link(?MODULE, Args, []).
-
-%%====================================================================
-%% Server functions
-%%====================================================================
-
-%%--------------------------------------------------------------------
-%% Function: init/1
-%% Description: Initiates the server
-%% Returns: {ok, State} |
-%% {ok, State, Timeout} |
-%% ignore |
-%% {stop, Reason}
-%%--------------------------------------------------------------------
-init([Host, Port]) ->
- process_flag(trap_exit, true),
- Max_sessions = ibrowse:get_config_value({max_sessions, Host, Port}, 10),
- Max_pipe_sz = ibrowse:get_config_value({max_pipeline_size, Host, Port}, 10),
- put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
- put(ibrowse_trace_token, ["LB: ", Host, $:, integer_to_list(Port)]),
- Tid = ets:new(ibrowse_lb, [public, ordered_set]),
- {ok, #state{parent_pid = whereis(ibrowse),
- host = Host,
- port = Port,
- ets_tid = Tid,
- max_pipeline_size = Max_pipe_sz,
- max_sessions = Max_sessions}}.
-
-spawn_connection(Lb_pid, Url,
- Max_sessions,
- Max_pipeline_size,
- SSL_options)
- when is_pid(Lb_pid),
- is_record(Url, url),
- is_integer(Max_pipeline_size),
- is_integer(Max_sessions) ->
- gen_server:call(Lb_pid,
- {spawn_connection, Url, Max_sessions, Max_pipeline_size, SSL_options}).
-
-stop(Lb_pid) ->
- case catch gen_server:call(Lb_pid, stop) of
- {'EXIT', {timeout, _}} ->
- exit(Lb_pid, kill);
- ok ->
- ok
- end.
-%%--------------------------------------------------------------------
-%% Function: handle_call/3
-%% Description: Handling call messages
-%% Returns: {reply, Reply, State} |
-%% {reply, Reply, State, Timeout} |
-%% {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, Reply, State} | (terminate/2 is called)
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-% handle_call({spawn_connection, _Url, Max_sess, Max_pipe, _}, _From,
-% #state{max_sessions = Max_sess,
-% ets_tid = Tid,
-% max_pipeline_size = Max_pipe_sz,
-% num_cur_sessions = Num} = State)
-% when Num >= Max_sess ->
-% Reply = find_best_connection(Tid),
-% {reply, sorry_dude_reuse, State};
-
-%% Update max_sessions in #state with supplied value
-handle_call({spawn_connection, _Url, Max_sess, Max_pipe, _}, _From,
- #state{num_cur_sessions = Num} = State)
- when Num >= Max_sess ->
- State_1 = maybe_create_ets(State),
- Reply = find_best_connection(State_1#state.ets_tid, Max_pipe),
- {reply, Reply, State_1#state{max_sessions = Max_sess}};
-
-handle_call({spawn_connection, Url, _Max_sess, _Max_pipe, SSL_options}, _From,
- #state{num_cur_sessions = Cur} = State) ->
- State_1 = maybe_create_ets(State),
- Tid = State_1#state.ets_tid,
- {ok, Pid} = ibrowse_http_client:start_link({Tid, Url, SSL_options}),
- ets:insert(Tid, {{1, Pid}, []}),
- {reply, {ok, Pid}, State_1#state{num_cur_sessions = Cur + 1}};
-
-handle_call(stop, _From, #state{ets_tid = undefined} = State) ->
- gen_server:reply(_From, ok),
- {stop, normal, State};
-
-handle_call(stop, _From, #state{ets_tid = Tid} = State) ->
- ets:foldl(fun({{_, Pid}, _}, Acc) ->
- ibrowse_http_client:stop(Pid),
- Acc
- end, [], Tid),
- gen_server:reply(_From, ok),
- {stop, normal, State};
-
-handle_call(Request, _From, State) ->
- Reply = {unknown_request, Request},
- {reply, Reply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_cast/2
-%% Description: Handling cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_cast(_Msg, State) ->
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: handle_info/2
-%% Description: Handling all non call/cast messages
-%% Returns: {noreply, State} |
-%% {noreply, State, Timeout} |
-%% {stop, Reason, State} (terminate/2 is called)
-%%--------------------------------------------------------------------
-handle_info({'EXIT', Parent, _Reason}, #state{parent_pid = Parent} = State) ->
- {stop, normal, State};
-
-handle_info({'EXIT', _Pid, _Reason}, #state{ets_tid = undefined} = State) ->
- {noreply, State};
-
-handle_info({'EXIT', Pid, _Reason},
- #state{num_cur_sessions = Cur,
- ets_tid = Tid} = State) ->
- ets:match_delete(Tid, {{'_', Pid}, '_'}),
- Cur_1 = Cur - 1,
- State_1 = case Cur_1 of
- 0 ->
- ets:delete(Tid),
- State#state{ets_tid = undefined};
- _ ->
- State
- end,
- {noreply, State_1#state{num_cur_sessions = Cur_1}};
-
-handle_info({trace, Bool}, #state{ets_tid = undefined} = State) ->
- put(my_trace_flag, Bool),
- {noreply, State};
-
-handle_info({trace, Bool}, #state{ets_tid = Tid} = State) ->
- ets:foldl(fun({{_, Pid}, _}, Acc) when is_pid(Pid) ->
- catch Pid ! {trace, Bool},
- Acc;
- (_, Acc) ->
- Acc
- end, undefined, Tid),
- put(my_trace_flag, Bool),
- {noreply, State};
-
-handle_info(_Info, State) ->
- {noreply, State}.
-
-%%--------------------------------------------------------------------
-%% Function: terminate/2
-%% Description: Shutdown the server
-%% Returns: any (ignored by gen_server)
-%%--------------------------------------------------------------------
-terminate(_Reason, _State) ->
- ok.
-
-%%--------------------------------------------------------------------
-%% Func: code_change/3
-%% Purpose: Convert process state when code is changed
-%% Returns: {ok, NewState}
-%%--------------------------------------------------------------------
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-%%--------------------------------------------------------------------
-%%% Internal functions
-%%--------------------------------------------------------------------
-find_best_connection(Tid, Max_pipe) ->
- case ets:first(Tid) of
- {Cur_sz, Pid} when Cur_sz < Max_pipe ->
- ets:delete(Tid, {Cur_sz, Pid}),
- ets:insert(Tid, {{Cur_sz + 1, Pid}, []}),
- {ok, Pid};
- _ ->
- {error, retry_later}
- end.
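-
-%% Because the table is an ordered_set keyed on {Pipeline_size, Pid},
-%% ets:first/1 yields the least-loaded connection. An illustrative walk:
-%% with entries {{0, PidA}, []} and {{3, PidB}, []} and Max_pipe = 10,
-%% find_best_connection/2 returns {ok, PidA} and re-inserts PidA as
-%% {{1, PidA}, []}.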
-
-maybe_create_ets(#state{ets_tid = undefined} = State) ->
- Tid = ets:new(ibrowse_lb, [public, ordered_set]),
- State#state{ets_tid = Tid};
-maybe_create_ets(State) ->
- State.
diff --git a/1.1.x/src/ibrowse/ibrowse_lib.erl b/1.1.x/src/ibrowse/ibrowse_lib.erl
deleted file mode 100644
index 3cbe3ace..00000000
--- a/1.1.x/src/ibrowse/ibrowse_lib.erl
+++ /dev/null
@@ -1,391 +0,0 @@
-%%% File : ibrowse_lib.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
-%%% Created : 27 Feb 2004 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%% @doc Module with a few useful functions
-
--module(ibrowse_lib).
--author('chandru').
--ifdef(debug).
--compile(export_all).
--endif.
-
--include("ibrowse.hrl").
-
--export([
- get_trace_status/2,
- do_trace/2,
- do_trace/3,
- url_encode/1,
- decode_rfc822_date/1,
- status_code/1,
- encode_base64/1,
- decode_base64/1,
- get_value/2,
- get_value/3,
- parse_url/1,
- printable_date/0
- ]).
-
-get_trace_status(Host, Port) ->
- ibrowse:get_config_value({trace, Host, Port}, false).
-
-%% @doc URL-encodes a string based on RFC 1738. Returns a flat list.
-%% @spec url_encode(Str) -> UrlEncodedStr
-%% Str = string()
-%% UrlEncodedStr = string()
-url_encode(Str) when is_list(Str) ->
- url_encode_char(lists:reverse(Str), []).
-
-url_encode_char([X | T], Acc) when X >= $0, X =< $9 ->
- url_encode_char(T, [X | Acc]);
-url_encode_char([X | T], Acc) when X >= $a, X =< $z ->
- url_encode_char(T, [X | Acc]);
-url_encode_char([X | T], Acc) when X >= $A, X =< $Z ->
- url_encode_char(T, [X | Acc]);
-url_encode_char([X | T], Acc) when X == $-; X == $_; X == $. ->
- url_encode_char(T, [X | Acc]);
-url_encode_char([32 | T], Acc) ->
- url_encode_char(T, [$+ | Acc]);
-url_encode_char([X | T], Acc) ->
- url_encode_char(T, [$%, d2h(X bsr 4), d2h(X band 16#0f) | Acc]);
-url_encode_char([], Acc) ->
- Acc.
-
-d2h(N) when N<10 -> N+$0;
-d2h(N) -> N+$a-10.
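-
-%% For example (illustrative): spaces become "+" and other unsafe
-%% characters are %-escaped:
-%%   url_encode("hello world & foo") -> "hello+world+%26+foo"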
-
-decode_rfc822_date(String) when is_list(String) ->
- case catch decode_rfc822_date_1(string:tokens(String, ", \t\r\n")) of
- {'EXIT', _} ->
- {error, invalid_date};
- Res ->
- Res
- end.
-
-% TODO: Have to handle the Zone
-decode_rfc822_date_1([_,DayInt,Month,Year, Time,Zone]) ->
- decode_rfc822_date_1([DayInt,Month,Year, Time,Zone]);
-decode_rfc822_date_1([Day,Month,Year, Time,_Zone]) ->
- DayI = list_to_integer(Day),
- MonthI = month_int(Month),
- YearI = list_to_integer(Year),
- TimeTup = case string:tokens(Time, ":") of
- [H,M] ->
- {list_to_integer(H),
- list_to_integer(M),
- 0};
- [H,M,S] ->
- {list_to_integer(H),
- list_to_integer(M),
- list_to_integer(S)}
- end,
- {{YearI,MonthI,DayI}, TimeTup}.
-
-month_int("Jan") -> 1;
-month_int("Feb") -> 2;
-month_int("Mar") -> 3;
-month_int("Apr") -> 4;
-month_int("May") -> 5;
-month_int("Jun") -> 6;
-month_int("Jul") -> 7;
-month_int("Aug") -> 8;
-month_int("Sep") -> 9;
-month_int("Oct") -> 10;
-month_int("Nov") -> 11;
-month_int("Dec") -> 12.
-
-%% @doc Given a status code, returns an atom describing the status code.
-%% @spec status_code(StatusCode::status_code()) -> StatusDescription
-%% status_code() = string() | integer()
-%% StatusDescription = atom()
-status_code(100) -> continue;
-status_code(101) -> switching_protocols;
-status_code(102) -> processing;
-status_code(200) -> ok;
-status_code(201) -> created;
-status_code(202) -> accepted;
-status_code(203) -> non_authoritative_information;
-status_code(204) -> no_content;
-status_code(205) -> reset_content;
-status_code(206) -> partial_content;
-status_code(207) -> multi_status;
-status_code(300) -> multiple_choices;
-status_code(301) -> moved_permanently;
-status_code(302) -> found;
-status_code(303) -> see_other;
-status_code(304) -> not_modified;
-status_code(305) -> use_proxy;
-status_code(306) -> unused;
-status_code(307) -> temporary_redirect;
-status_code(400) -> bad_request;
-status_code(401) -> unauthorized;
-status_code(402) -> payment_required;
-status_code(403) -> forbidden;
-status_code(404) -> not_found;
-status_code(405) -> method_not_allowed;
-status_code(406) -> not_acceptable;
-status_code(407) -> proxy_authentication_required;
-status_code(408) -> request_timeout;
-status_code(409) -> conflict;
-status_code(410) -> gone;
-status_code(411) -> length_required;
-status_code(412) -> precondition_failed;
-status_code(413) -> request_entity_too_large;
-status_code(414) -> request_uri_too_long;
-status_code(415) -> unsupported_media_type;
-status_code(416) -> requested_range_not_satisfiable;
-status_code(417) -> expectation_failed;
-status_code(422) -> unprocessable_entity;
-status_code(423) -> locked;
-status_code(424) -> failed_dependency;
-status_code(500) -> internal_server_error;
-status_code(501) -> not_implemented;
-status_code(502) -> bad_gateway;
-status_code(503) -> service_unavailable;
-status_code(504) -> gateway_timeout;
-status_code(505) -> http_version_not_supported;
-status_code(507) -> insufficient_storage;
-status_code(X) when is_list(X) -> status_code(list_to_integer(X));
-status_code(_) -> unknown_status_code.
-
-%% @doc Implements the base64 encoding algorithm. The output data type matches the input data type.
-%% @spec encode_base64(In) -> Out
-%% In = string() | binary()
-%% Out = string() | binary()
-encode_base64(List) when is_list(List) ->
- binary_to_list(base64:encode(List));
-encode_base64(Bin) when is_binary(Bin) ->
- base64:encode(Bin).
-
-%% @doc Implements the base64 decoding algorithm. The output data type matches the input data type.
-%% @spec decode_base64(In) -> Out | exit({error, invalid_input})
-%% In = string() | binary()
-%% Out = string() | binary()
-decode_base64(List) when is_list(List) ->
- binary_to_list(base64:decode(List));
-decode_base64(Bin) when is_binary(Bin) ->
- base64:decode(Bin).
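-
-%% For example (illustrative):
-%%   encode_base64("abc")      -> "YWJj"
-%%   decode_base64(<<"YWJj">>) -> <<"abc">>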
-
-get_value(Tag, TVL, DefVal) ->
- case lists:keysearch(Tag, 1, TVL) of
- false ->
- DefVal;
- {value, {_, Val}} ->
- Val
- end.
-
-get_value(Tag, TVL) ->
- {value, {_, V}} = lists:keysearch(Tag,1,TVL),
- V.
-
-parse_url(Url) ->
- case parse_url(Url, get_protocol, #url{abspath=Url}, []) of
- #url{host_type = undefined, host = Host} = UrlRec ->
- case inet_parse:address(Host) of
- {ok, {_, _, _, _, _, _, _, _}} ->
- UrlRec#url{host_type = ipv6_address};
- {ok, {_, _, _, _}} ->
- UrlRec#url{host_type = ipv4_address};
- _ ->
- UrlRec#url{host_type = hostname}
- end;
- Else ->
- Else
- end.
-
-parse_url([$:, $/, $/ | _], get_protocol, Url, []) ->
- {invalid_uri_1, Url};
-parse_url([$:, $/, $/ | T], get_protocol, Url, TmpAcc) ->
- Prot = list_to_existing_atom(lists:reverse(TmpAcc)),
- parse_url(T, get_username,
- Url#url{protocol = Prot},
- []);
-parse_url([H | T], get_username, Url, TmpAcc) when H == $/;
- H == $? ->
- Path = case H of
- $/ ->
- [$/ | T];
- $? ->
- [$/, $? | T]
- end,
- %% No username/password. No port number
- Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = Path};
-parse_url([$: | T], get_username, Url, TmpAcc) ->
- %% It is possible that no username/password has been
- %% specified. But we'll continue with the assumption that there is
- %% a username/password. If we encounter a '@' later on, there is a
- %% username/password indeed. If we encounter a '/', it was
- %% actually the hostname
- parse_url(T, get_password,
- Url#url{username = lists:reverse(TmpAcc)},
- []);
-parse_url([$@ | T], get_username, Url, TmpAcc) ->
- parse_url(T, get_host,
- Url#url{username = lists:reverse(TmpAcc),
- password = ""},
- []);
-parse_url([$[ | T], get_username, Url, []) ->
- % IPv6 address literals are enclosed by square brackets:
- % http://www.ietf.org/rfc/rfc2732.txt
- parse_url(T, get_ipv6_address, Url#url{host_type = ipv6_address}, []);
-parse_url([$[ | T], get_username, _Url, TmpAcc) ->
- {error, {invalid_username_or_host, lists:reverse(TmpAcc) ++ "[" ++ T}};
-parse_url([$[ | _], get_password, _Url, []) ->
- {error, missing_password};
-parse_url([$[ | T], get_password, Url, TmpAcc) ->
- % IPv6 address literals are enclosed by square brackets:
- % http://www.ietf.org/rfc/rfc2732.txt
- parse_url(T, get_ipv6_address,
- Url#url{host_type = ipv6_address,
- password = lists:reverse(TmpAcc)},
- []);
-parse_url([$@ | T], get_password, Url, TmpAcc) ->
- parse_url(T, get_host,
- Url#url{password = lists:reverse(TmpAcc)},
- []);
-parse_url([H | T], get_password, Url, TmpAcc) when H == $/;
- H == $? ->
- %% Ok, what we thought was the username/password was the hostname
- %% and portnumber
- #url{username=User} = Url,
- Port = list_to_integer(lists:reverse(TmpAcc)),
- Path = case H of
- $/ ->
- [$/ | T];
- $? ->
- [$/, $? | T]
- end,
- Url#url{host = User,
- port = Port,
- username = undefined,
- password = undefined,
- path = Path};
-parse_url([$] | T], get_ipv6_address, #url{protocol = Prot} = Url, TmpAcc) ->
- Addr = lists:reverse(TmpAcc),
- case inet_parse:address(Addr) of
- {ok, {_, _, _, _, _, _, _, _}} ->
- Url2 = Url#url{host = Addr, port = default_port(Prot)},
- case T of
- [$: | T2] ->
- parse_url(T2, get_port, Url2, []);
- [$/ | T2] ->
- Url2#url{path = [$/ | T2]};
- [$? | T2] ->
- Url2#url{path = [$/, $? | T2]};
- [] ->
- Url2#url{path = "/"};
- _ ->
- {error, {invalid_host, "[" ++ Addr ++ "]" ++ T}}
- end;
- _ ->
- {error, {invalid_ipv6_address, Addr}}
- end;
-parse_url([$[ | T], get_host, #url{} = Url, []) ->
- parse_url(T, get_ipv6_address, Url#url{host_type = ipv6_address}, []);
-parse_url([$: | T], get_host, #url{} = Url, TmpAcc) ->
- parse_url(T, get_port,
- Url#url{host = lists:reverse(TmpAcc)},
- []);
-parse_url([H | T], get_host, #url{protocol=Prot} = Url, TmpAcc) when H == $/;
- H == $? ->
- Path = case H of
- $/ ->
- [$/ | T];
- $? ->
- [$/, $? | T]
- end,
- Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Prot),
- path = Path};
-parse_url([H | T], get_port, #url{protocol=Prot} = Url, TmpAcc) when H == $/;
- H == $? ->
- Path = case H of
- $/ ->
- [$/ | T];
- $? ->
- [$/, $? | T]
- end,
- Port = case TmpAcc of
- [] ->
- default_port(Prot);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
- Url#url{port = Port, path = Path};
-parse_url([H | T], State, Url, TmpAcc) ->
- parse_url(T, State, Url, [H | TmpAcc]);
-parse_url([], get_host, Url, TmpAcc) when TmpAcc /= [] ->
- Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = "/"};
-parse_url([], get_username, Url, TmpAcc) when TmpAcc /= [] ->
- Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = "/"};
-parse_url([], get_port, #url{protocol=Prot} = Url, TmpAcc) ->
- Port = case TmpAcc of
- [] ->
- default_port(Prot);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
- Url#url{port = Port,
- path = "/"};
-parse_url([], get_password, Url, TmpAcc) ->
- %% Ok, what we thought was the username/password was the hostname
- %% and portnumber
- #url{username=User} = Url,
- Port = case TmpAcc of
- [] ->
- default_port(Url#url.protocol);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
- Url#url{host = User,
- port = Port,
- username = undefined,
- password = undefined,
- path = "/"};
-parse_url([], State, Url, TmpAcc) ->
- {invalid_uri_2, State, Url, TmpAcc}.
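-
-%% A minimal sketch of the record this state machine produces
-%% (illustrative; example.com is a placeholder host):
-%%   parse_url("http://user:pw@example.com:8080/x?y=1")
-%%       -> #url{protocol = http, username = "user", password = "pw",
-%%               host = "example.com", port = 8080, path = "/x?y=1",
-%%               host_type = hostname,
-%%               abspath = "http://user:pw@example.com:8080/x?y=1"}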
-
-default_port(http) -> 80;
-default_port(https) -> 443;
-default_port(ftp) -> 21.
-
-printable_date() ->
- {{Y,Mo,D},{H, M, S}} = calendar:local_time(),
- {_,_,MicroSecs} = now(),
- [integer_to_list(Y),
- $-,
- integer_to_list(Mo),
- $-,
- integer_to_list(D),
- $_,
- integer_to_list(H),
- $:,
- integer_to_list(M),
- $:,
- integer_to_list(S),
- $:,
- integer_to_list(MicroSecs div 1000)].
-
-do_trace(Fmt, Args) ->
- do_trace(get(my_trace_flag), Fmt, Args).
-
--ifdef(DEBUG).
-do_trace(_, Fmt, Args) ->
- io:format("~s -- (~s) - "++Fmt,
- [printable_date(),
- get(ibrowse_trace_token) | Args]).
--else.
-do_trace(true, Fmt, Args) ->
- io:format("~s -- (~s) - "++Fmt,
- [printable_date(),
- get(ibrowse_trace_token) | Args]);
-do_trace(_, _, _) ->
- ok.
--endif.
diff --git a/1.1.x/src/ibrowse/ibrowse_sup.erl b/1.1.x/src/ibrowse/ibrowse_sup.erl
deleted file mode 100644
index ace33d16..00000000
--- a/1.1.x/src/ibrowse/ibrowse_sup.erl
+++ /dev/null
@@ -1,63 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% File : ibrowse_sup.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
-%%%
-%%% Created : 15 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%%-------------------------------------------------------------------
--module(ibrowse_sup).
--behaviour(supervisor).
-%%--------------------------------------------------------------------
-%% Include files
-%%--------------------------------------------------------------------
-
-%%--------------------------------------------------------------------
-%% External exports
-%%--------------------------------------------------------------------
--export([
- start_link/0
- ]).
-
-%%--------------------------------------------------------------------
-%% Internal exports
-%%--------------------------------------------------------------------
--export([
- init/1
- ]).
-
-%%--------------------------------------------------------------------
-%% Macros
-%%--------------------------------------------------------------------
--define(SERVER, ?MODULE).
-
-%%--------------------------------------------------------------------
-%% Records
-%%--------------------------------------------------------------------
-
-%%====================================================================
-%% External functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Function: start_link/0
-%% Description: Starts the supervisor
-%%--------------------------------------------------------------------
-start_link() ->
- supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-
-%%====================================================================
-%% Server functions
-%%====================================================================
-%%--------------------------------------------------------------------
-%% Func: init/1
-%% Returns: {ok, {SupFlags, [ChildSpec]}} |
-%% ignore |
-%% {error, Reason}
-%%--------------------------------------------------------------------
-init([]) ->
- AChild = {ibrowse,{ibrowse,start_link,[]},
- permanent,2000,worker,[ibrowse, ibrowse_http_client]},
- {ok,{{one_for_all,10,1}, [AChild]}}.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/1.1.x/src/ibrowse/ibrowse_test.erl b/1.1.x/src/ibrowse/ibrowse_test.erl
deleted file mode 100644
index ff3b5304..00000000
--- a/1.1.x/src/ibrowse/ibrowse_test.erl
+++ /dev/null
@@ -1,513 +0,0 @@
-%%% File : ibrowse_test.erl
-%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description : Test ibrowse
-%%% Created : 14 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-
--module(ibrowse_test).
--export([
- load_test/3,
- send_reqs_1/3,
- do_send_req/2,
- unit_tests/0,
- unit_tests/1,
- unit_tests_1/2,
- ue_test/0,
- ue_test/1,
- verify_chunked_streaming/0,
- verify_chunked_streaming/1,
- test_chunked_streaming_once/0,
- i_do_async_req_list/4,
- test_stream_once/3,
- test_stream_once/4,
- test_20122010/0,
- test_20122010/1
- ]).
-
-test_stream_once(Url, Method, Options) ->
- test_stream_once(Url, Method, Options, 5000).
-
-test_stream_once(Url, Method, Options, Timeout) ->
- case ibrowse:send_req(Url, [], Method, [], [{stream_to, {self(), once}} | Options], Timeout) of
- {ibrowse_req_id, Req_id} ->
- case ibrowse:stream_next(Req_id) of
- ok ->
- test_stream_once(Req_id);
- Err ->
- Err
- end;
- Err ->
- Err
- end.
-
-test_stream_once(Req_id) ->
- receive
- {ibrowse_async_headers, Req_id, StatCode, Headers} ->
- io:format("Recvd headers~n~p~n", [{ibrowse_async_headers, Req_id, StatCode, Headers}]),
- case ibrowse:stream_next(Req_id) of
- ok ->
- test_stream_once(Req_id);
- Err ->
- Err
- end;
- {ibrowse_async_response, Req_id, {error, Err}} ->
- io:format("Recvd error: ~p~n", [Err]);
- {ibrowse_async_response, Req_id, Body_1} ->
- io:format("Recvd body part: ~n~p~n", [{ibrowse_async_response, Req_id, Body_1}]),
- case ibrowse:stream_next(Req_id) of
- ok ->
- test_stream_once(Req_id);
- Err ->
- Err
- end;
- {ibrowse_async_response_end, Req_id} ->
- ok
- end.
-%% Use ibrowse:set_max_sessions/3 and ibrowse:set_max_pipeline_size/3 to
-%% tweak settings before running the load test. The defaults are 10 and 10.
-load_test(Url, NumWorkers, NumReqsPerWorker) when is_list(Url),
- is_integer(NumWorkers),
- is_integer(NumReqsPerWorker),
- NumWorkers > 0,
- NumReqsPerWorker > 0 ->
- proc_lib:spawn(?MODULE, send_reqs_1, [Url, NumWorkers, NumReqsPerWorker]).
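-
-%% For example (illustrative; the localhost URL and limits are
-%% placeholders):
-%%   ibrowse:set_max_sessions("localhost", 8081, 20),
-%%   ibrowse:set_max_pipeline_size("localhost", 8081, 20),
-%%   ibrowse_test:load_test("http://localhost:8081/", 10, 100).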
-
-send_reqs_1(Url, NumWorkers, NumReqsPerWorker) ->
- Start_time = now(),
- ets:new(pid_table, [named_table, public]),
- ets:new(ibrowse_test_results, [named_table, public]),
- ets:new(ibrowse_errors, [named_table, public, ordered_set]),
- init_results(),
- process_flag(trap_exit, true),
- log_msg("Starting spawning of workers...~n", []),
- spawn_workers(Url, NumWorkers, NumReqsPerWorker),
- log_msg("Finished spawning workers...~n", []),
- do_wait(),
- End_time = now(),
- log_msg("All workers are done...~n", []),
- log_msg("ibrowse_test_results table: ~n~p~n", [ets:tab2list(ibrowse_test_results)]),
- log_msg("Start time: ~1000.p~n", [calendar:now_to_local_time(Start_time)]),
- log_msg("End time : ~1000.p~n", [calendar:now_to_local_time(End_time)]),
- Elapsed_time_secs = trunc(timer:now_diff(End_time, Start_time) / 1000000),
- log_msg("Elapsed : ~p~n", [Elapsed_time_secs]),
- log_msg("Reqs/sec : ~p~n", [round(trunc((NumWorkers*NumReqsPerWorker) / Elapsed_time_secs))]),
- dump_errors().
-
-init_results() ->
- ets:insert(ibrowse_test_results, {crash, 0}),
- ets:insert(ibrowse_test_results, {send_failed, 0}),
- ets:insert(ibrowse_test_results, {other_error, 0}),
- ets:insert(ibrowse_test_results, {success, 0}),
- ets:insert(ibrowse_test_results, {retry_later, 0}),
- ets:insert(ibrowse_test_results, {trid_mismatch, 0}),
- ets:insert(ibrowse_test_results, {success_no_trid, 0}),
- ets:insert(ibrowse_test_results, {failed, 0}),
- ets:insert(ibrowse_test_results, {timeout, 0}),
- ets:insert(ibrowse_test_results, {req_id, 0}).
-
-spawn_workers(_Url, 0, _) ->
- ok;
-spawn_workers(Url, NumWorkers, NumReqsPerWorker) ->
- Pid = proc_lib:spawn_link(?MODULE, do_send_req, [Url, NumReqsPerWorker]),
- ets:insert(pid_table, {Pid, []}),
- spawn_workers(Url, NumWorkers - 1, NumReqsPerWorker).
-
-do_wait() ->
- receive
- {'EXIT', _, normal} ->
- do_wait();
- {'EXIT', Pid, Reason} ->
- ets:delete(pid_table, Pid),
- ets:insert(ibrowse_errors, {Pid, Reason}),
- ets:update_counter(ibrowse_test_results, crash, 1),
- do_wait();
- Msg ->
- io:format("Recvd unknown message...~p~n", [Msg]),
- do_wait()
- after 1000 ->
- case ets:info(pid_table, size) of
- 0 ->
- done;
- _ ->
- do_wait()
- end
- end.
-
-do_send_req(Url, NumReqs) ->
- do_send_req_1(Url, NumReqs).
-
-do_send_req_1(_Url, 0) ->
- ets:delete(pid_table, self());
-do_send_req_1(Url, NumReqs) ->
- Counter = integer_to_list(ets:update_counter(ibrowse_test_results, req_id, 1)),
- case ibrowse:send_req(Url, [{"ib_req_id", Counter}], get, [], [], 10000) of
- {ok, _Status, Headers, _Body} ->
- case lists:keysearch("ib_req_id", 1, Headers) of
- {value, {_, Counter}} ->
- ets:update_counter(ibrowse_test_results, success, 1);
- {value, _} ->
- ets:update_counter(ibrowse_test_results, trid_mismatch, 1);
- false ->
- ets:update_counter(ibrowse_test_results, success_no_trid, 1)
- end;
- {error, req_timedout} ->
- ets:update_counter(ibrowse_test_results, timeout, 1);
- {error, send_failed} ->
- ets:update_counter(ibrowse_test_results, send_failed, 1);
- {error, retry_later} ->
- ets:update_counter(ibrowse_test_results, retry_later, 1);
- Err ->
- ets:insert(ibrowse_errors, {now(), Err}),
- ets:update_counter(ibrowse_test_results, other_error, 1),
- ok
- end,
- do_send_req_1(Url, NumReqs-1).
-
-dump_errors() ->
- case ets:info(ibrowse_errors, size) of
- 0 ->
- ok;
- _ ->
- {A, B, C} = now(),
- Filename = lists:flatten(
- io_lib:format("ibrowse_errors_~p_~p_~p.txt" , [A, B, C])),
- case file:open(Filename, [write, delayed_write, raw]) of
- {ok, Iod} ->
- dump_errors(ets:first(ibrowse_errors), Iod);
- Err ->
- io:format("failed to create file ~s. Reason: ~p~n", [Filename, Err]),
- ok
- end
- end.
-
-dump_errors('$end_of_table', Iod) ->
- file:close(Iod);
-dump_errors(Key, Iod) ->
- [{_, Term}] = ets:lookup(ibrowse_errors, Key),
- file:write(Iod, io_lib:format("~p~n", [Term])),
- dump_errors(ets:next(ibrowse_errors, Key), Iod).
-
-%%------------------------------------------------------------------------------
-%% Unit Tests
-%%------------------------------------------------------------------------------
--define(TEST_LIST, [{"http://intranet/messenger", get},
- {"http://www.google.co.uk", get},
- {"http://www.google.com", get},
- {"http://www.google.com", options},
- {"https://mail.google.com", get},
- {"http://www.sun.com", get},
- {"http://www.oracle.com", get},
- {"http://www.bbc.co.uk", get},
- {"http://www.bbc.co.uk", trace},
- {"http://www.bbc.co.uk", options},
- {"http://yaws.hyber.org", get},
- {"http://jigsaw.w3.org/HTTP/ChunkedScript", get},
- {"http://jigsaw.w3.org/HTTP/TE/foo.txt", get},
- {"http://jigsaw.w3.org/HTTP/TE/bar.txt", get},
- {"http://jigsaw.w3.org/HTTP/connection.html", get},
- {"http://jigsaw.w3.org/HTTP/cc.html", get},
- {"http://jigsaw.w3.org/HTTP/cc-private.html", get},
- {"http://jigsaw.w3.org/HTTP/cc-proxy-revalidate.html", get},
- {"http://jigsaw.w3.org/HTTP/cc-nocache.html", get},
- {"http://jigsaw.w3.org/HTTP/h-content-md5.html", get},
- {"http://jigsaw.w3.org/HTTP/h-retry-after.html", get},
- {"http://jigsaw.w3.org/HTTP/h-retry-after-date.html", get},
- {"http://jigsaw.w3.org/HTTP/neg", get},
- {"http://jigsaw.w3.org/HTTP/negbad", get},
- {"http://jigsaw.w3.org/HTTP/400/toolong/", get},
- {"http://jigsaw.w3.org/HTTP/300/", get},
- {"http://jigsaw.w3.org/HTTP/Basic/", get, [{basic_auth, {"guest", "guest"}}]},
- {"http://jigsaw.w3.org/HTTP/CL/", get},
- {"http://www.httpwatch.com/httpgallery/chunked/", get},
- {"https://github.com", get, [{ssl_options, [{depth, 2}]}]},
- {local_test_fun, test_20122010, []}
- ]).
-
-unit_tests() ->
- unit_tests([]).
-
-unit_tests(Options) ->
- application:start(crypto),
- application:start(public_key),
- application:start(ssl),
- (catch ibrowse_test_server:start_server(8181, tcp)),
- ibrowse:start(),
- Options_1 = Options ++ [{connect_timeout, 5000}],
- {Pid, Ref} = erlang:spawn_monitor(?MODULE, unit_tests_1, [self(), Options_1]),
- receive
- {done, Pid} ->
- ok;
- {'DOWN', Ref, _, _, Info} ->
- io:format("Test process crashed: ~p~n", [Info])
- after 60000 ->
- exit(Pid, kill),
- io:format("Timed out waiting for tests to complete~n", [])
- end.
-
-unit_tests_1(Parent, Options) ->
- lists:foreach(fun({local_test_fun, Fun_name, Args}) ->
- execute_req(local_test_fun, Fun_name, Args);
- ({Url, Method}) ->
- execute_req(Url, Method, Options);
- ({Url, Method, X_Opts}) ->
- execute_req(Url, Method, X_Opts ++ Options)
- end, ?TEST_LIST),
- Parent ! {done, self()}.
-
-verify_chunked_streaming() ->
- verify_chunked_streaming([]).
-
-verify_chunked_streaming(Options) ->
- io:format("~nVerifying that chunked streaming is working...~n", []),
- Url = "http://www.httpwatch.com/httpgallery/chunked/",
- io:format(" URL: ~s~n", [Url]),
- io:format(" Fetching data without streaming...~n", []),
- Result_without_streaming = ibrowse:send_req(
- Url, [], get, [],
- [{response_format, binary} | Options]),
- io:format(" Fetching data with streaming as list...~n", []),
- Async_response_list = do_async_req_list(
- Url, get, [{response_format, list} | Options]),
- io:format(" Fetching data with streaming as binary...~n", []),
- Async_response_bin = do_async_req_list(
- Url, get, [{response_format, binary} | Options]),
- io:format(" Fetching data with streaming as binary, {active, once}...~n", []),
- Async_response_bin_once = do_async_req_list(
- Url, get, [once, {response_format, binary} | Options]),
- Res1 = compare_responses(Result_without_streaming, Async_response_list, Async_response_bin),
- Res2 = compare_responses(Result_without_streaming, Async_response_list, Async_response_bin_once),
- case {Res1, Res2} of
- {success, success} ->
- io:format(" Chunked streaming working~n", []);
- _ ->
- ok
- end.
-
-test_chunked_streaming_once() ->
- test_chunked_streaming_once([]).
-
-test_chunked_streaming_once(Options) ->
- io:format("~nTesting chunked streaming with the {stream_to, {Pid, once}} option...~n", []),
- Url = "http://www.httpwatch.com/httpgallery/chunked/",
- io:format(" URL: ~s~n", [Url]),
- io:format(" Fetching data with streaming as binary, {active, once}...~n", []),
- case do_async_req_list(Url, get, [once, {response_format, binary} | Options]) of
- {ok, _, _, _} ->
- io:format(" Success!~n", []);
- Err ->
- io:format(" Fail: ~p~n", [Err])
- end.
-
-compare_responses({ok, St_code, _, Body}, {ok, St_code, _, Body}, {ok, St_code, _, Body}) ->
- success;
-compare_responses({ok, St_code, _, Body_1}, {ok, St_code, _, Body_2}, {ok, St_code, _, Body_3}) ->
- case Body_1 of
- Body_2 ->
- io:format("Body_1 and Body_2 match~n", []);
- Body_3 ->
- io:format("Body_1 and Body_3 match~n", []);
- _ when Body_2 == Body_3 ->
- io:format("Body_2 and Body_3 match~n", []);
- _ ->
- io:format("All three bodies are different!~n", [])
- end,
- io:format("Body_1 -> ~p~n", [Body_1]),
- io:format("Body_2 -> ~p~n", [Body_2]),
- io:format("Body_3 -> ~p~n", [Body_3]),
- fail_bodies_mismatch;
-compare_responses(R1, R2, R3) ->
- io:format("R1 -> ~p~n", [R1]),
- io:format("R2 -> ~p~n", [R2]),
- io:format("R3 -> ~p~n", [R3]),
- fail.
-
-%% do_async_req_list(Url) ->
-%% do_async_req_list(Url, get).
-
-%% do_async_req_list(Url, Method) ->
-%% do_async_req_list(Url, Method, [{stream_to, self()},
-%% {stream_chunk_size, 1000}]).
-
-do_async_req_list(Url, Method, Options) ->
- {Pid,_} = erlang:spawn_monitor(?MODULE, i_do_async_req_list,
- [self(), Url, Method,
- Options ++ [{stream_chunk_size, 1000}]]),
-%% io:format("Spawned process ~p~n", [Pid]),
- wait_for_resp(Pid).
-
-wait_for_resp(Pid) ->
- receive
- {async_result, Pid, Res} ->
- Res;
- {async_result, Other_pid, _} ->
- io:format("~p: Waiting for result from ~p: got from ~p~n", [self(), Pid, Other_pid]),
- wait_for_resp(Pid);
- {'DOWN', _, _, Pid, Reason} ->
- {'EXIT', Reason};
- {'DOWN', _, _, _, _} ->
- wait_for_resp(Pid);
- Msg ->
- io:format("Recvd unknown message: ~p~n", [Msg]),
- wait_for_resp(Pid)
- after 100000 ->
- {error, timeout}
- end.
-
-i_do_async_req_list(Parent, Url, Method, Options) ->
- Options_1 = case lists:member(once, Options) of
- true ->
- [{stream_to, {self(), once}} | (Options -- [once])];
- false ->
- [{stream_to, self()} | Options]
- end,
- Res = ibrowse:send_req(Url, [], Method, [], Options_1),
- case Res of
- {ibrowse_req_id, Req_id} ->
- Result = wait_for_async_resp(Req_id, Options, undefined, undefined, []),
- Parent ! {async_result, self(), Result};
- Err ->
- Parent ! {async_result, self(), Err}
- end.
-
-wait_for_async_resp(Req_id, Options, Acc_Stat_code, Acc_Headers, Body) ->
- receive
- {ibrowse_async_headers, Req_id, StatCode, Headers} ->
- %% io:format("Recvd headers...~n", []),
- maybe_stream_next(Req_id, Options),
- wait_for_async_resp(Req_id, Options, StatCode, Headers, Body);
- {ibrowse_async_response_end, Req_id} ->
- %% io:format("Recvd end of response.~n", []),
- Body_1 = list_to_binary(lists:reverse(Body)),
- {ok, Acc_Stat_code, Acc_Headers, Body_1};
- {ibrowse_async_response, Req_id, Data} ->
- maybe_stream_next(Req_id, Options),
- %% io:format("Recvd data...~n", []),
- wait_for_async_resp(Req_id, Options, Acc_Stat_code, Acc_Headers, [Data | Body]);
- {ibrowse_async_response, Req_id, {error, _} = Err} ->
- {ok, Acc_Stat_code, Acc_Headers, Err};
- Err ->
- {ok, Acc_Stat_code, Acc_Headers, Err}
- after 10000 ->
- {timeout, Acc_Stat_code, Acc_Headers, Body}
- end.
-
-maybe_stream_next(Req_id, Options) ->
- case lists:member(once, Options) of
- true ->
- ibrowse:stream_next(Req_id);
- false ->
- ok
- end.
-
-execute_req(local_test_fun, Method, Args) ->
- io:format(" ~-54.54w: ", [Method]),
- Result = (catch apply(?MODULE, Method, Args)),
- io:format("~p~n", [Result]);
-execute_req(Url, Method, Options) ->
- io:format("~7.7w, ~50.50s: ", [Method, Url]),
- Result = (catch ibrowse:send_req(Url, [], Method, [], Options)),
- case Result of
- {ok, SCode, _H, _B} ->
- io:format("Status code: ~p~n", [SCode]);
- Err ->
- io:format("~p~n", [Err])
- end.
-
-ue_test() ->
- ue_test(lists:duplicate(1024, $?)).
-ue_test(Data) ->
- {Time, Res} = timer:tc(ibrowse_lib, url_encode, [Data]),
- io:format("Time -> ~p~n", [Time]),
- io:format("Data Length -> ~p~n", [length(Data)]),
- io:format("Res Length -> ~p~n", [length(Res)]).
-% io:format("Result -> ~s~n", [Res]).
-
-log_msg(Fmt, Args) ->
- io:format("~s -- " ++ Fmt,
- [ibrowse_lib:printable_date() | Args]).
-
-%%------------------------------------------------------------------------------
-%%
-%%------------------------------------------------------------------------------
-
-test_20122010() ->
- test_20122010("http://localhost:8181").
-
-test_20122010(Url) ->
- {ok, Pid} = ibrowse:spawn_worker_process(Url),
- Expected_resp = <<"1-2-3-4-5-6-7-8-9-10-11-12-13-14-15-16-17-18-19-20-21-22-23-24-25-26-27-28-29-30-31-32-33-34-35-36-37-38-39-40-41-42-43-44-45-46-47-48-49-50-51-52-53-54-55-56-57-58-59-60-61-62-63-64-65-66-67-68-69-70-71-72-73-74-75-76-77-78-79-80-81-82-83-84-85-86-87-88-89-90-91-92-93-94-95-96-97-98-99-100">>,
- Test_parent = self(),
- Fun = fun() ->
- do_test_20122010(Url, Pid, Expected_resp, Test_parent)
- end,
- Pids = [erlang:spawn_monitor(Fun) || _ <- lists:seq(1,10)],
- wait_for_workers(Pids).
-
-wait_for_workers([{Pid, Ref} | Pids]) ->
-    receive
-        {Pid, success} ->
-            wait_for_workers(Pids);
-        {Pid, failed} ->
-            %% a worker detected a body mismatch and reported it
-            test_failed;
-        {'DOWN', Ref, _, _, _Reason} ->
-            %% a worker crashed (e.g. header timeout or req_id mismatch)
-            test_failed
-    after 60000 ->
-          test_failed
-    end;
-wait_for_workers([]) ->
-    success.
-
-do_test_20122010(Url, Pid, Expected_resp, Test_parent) ->
- do_test_20122010(10, Url, Pid, Expected_resp, Test_parent).
-
-do_test_20122010(0, _Url, _Pid, _Expected_resp, Test_parent) ->
- Test_parent ! {self(), success};
-do_test_20122010(Rem_count, Url, Pid, Expected_resp, Test_parent) ->
- {ibrowse_req_id, Req_id} = ibrowse:send_req_direct(
- Pid,
- Url ++ "/ibrowse_stream_once_chunk_pipeline_test",
- [], get, [],
- [{stream_to, {self(), once}},
- {inactivity_timeout, 10000},
- {include_ibrowse_req_id, true}]),
- do_trace("~p -- sent request ~1000.p~n", [self(), Req_id]),
- Req_id_str = lists:flatten(io_lib:format("~1000.p",[Req_id])),
- receive
- {ibrowse_async_headers, Req_id, "200", Headers} ->
- case lists:keysearch("x-ibrowse-request-id", 1, Headers) of
- {value, {_, Req_id_str}} ->
- ok;
- {value, {_, Req_id_1}} ->
- do_trace("~p -- Sent req-id: ~1000.p. Recvd: ~1000.p~n",
- [self(), Req_id, Req_id_1]),
- exit(req_id_mismatch)
- end
- after 5000 ->
- do_trace("~p -- response headers not received~n", [self()]),
- exit({timeout, test_failed})
- end,
- do_trace("~p -- response headers received~n", [self()]),
- ok = ibrowse:stream_next(Req_id),
- case do_test_20122010_1(Expected_resp, Req_id, []) of
- true ->
- do_test_20122010(Rem_count - 1, Url, Pid, Expected_resp, Test_parent);
- false ->
- Test_parent ! {self(), failed}
- end.
-
-do_test_20122010_1(Expected_resp, Req_id, Acc) ->
- receive
- {ibrowse_async_response, Req_id, Body_part} ->
- ok = ibrowse:stream_next(Req_id),
- do_test_20122010_1(Expected_resp, Req_id, [Body_part | Acc]);
- {ibrowse_async_response_end, Req_id} ->
- Acc_1 = list_to_binary(lists:reverse(Acc)),
- Result = Acc_1 == Expected_resp,
- do_trace("~p -- End of response. Result: ~p~n", [self(), Result]),
- Result
- after 1000 ->
- exit({timeout, test_failed})
- end.
-
-do_trace(Fmt, Args) ->
- do_trace(get(my_trace_flag), Fmt, Args).
-
-do_trace(true, Fmt, Args) ->
- io:format("~s -- " ++ Fmt, [ibrowse_lib:printable_date() | Args]);
-do_trace(_, _, _) ->
- ok.
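The once-mode plumbing above (do_async_req_list/3 through maybe_stream_next/2) boils down to a simple consumer loop. A minimal standalone sketch, assuming ibrowse is on the code path; the module and function names here are illustrative and not part of the deleted file:

-module(stream_once_example).
-export([fetch/1]).

%% Fetch Url with {stream_to, {self(), once}}: ibrowse then delivers
%% exactly one message at a time and waits for ibrowse:stream_next/1
%% before sending the next, giving the caller explicit flow control.
fetch(Url) ->
    case ibrowse:send_req(Url, [], get, [],
                          [{stream_to, {self(), once}},
                           {response_format, binary}]) of
        {ibrowse_req_id, Req_id} ->
            collect(Req_id, undefined, []);
        Err ->
            Err
    end.

collect(Req_id, Status, Acc) ->
    receive
        {ibrowse_async_headers, Req_id, Code, _Headers} ->
            ok = ibrowse:stream_next(Req_id),
            collect(Req_id, Code, Acc);
        {ibrowse_async_response, Req_id, Data} ->
            ok = ibrowse:stream_next(Req_id),
            collect(Req_id, Status, [Data | Acc]);
        {ibrowse_async_response_end, Req_id} ->
            {ok, Status, list_to_binary(lists:reverse(Acc))}
    after 30000 ->
            {error, timeout}
    end.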
diff --git a/1.1.x/src/mochiweb/Makefile.am b/1.1.x/src/mochiweb/Makefile.am
deleted file mode 100644
index 752118df..00000000
--- a/1.1.x/src/mochiweb/Makefile.am
+++ /dev/null
@@ -1,102 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-## http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-mochiwebebindir = $(localerlanglibdir)/mochiweb-7c2bc2/ebin
-
-mochiweb_file_collection = \
- mochifmt.erl \
- mochifmt_records.erl \
- mochifmt_std.erl \
- mochiglobal.erl \
- mochihex.erl \
- mochijson.erl \
- mochijson2.erl \
- mochilists.erl \
- mochilogfile2.erl \
- mochinum.erl \
- mochitemp.erl \
- mochiutf8.erl \
- mochiweb.app.in \
- mochiweb.erl \
- mochiweb_acceptor.erl \
- mochiweb_app.erl \
- mochiweb_charref.erl \
- mochiweb_cookies.erl \
- mochiweb_cover.erl \
- mochiweb_echo.erl \
- mochiweb_headers.erl \
- mochiweb_html.erl \
- mochiweb_http.erl \
- mochiweb_io.erl \
- mochiweb_mime.erl \
- mochiweb_multipart.erl \
- mochiweb_request.erl \
- mochiweb_response.erl \
- mochiweb_skel.erl \
- mochiweb_socket.erl \
- mochiweb_socket_server.erl \
- mochiweb_sup.erl \
- mochiweb_util.erl \
- reloader.erl
-
-mochiwebebin_make_generated_file_list = \
- mochifmt.beam \
- mochifmt_records.beam \
- mochifmt_std.beam \
- mochiglobal.beam \
- mochihex.beam \
- mochijson.beam \
- mochijson2.beam \
- mochilists.beam \
- mochilogfile2.beam \
- mochinum.beam \
- mochitemp.beam \
- mochiutf8.beam \
- mochiweb.app \
- mochiweb.beam \
- mochiweb_acceptor.beam \
- mochiweb_app.beam \
- mochiweb_charref.beam \
- mochiweb_cookies.beam \
- mochiweb_cover.beam \
- mochiweb_echo.beam \
- mochiweb_headers.beam \
- mochiweb_html.beam \
- mochiweb_http.beam \
- mochiweb_io.beam \
- mochiweb_mime.beam \
- mochiweb_multipart.beam \
- mochiweb_request.beam \
- mochiweb_response.beam \
- mochiweb_skel.beam \
- mochiweb_socket.beam \
- mochiweb_socket_server.beam \
- mochiweb_sup.beam \
- mochiweb_util.beam \
- reloader.beam
-
-mochiwebebin_DATA = \
- $(mochiwebebin_make_generated_file_list)
-
-EXTRA_DIST = \
- $(mochiweb_file_collection) \
- internal.hrl
-
-CLEANFILES = \
- $(mochiwebebin_make_generated_file_list)
-
-%.app: %.app.in
- cp $< $@
-
-%.beam: %.erl
-
- $(ERLC) $(ERLC_FLAGS) $<
diff --git a/1.1.x/src/mochiweb/internal.hrl b/1.1.x/src/mochiweb/internal.hrl
deleted file mode 100644
index 6db899a0..00000000
--- a/1.1.x/src/mochiweb/internal.hrl
+++ /dev/null
@@ -1,3 +0,0 @@
-
--define(RECBUF_SIZE, 8192).
-
diff --git a/1.1.x/src/mochiweb/mochifmt.erl b/1.1.x/src/mochiweb/mochifmt.erl
deleted file mode 100644
index 5bc6b9c4..00000000
--- a/1.1.x/src/mochiweb/mochifmt.erl
+++ /dev/null
@@ -1,425 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2008 Mochi Media, Inc.
-
-%% @doc String Formatting for Erlang, inspired by Python 2.6
-%% (<a href="http://www.python.org/dev/peps/pep-3101/">PEP 3101</a>).
-%%
--module(mochifmt).
--author('bob@mochimedia.com').
--export([format/2, format_field/2, convert_field/2, get_value/2, get_field/2]).
--export([tokenize/1, format/3, get_field/3, format_field/3]).
--export([bformat/2, bformat/3]).
--export([f/2, f/3]).
-
--record(conversion, {length, precision, ctype, align, fill_char, sign}).
-
-%% @spec tokenize(S::string()) -> tokens()
-%% @doc Tokenize a format string into mochifmt's internal format.
-tokenize(S) ->
- {?MODULE, tokenize(S, "", [])}.
-
-%% @spec convert_field(Arg, Conversion::conversion()) -> term()
-%% @doc Process Arg according to the given explicit conversion specifier.
-convert_field(Arg, "") ->
- Arg;
-convert_field(Arg, "r") ->
- repr(Arg);
-convert_field(Arg, "s") ->
- str(Arg).
-
-%% @spec get_value(Key::string(), Args::args()) -> term()
-%% @doc Get the Key from Args. If Args is a tuple then convert Key to
-%% an integer and get element(1 + Key, Args). If Args is a list and Key
-%% can be parsed as an integer then use lists:nth(1 + Key, Args),
-%% otherwise try and look for Key in Args as a proplist, converting
-%% Key to an atom or binary if necessary.
-get_value(Key, Args) when is_tuple(Args) ->
- element(1 + list_to_integer(Key), Args);
-get_value(Key, Args) when is_list(Args) ->
- try lists:nth(1 + list_to_integer(Key), Args)
- catch error:_ ->
- {_K, V} = proplist_lookup(Key, Args),
- V
- end.
-
-%% @spec get_field(Key::string(), Args) -> term()
-%% @doc Consecutively call get_value/2 on parts of Key delimited by ".",
-%% replacing Args with the result of the previous get_value. This
-%% is used to implement formats such as {0.0}.
-get_field(Key, Args) ->
- get_field(Key, Args, ?MODULE).
-
-%% @spec get_field(Key::string(), Args, Module) -> term()
-%% @doc Consecutively call Module:get_value/2 on parts of Key delimited by ".",
-%% replacing Args with the result of the previous get_value. This
-%% is used to implement formats such as {0.0}.
-get_field(Key, Args, Module) ->
- {Name, Next} = lists:splitwith(fun (C) -> C =/= $. end, Key),
- Res = try Module:get_value(Name, Args)
- catch error:undef -> get_value(Name, Args) end,
- case Next of
- "" ->
- Res;
- "." ++ S1 ->
- get_field(S1, Res, Module)
- end.
-
-%% @spec format(Format::string(), Args) -> iolist()
-%% @doc Format Args with Format.
-format(Format, Args) ->
- format(Format, Args, ?MODULE).
-
-%% @spec format(Format::string(), Args, Module) -> iolist()
-%% @doc Format Args with Format using Module.
-format({?MODULE, Parts}, Args, Module) ->
- format2(Parts, Args, Module, []);
-format(S, Args, Module) ->
- format(tokenize(S), Args, Module).
-
-%% @spec format_field(Arg, Format) -> iolist()
-%% @doc Format Arg with Format.
-format_field(Arg, Format) ->
- format_field(Arg, Format, ?MODULE).
-
-%% @spec format_field(Arg, Format, _Module) -> iolist()
-%% @doc Format Arg with Format.
-format_field(Arg, Format, _Module) ->
- F = default_ctype(Arg, parse_std_conversion(Format)),
- fix_padding(fix_sign(convert2(Arg, F), F), F).
-
-%% @spec f(Format::string(), Args) -> string()
-%% @doc Format Args with Format and return a string().
-f(Format, Args) ->
- f(Format, Args, ?MODULE).
-
-%% @spec f(Format::string(), Args, Module) -> string()
-%% @doc Format Args with Format using Module and return a string().
-f(Format, Args, Module) ->
- case lists:member(${, Format) of
- true ->
- binary_to_list(bformat(Format, Args, Module));
- false ->
- Format
- end.
-
-%% @spec bformat(Format::string(), Args) -> binary()
-%% @doc Format Args with Format and return a binary().
-bformat(Format, Args) ->
- iolist_to_binary(format(Format, Args)).
-
-%% @spec bformat(Format::string(), Args, Module) -> binary()
-%% @doc Format Args with Format using Module and return a binary().
-bformat(Format, Args, Module) ->
- iolist_to_binary(format(Format, Args, Module)).
-
-%% Internal API
-
-add_raw("", Acc) ->
- Acc;
-add_raw(S, Acc) ->
- [{raw, lists:reverse(S)} | Acc].
-
-tokenize([], S, Acc) ->
- lists:reverse(add_raw(S, Acc));
-tokenize("{{" ++ Rest, S, Acc) ->
- tokenize(Rest, "{" ++ S, Acc);
-tokenize("{" ++ Rest, S, Acc) ->
- {Format, Rest1} = tokenize_format(Rest),
- tokenize(Rest1, "", [{format, make_format(Format)} | add_raw(S, Acc)]);
-tokenize("}}" ++ Rest, S, Acc) ->
- tokenize(Rest, "}" ++ S, Acc);
-tokenize([C | Rest], S, Acc) ->
- tokenize(Rest, [C | S], Acc).
-
-tokenize_format(S) ->
- tokenize_format(S, 1, []).
-
-tokenize_format("}" ++ Rest, 1, Acc) ->
- {lists:reverse(Acc), Rest};
-tokenize_format("}" ++ Rest, N, Acc) ->
- tokenize_format(Rest, N - 1, "}" ++ Acc);
-tokenize_format("{" ++ Rest, N, Acc) ->
- tokenize_format(Rest, 1 + N, "{" ++ Acc);
-tokenize_format([C | Rest], N, Acc) ->
- tokenize_format(Rest, N, [C | Acc]).
-
-make_format(S) ->
- {Name0, Spec} = case lists:splitwith(fun (C) -> C =/= $: end, S) of
- {_, ""} ->
- {S, ""};
- {SN, ":" ++ SS} ->
- {SN, SS}
- end,
- {Name, Transform} = case lists:splitwith(fun (C) -> C =/= $! end, Name0) of
- {_, ""} ->
- {Name0, ""};
- {TN, "!" ++ TT} ->
- {TN, TT}
- end,
- {Name, Transform, Spec}.
-
-proplist_lookup(S, P) ->
- A = try list_to_existing_atom(S)
- catch error:_ -> make_ref() end,
- B = try list_to_binary(S)
- catch error:_ -> make_ref() end,
- proplist_lookup2({S, A, B}, P).
-
-proplist_lookup2({KS, KA, KB}, [{K, V} | _])
- when KS =:= K orelse KA =:= K orelse KB =:= K ->
- {K, V};
-proplist_lookup2(Keys, [_ | Rest]) ->
- proplist_lookup2(Keys, Rest).
-
-format2([], _Args, _Module, Acc) ->
- lists:reverse(Acc);
-format2([{raw, S} | Rest], Args, Module, Acc) ->
- format2(Rest, Args, Module, [S | Acc]);
-format2([{format, {Key, Convert, Format0}} | Rest], Args, Module, Acc) ->
- Format = f(Format0, Args, Module),
- V = case Module of
- ?MODULE ->
- V0 = get_field(Key, Args),
- V1 = convert_field(V0, Convert),
- format_field(V1, Format);
- _ ->
- V0 = try Module:get_field(Key, Args)
- catch error:undef -> get_field(Key, Args, Module) end,
- V1 = try Module:convert_field(V0, Convert)
- catch error:undef -> convert_field(V0, Convert) end,
- try Module:format_field(V1, Format)
- catch error:undef -> format_field(V1, Format, Module) end
- end,
- format2(Rest, Args, Module, [V | Acc]).
-
-default_ctype(_Arg, C=#conversion{ctype=N}) when N =/= undefined ->
- C;
-default_ctype(Arg, C) when is_integer(Arg) ->
- C#conversion{ctype=decimal};
-default_ctype(Arg, C) when is_float(Arg) ->
- C#conversion{ctype=general};
-default_ctype(_Arg, C) ->
- C#conversion{ctype=string}.
-
-fix_padding(Arg, #conversion{length=undefined}) ->
- Arg;
-fix_padding(Arg, F=#conversion{length=Length, fill_char=Fill0, align=Align0,
- ctype=Type}) ->
- Padding = Length - iolist_size(Arg),
- Fill = case Fill0 of
- undefined ->
- $\s;
- _ ->
- Fill0
- end,
- Align = case Align0 of
- undefined ->
- case Type of
- string ->
- left;
- _ ->
- right
- end;
- _ ->
- Align0
- end,
- case Padding > 0 of
- true ->
- do_padding(Arg, Padding, Fill, Align, F);
- false ->
- Arg
- end.
-
-do_padding(Arg, Padding, Fill, right, _F) ->
- [lists:duplicate(Padding, Fill), Arg];
-do_padding(Arg, Padding, Fill, center, _F) ->
- LPadding = lists:duplicate(Padding div 2, Fill),
- RPadding = case Padding band 1 of
- 1 ->
- [Fill | LPadding];
- _ ->
- LPadding
- end,
- [LPadding, Arg, RPadding];
-do_padding([$- | Arg], Padding, Fill, sign_right, _F) ->
- [[$- | lists:duplicate(Padding, Fill)], Arg];
-do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=$-}) ->
- [lists:duplicate(Padding, Fill), Arg];
-do_padding([S | Arg], Padding, Fill, sign_right, #conversion{sign=S}) ->
- [[S | lists:duplicate(Padding, Fill)], Arg];
-do_padding(Arg, Padding, Fill, sign_right, #conversion{sign=undefined}) ->
- [lists:duplicate(Padding, Fill), Arg];
-do_padding(Arg, Padding, Fill, left, _F) ->
- [Arg | lists:duplicate(Padding, Fill)].
-
-fix_sign(Arg, #conversion{sign=$+}) when Arg >= 0 ->
- [$+, Arg];
-fix_sign(Arg, #conversion{sign=$\s}) when Arg >= 0 ->
- [$\s, Arg];
-fix_sign(Arg, _F) ->
- Arg.
-
-ctype($\%) -> percent;
-ctype($s) -> string;
-ctype($b) -> bin;
-ctype($o) -> oct;
-ctype($X) -> upper_hex;
-ctype($x) -> hex;
-ctype($c) -> char;
-ctype($d) -> decimal;
-ctype($g) -> general;
-ctype($f) -> fixed;
-ctype($e) -> exp.
-
-align($<) -> left;
-align($>) -> right;
-align($^) -> center;
-align($=) -> sign_right.
-
-convert2(Arg, F=#conversion{ctype=percent}) ->
- [convert2(100.0 * Arg, F#conversion{ctype=fixed}), $\%];
-convert2(Arg, #conversion{ctype=string}) ->
- str(Arg);
-convert2(Arg, #conversion{ctype=bin}) ->
- erlang:integer_to_list(Arg, 2);
-convert2(Arg, #conversion{ctype=oct}) ->
- erlang:integer_to_list(Arg, 8);
-convert2(Arg, #conversion{ctype=upper_hex}) ->
- erlang:integer_to_list(Arg, 16);
-convert2(Arg, #conversion{ctype=hex}) ->
- string:to_lower(erlang:integer_to_list(Arg, 16));
-convert2(Arg, #conversion{ctype=char}) when Arg < 16#80 ->
- [Arg];
-convert2(Arg, #conversion{ctype=char}) ->
- xmerl_ucs:to_utf8(Arg);
-convert2(Arg, #conversion{ctype=decimal}) ->
- integer_to_list(Arg);
-convert2(Arg, #conversion{ctype=general, precision=undefined}) ->
- try mochinum:digits(Arg)
- catch error:undef -> io_lib:format("~g", [Arg]) end;
-convert2(Arg, #conversion{ctype=fixed, precision=undefined}) ->
- io_lib:format("~f", [Arg]);
-convert2(Arg, #conversion{ctype=exp, precision=undefined}) ->
- io_lib:format("~e", [Arg]);
-convert2(Arg, #conversion{ctype=general, precision=P}) ->
- io_lib:format("~." ++ integer_to_list(P) ++ "g", [Arg]);
-convert2(Arg, #conversion{ctype=fixed, precision=P}) ->
- io_lib:format("~." ++ integer_to_list(P) ++ "f", [Arg]);
-convert2(Arg, #conversion{ctype=exp, precision=P}) ->
- io_lib:format("~." ++ integer_to_list(P) ++ "e", [Arg]).
-
-str(A) when is_atom(A) ->
- atom_to_list(A);
-str(I) when is_integer(I) ->
- integer_to_list(I);
-str(F) when is_float(F) ->
- try mochinum:digits(F)
- catch error:undef -> io_lib:format("~g", [F]) end;
-str(L) when is_list(L) ->
- L;
-str(B) when is_binary(B) ->
- B;
-str(P) ->
- repr(P).
-
-repr(P) when is_float(P) ->
- try mochinum:digits(P)
- catch error:undef -> float_to_list(P) end;
-repr(P) ->
- io_lib:format("~p", [P]).
-
-parse_std_conversion(S) ->
- parse_std_conversion(S, #conversion{}).
-
-parse_std_conversion("", Acc) ->
- Acc;
-parse_std_conversion([Fill, Align | Spec], Acc)
- when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
- parse_std_conversion(Spec, Acc#conversion{fill_char=Fill,
- align=align(Align)});
-parse_std_conversion([Align | Spec], Acc)
- when Align =:= $< orelse Align =:= $> orelse Align =:= $= orelse Align =:= $^ ->
- parse_std_conversion(Spec, Acc#conversion{align=align(Align)});
-parse_std_conversion([Sign | Spec], Acc)
- when Sign =:= $+ orelse Sign =:= $- orelse Sign =:= $\s ->
- parse_std_conversion(Spec, Acc#conversion{sign=Sign});
-parse_std_conversion("0" ++ Spec, Acc) ->
- Align = case Acc#conversion.align of
- undefined ->
- sign_right;
- A ->
- A
- end,
- parse_std_conversion(Spec, Acc#conversion{fill_char=$0, align=Align});
-parse_std_conversion(Spec=[D|_], Acc) when D >= $0 andalso D =< $9 ->
- {W, Spec1} = lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec),
- parse_std_conversion(Spec1, Acc#conversion{length=list_to_integer(W)});
-parse_std_conversion([$. | Spec], Acc) ->
- case lists:splitwith(fun (C) -> C >= $0 andalso C =< $9 end, Spec) of
- {"", Spec1} ->
- parse_std_conversion(Spec1, Acc);
- {P, Spec1} ->
- parse_std_conversion(Spec1,
- Acc#conversion{precision=list_to_integer(P)})
- end;
-parse_std_conversion([Type], Acc) ->
- parse_std_conversion("", Acc#conversion{ctype=ctype(Type)}).
-
-
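parse_std_conversion/2 above accepts PEP-3101-style specs of the shape [[fill]align][sign]["0"][width]["." precision][type], with align one of < > ^ = and the type character mapped by ctype/1. An illustrative shell call (values chosen for this example):

1> mochifmt:bformat("{0:*>8.2f}", [3.14159]).
<<"****3.14">>

Here * is the fill character, > right-aligns, 8 is the field width, .2 the precision, and f selects the fixed conversion type.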
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-tokenize_test() ->
- {?MODULE, [{raw, "ABC"}]} = tokenize("ABC"),
- {?MODULE, [{format, {"0", "", ""}}]} = tokenize("{0}"),
- {?MODULE, [{raw, "ABC"}, {format, {"1", "", ""}}, {raw, "DEF"}]} =
- tokenize("ABC{1}DEF"),
- ok.
-
-format_test() ->
- <<" -4">> = bformat("{0:4}", [-4]),
- <<" 4">> = bformat("{0:4}", [4]),
- <<" 4">> = bformat("{0:{0}}", [4]),
- <<"4 ">> = bformat("{0:4}", ["4"]),
- <<"4 ">> = bformat("{0:{0}}", ["4"]),
- <<"1.2yoDEF">> = bformat("{2}{0}{1}{3}", {yo, "DE", 1.2, <<"F">>}),
- <<"cafebabe">> = bformat("{0:x}", {16#cafebabe}),
-    <<"CAFEBABE">> = bformat("{0:X}", {16#cafebabe}),
- <<"755">> = bformat("{0:o}", {8#755}),
- <<"a">> = bformat("{0:c}", {97}),
- %% Horizontal ellipsis
- <<226, 128, 166>> = bformat("{0:c}", {16#2026}),
- <<"11">> = bformat("{0:b}", {3}),
- <<"11">> = bformat("{0:b}", [3]),
- <<"11">> = bformat("{three:b}", [{three, 3}]),
- <<"11">> = bformat("{three:b}", [{"three", 3}]),
- <<"11">> = bformat("{three:b}", [{<<"three">>, 3}]),
- <<"\"foo\"">> = bformat("{0!r}", {"foo"}),
- <<"2008-5-4">> = bformat("{0.0}-{0.1}-{0.2}", {{2008,5,4}}),
- <<"2008-05-04">> = bformat("{0.0:04}-{0.1:02}-{0.2:02}", {{2008,5,4}}),
- <<"foo6bar-6">> = bformat("foo{1}{0}-{1}", {bar, 6}),
- <<"-'atom test'-">> = bformat("-{arg!r}-", [{arg, 'atom test'}]),
- <<"2008-05-04">> = bformat("{0.0:0{1.0}}-{0.1:0{1.1}}-{0.2:0{1.2}}",
- {{2008,5,4}, {4, 2, 2}}),
- ok.
-
-std_test() ->
- M = mochifmt_std:new(),
- <<"01">> = bformat("{0}{1}", [0, 1], M),
- ok.
-
-records_test() ->
- M = mochifmt_records:new([{conversion, record_info(fields, conversion)}]),
- R = #conversion{length=long, precision=hard, sign=peace},
- long = M:get_value("length", R),
- hard = M:get_value("precision", R),
- peace = M:get_value("sign", R),
- <<"long hard">> = bformat("{length} {precision}", R, M),
- <<"long hard">> = bformat("{0.length} {0.precision}", [R], M),
- ok.
-
--endif.
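The lookup rules above (positional keys into tuples and lists, names into proplists with atom, list or binary keys, and dotted paths via get_field/2) make all of the following work; an illustrative shell session:

1> mochifmt:f("{0}, {1}!", ["Hello", world]).
"Hello, world!"
2> mochifmt:bformat("{name}", [{<<"name">>, <<"mochi">>}]).
<<"mochi">>
3> mochifmt:bformat("{0.0}-{0.1}", {{2011, 5}}).
<<"2011-5">>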
diff --git a/1.1.x/src/mochiweb/mochifmt_records.erl b/1.1.x/src/mochiweb/mochifmt_records.erl
deleted file mode 100644
index 2326d1dd..00000000
--- a/1.1.x/src/mochiweb/mochifmt_records.erl
+++ /dev/null
@@ -1,38 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2008 Mochi Media, Inc.
-
-%% @doc Formatter that understands records.
-%%
-%% Usage:
-%%
-%% 1> M = mochifmt_records:new([{rec, record_info(fields, rec)}]),
-%% M:format("{0.bar}", [#rec{bar=foo}]).
-%% foo
-
--module(mochifmt_records, [Recs]).
--author('bob@mochimedia.com').
--export([get_value/2]).
-
-get_value(Key, Rec) when is_tuple(Rec) and is_atom(element(1, Rec)) ->
- try begin
- Atom = list_to_existing_atom(Key),
- {_, Fields} = proplists:lookup(element(1, Rec), Recs),
- element(get_rec_index(Atom, Fields, 2), Rec)
- end
- catch error:_ -> mochifmt:get_value(Key, Rec)
- end;
-get_value(Key, Args) ->
- mochifmt:get_value(Key, Args).
-
-get_rec_index(Atom, [Atom | _], Index) ->
- Index;
-get_rec_index(Atom, [_ | Rest], Index) ->
- get_rec_index(Atom, Rest, 1 + Index).
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochifmt_std.erl b/1.1.x/src/mochiweb/mochifmt_std.erl
deleted file mode 100644
index d4d74f6f..00000000
--- a/1.1.x/src/mochiweb/mochifmt_std.erl
+++ /dev/null
@@ -1,30 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2008 Mochi Media, Inc.
-
-%% @doc Template module for a mochifmt formatter.
-
--module(mochifmt_std, []).
--author('bob@mochimedia.com').
--export([format/2, get_value/2, format_field/2, get_field/2, convert_field/2]).
-
-format(Format, Args) ->
- mochifmt:format(Format, Args, THIS).
-
-get_field(Key, Args) ->
- mochifmt:get_field(Key, Args, THIS).
-
-convert_field(Key, Args) ->
- mochifmt:convert_field(Key, Args).
-
-get_value(Key, Args) ->
- mochifmt:get_value(Key, Args).
-
-format_field(Arg, Format) ->
- mochifmt:format_field(Arg, Format, THIS).
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiglobal.erl b/1.1.x/src/mochiweb/mochiglobal.erl
deleted file mode 100644
index c740b878..00000000
--- a/1.1.x/src/mochiweb/mochiglobal.erl
+++ /dev/null
@@ -1,107 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
-%% <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
--module(mochiglobal).
--author("Bob Ippolito <bob@mochimedia.com>").
--export([get/1, get/2, put/2, delete/1]).
-
--spec get(atom()) -> any() | undefined.
-%% @equiv get(K, undefined)
-get(K) ->
- get(K, undefined).
-
--spec get(atom(), T) -> any() | T.
-%% @doc Get the term for K or return Default.
-get(K, Default) ->
- get(K, Default, key_to_module(K)).
-
-get(_K, Default, Mod) ->
- try Mod:term()
- catch error:undef ->
- Default
- end.
-
--spec put(atom(), any()) -> ok.
-%% @doc Store term V at K, replaces an existing term if present.
-put(K, V) ->
- put(K, V, key_to_module(K)).
-
-put(_K, V, Mod) ->
- Bin = compile(Mod, V),
- code:purge(Mod),
- code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
- ok.
-
--spec delete(atom()) -> boolean().
-%% @doc Delete term stored at K, no-op if non-existent.
-delete(K) ->
- delete(K, key_to_module(K)).
-
-delete(_K, Mod) ->
- code:purge(Mod),
- code:delete(Mod).
-
--spec key_to_module(atom()) -> atom().
-key_to_module(K) ->
- list_to_atom("mochiglobal:" ++ atom_to_list(K)).
-
--spec compile(atom(), any()) -> binary().
-compile(Module, T) ->
- {ok, Module, Bin} = compile:forms(forms(Module, T),
- [verbose, report_errors]),
- Bin.
-
--spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
-forms(Module, T) ->
- [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
-
--spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
-term_to_abstract(Module, Getter, T) ->
- [%% -module(Module).
- erl_syntax:attribute(
- erl_syntax:atom(module),
- [erl_syntax:atom(Module)]),
- %% -export([Getter/0]).
- erl_syntax:attribute(
- erl_syntax:atom(export),
- [erl_syntax:list(
- [erl_syntax:arity_qualifier(
- erl_syntax:atom(Getter),
- erl_syntax:integer(0))])]),
- %% Getter() -> T.
- erl_syntax:function(
- erl_syntax:atom(Getter),
- [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
-
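For a key such as port (an illustrative name and value), key_to_module/1 yields the module name 'mochiglobal:port', and the forms built above correspond to source along the lines of:

-module('mochiglobal:port').
-export([term/0]).
term() -> 8080.

Reading a key back is then a constant-pool lookup: get/3 simply calls Mod:term() and a key that was never stored surfaces as error:undef, which is mapped to the caller's default.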
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-get_put_delete_test() ->
- K = '$$test$$mochiglobal',
- delete(K),
- ?assertEqual(
- bar,
- get(K, bar)),
- try
- ?MODULE:put(K, baz),
- ?assertEqual(
- baz,
- get(K, bar)),
- ?MODULE:put(K, wibble),
- ?assertEqual(
- wibble,
- ?MODULE:get(K))
- after
- delete(K)
- end,
- ?assertEqual(
- bar,
- get(K, bar)),
- ?assertEqual(
- undefined,
- ?MODULE:get(K)),
- ok.
--endif.
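Typical use, sketched as a shell session (key and value are illustrative). Note that every put/2 recompiles and reloads a module, so this suits read-mostly data:

1> mochiglobal:put(port, 8080).
ok
2> mochiglobal:get(port).
8080
3> mochiglobal:get(missing, fallback).
fallback
4> mochiglobal:delete(port).
true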
diff --git a/1.1.x/src/mochiweb/mochihex.erl b/1.1.x/src/mochiweb/mochihex.erl
deleted file mode 100644
index 44a2aa7f..00000000
--- a/1.1.x/src/mochiweb/mochihex.erl
+++ /dev/null
@@ -1,91 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2006 Mochi Media, Inc.
-
-%% @doc Utilities for working with hexadecimal strings.
-
--module(mochihex).
--author('bob@mochimedia.com').
-
--export([to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
-
-%% @type iolist() = [char() | binary() | iolist()]
-%% @type iodata() = iolist() | binary()
-
-%% @spec to_hex(integer | iolist()) -> string()
-%% @doc Convert an iolist to a hexadecimal string.
-to_hex(0) ->
- "0";
-to_hex(I) when is_integer(I), I > 0 ->
- to_hex_int(I, []);
-to_hex(B) ->
- to_hex(iolist_to_binary(B), []).
-
-%% @spec to_bin(string()) -> binary()
-%% @doc Convert a hexadecimal string to a binary.
-to_bin(L) ->
- to_bin(L, []).
-
-%% @spec to_int(string()) -> integer()
-%% @doc Convert a hexadecimal string to an integer.
-to_int(L) ->
- erlang:list_to_integer(L, 16).
-
-%% @spec dehex(char()) -> integer()
-%% @doc Convert a hex digit to its integer value.
-dehex(C) when C >= $0, C =< $9 ->
- C - $0;
-dehex(C) when C >= $a, C =< $f ->
- C - $a + 10;
-dehex(C) when C >= $A, C =< $F ->
- C - $A + 10.
-
-%% @spec hexdigit(integer()) -> char()
-%% @doc Convert an integer less than 16 to a hex digit.
-hexdigit(C) when C >= 0, C =< 9 ->
- C + $0;
-hexdigit(C) when C =< 15 ->
- C + $a - 10.
-
-%% Internal API
-
-to_hex(<<>>, Acc) ->
- lists:reverse(Acc);
-to_hex(<<C1:4, C2:4, Rest/binary>>, Acc) ->
- to_hex(Rest, [hexdigit(C2), hexdigit(C1) | Acc]).
-
-to_hex_int(0, Acc) ->
- Acc;
-to_hex_int(I, Acc) ->
- to_hex_int(I bsr 4, [hexdigit(I band 15) | Acc]).
-
-to_bin([], Acc) ->
- iolist_to_binary(lists:reverse(Acc));
-to_bin([C1, C2 | Rest], Acc) ->
- to_bin(Rest, [(dehex(C1) bsl 4) bor dehex(C2) | Acc]).
-
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-to_hex_test() ->
- "ff000ff1" = to_hex([255, 0, 15, 241]),
- "ff000ff1" = to_hex(16#ff000ff1),
- "0" = to_hex(16#0),
- ok.
-
-to_bin_test() ->
- <<255, 0, 15, 241>> = to_bin("ff000ff1"),
- <<255, 0, 10, 161>> = to_bin("Ff000aA1"),
- ok.
-
-to_int_test() ->
- 16#ff000ff1 = to_int("ff000ff1"),
- 16#ff000aa1 = to_int("FF000Aa1"),
- 16#0 = to_int("0"),
- ok.
-
--endif.
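A quick shell sketch of the API, mirroring the tests above (values are illustrative):

1> mochihex:to_hex([255, 0, 15, 241]).
"ff000ff1"
2> mochihex:to_bin("ff000ff1").
<<255,0,15,241>>
3> mochihex:to_int("FF").
255
4> mochihex:to_hex(255).
"ff"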
diff --git a/1.1.x/src/mochiweb/mochijson.erl b/1.1.x/src/mochiweb/mochijson.erl
deleted file mode 100644
index 2e3d1452..00000000
--- a/1.1.x/src/mochiweb/mochijson.erl
+++ /dev/null
@@ -1,531 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2006 Mochi Media, Inc.
-
-%% @doc Yet another JSON (RFC 4627) library for Erlang.
--module(mochijson).
--author('bob@mochimedia.com').
--export([encoder/1, encode/1]).
--export([decoder/1, decode/1]).
--export([binary_encoder/1, binary_encode/1]).
--export([binary_decoder/1, binary_decode/1]).
-
-% This is a macro to placate syntax highlighters.
--define(Q, $\").
--define(ADV_COL(S, N), S#decoder{column=N+S#decoder.column}).
--define(INC_COL(S), S#decoder{column=1+S#decoder.column}).
--define(INC_LINE(S), S#decoder{column=1, line=1+S#decoder.line}).
-
-%% @type iolist() = [char() | binary() | iolist()]
-%% @type iodata() = iolist() | binary()
-%% @type json_string() = atom | string() | binary()
-%% @type json_number() = integer() | float()
-%% @type json_array() = {array, [json_term()]}
-%% @type json_object() = {struct, [{json_string(), json_term()}]}
-%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object()
-%% @type encoding() = utf8 | unicode
-%% @type encoder_option() = {input_encoding, encoding()} |
-%% {handler, function()}
-%% @type decoder_option() = {input_encoding, encoding()} |
-%% {object_hook, function()}
-%% @type bjson_string() = binary()
-%% @type bjson_number() = integer() | float()
-%% @type bjson_array() = [bjson_term()]
-%% @type bjson_object() = {struct, [{bjson_string(), bjson_term()}]}
-%% @type bjson_term() = bjson_string() | bjson_number() | bjson_array() |
-%% bjson_object()
-%% @type binary_encoder_option() = {handler, function()}
-%% @type binary_decoder_option() = {object_hook, function()}
-
--record(encoder, {input_encoding=unicode,
- handler=null}).
-
--record(decoder, {input_encoding=utf8,
- object_hook=null,
- line=1,
- column=1,
- state=null}).
-
-%% @spec encoder([encoder_option()]) -> function()
-%% @doc Create an encoder/1 with the given options.
-encoder(Options) ->
- State = parse_encoder_options(Options, #encoder{}),
- fun (O) -> json_encode(O, State) end.
-
-%% @spec encode(json_term()) -> iolist()
-%% @doc Encode the given term as JSON to an iolist.
-encode(Any) ->
- json_encode(Any, #encoder{}).
-
-%% @spec decoder([decoder_option()]) -> function()
-%% @doc Create a decoder/1 with the given options.
-decoder(Options) ->
- State = parse_decoder_options(Options, #decoder{}),
- fun (O) -> json_decode(O, State) end.
-
-%% @spec decode(iolist()) -> json_term()
-%% @doc Decode the given iolist to Erlang terms.
-decode(S) ->
- json_decode(S, #decoder{}).
-
-%% @spec binary_decoder([binary_decoder_option()]) -> function()
-%% @doc Create a binary_decoder/1 with the given options.
-binary_decoder(Options) ->
- mochijson2:decoder(Options).
-
-%% @spec binary_encoder([binary_encoder_option()]) -> function()
-%% @doc Create a binary_encoder/1 with the given options.
-binary_encoder(Options) ->
- mochijson2:encoder(Options).
-
-%% @spec binary_encode(bjson_term()) -> iolist()
-%% @doc Encode the given term as JSON to an iolist, using lists for arrays and
-%% binaries for strings.
-binary_encode(Any) ->
- mochijson2:encode(Any).
-
-%% @spec binary_decode(iolist()) -> bjson_term()
-%% @doc Decode the given iolist to Erlang terms, using lists for arrays and
-%% binaries for strings.
-binary_decode(S) ->
- mochijson2:decode(S).
-
-%% Internal API
-
-parse_encoder_options([], State) ->
- State;
-parse_encoder_options([{input_encoding, Encoding} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{input_encoding=Encoding});
-parse_encoder_options([{handler, Handler} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{handler=Handler}).
-
-parse_decoder_options([], State) ->
- State;
-parse_decoder_options([{input_encoding, Encoding} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{input_encoding=Encoding});
-parse_decoder_options([{object_hook, Hook} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
-
-json_encode(true, _State) ->
- "true";
-json_encode(false, _State) ->
- "false";
-json_encode(null, _State) ->
- "null";
-json_encode(I, _State) when is_integer(I) ->
- integer_to_list(I);
-json_encode(F, _State) when is_float(F) ->
- mochinum:digits(F);
-json_encode(L, State) when is_list(L); is_binary(L); is_atom(L) ->
- json_encode_string(L, State);
-json_encode({array, Props}, State) when is_list(Props) ->
- json_encode_array(Props, State);
-json_encode({struct, Props}, State) when is_list(Props) ->
- json_encode_proplist(Props, State);
-json_encode(Bad, #encoder{handler=null}) ->
- exit({json_encode, {bad_term, Bad}});
-json_encode(Bad, State=#encoder{handler=Handler}) ->
- json_encode(Handler(Bad), State).
-
-json_encode_array([], _State) ->
- "[]";
-json_encode_array(L, State) ->
- F = fun (O, Acc) ->
- [$,, json_encode(O, State) | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "[", L),
- lists:reverse([$\] | Acc1]).
-
-json_encode_proplist([], _State) ->
- "{}";
-json_encode_proplist(Props, State) ->
- F = fun ({K, V}, Acc) ->
- KS = case K of
- K when is_atom(K) ->
- json_encode_string_utf8(atom_to_list(K));
- K when is_integer(K) ->
- json_encode_string(integer_to_list(K), State);
- K when is_list(K); is_binary(K) ->
- json_encode_string(K, State)
- end,
- VS = json_encode(V, State),
- [$,, VS, $:, KS | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "{", Props),
- lists:reverse([$\} | Acc1]).
-
-json_encode_string(A, _State) when is_atom(A) ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(atom_to_list(A)));
-json_encode_string(B, _State) when is_binary(B) ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(B));
-json_encode_string(S, #encoder{input_encoding=utf8}) ->
- json_encode_string_utf8(S);
-json_encode_string(S, #encoder{input_encoding=unicode}) ->
- json_encode_string_unicode(S).
-
-json_encode_string_utf8(S) ->
- [?Q | json_encode_string_utf8_1(S)].
-
-json_encode_string_utf8_1([C | Cs]) when C >= 0, C =< 16#7f ->
- NewC = case C of
- $\\ -> "\\\\";
- ?Q -> "\\\"";
- _ when C >= $\s, C < 16#7f -> C;
- $\t -> "\\t";
- $\n -> "\\n";
- $\r -> "\\r";
- $\f -> "\\f";
- $\b -> "\\b";
- _ when C >= 0, C =< 16#7f -> unihex(C);
- _ -> exit({json_encode, {bad_char, C}})
- end,
- [NewC | json_encode_string_utf8_1(Cs)];
-json_encode_string_utf8_1(All=[C | _]) when C >= 16#80, C =< 16#10FFFF ->
- [?Q | Rest] = json_encode_string_unicode(xmerl_ucs:from_utf8(All)),
- Rest;
-json_encode_string_utf8_1([]) ->
- "\"".
-
-json_encode_string_unicode(S) ->
- [?Q | json_encode_string_unicode_1(S)].
-
-json_encode_string_unicode_1([C | Cs]) ->
- NewC = case C of
- $\\ -> "\\\\";
- ?Q -> "\\\"";
- _ when C >= $\s, C < 16#7f -> C;
- $\t -> "\\t";
- $\n -> "\\n";
- $\r -> "\\r";
- $\f -> "\\f";
- $\b -> "\\b";
- _ when C >= 0, C =< 16#10FFFF -> unihex(C);
- _ -> exit({json_encode, {bad_char, C}})
- end,
- [NewC | json_encode_string_unicode_1(Cs)];
-json_encode_string_unicode_1([]) ->
- "\"".
-
-dehex(C) when C >= $0, C =< $9 ->
- C - $0;
-dehex(C) when C >= $a, C =< $f ->
- C - $a + 10;
-dehex(C) when C >= $A, C =< $F ->
- C - $A + 10.
-
-hexdigit(C) when C >= 0, C =< 9 ->
- C + $0;
-hexdigit(C) when C =< 15 ->
- C + $a - 10.
-
-unihex(C) when C < 16#10000 ->
- <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
- Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
- [$\\, $u | Digits];
-unihex(C) when C =< 16#10FFFF ->
- N = C - 16#10000,
- S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
- S2 = 16#dc00 bor (N band 16#3ff),
- [unihex(S1), unihex(S2)].
-
-json_decode(B, S) when is_binary(B) ->
- json_decode(binary_to_list(B), S);
-json_decode(L, S) ->
- {Res, L1, S1} = decode1(L, S),
- {eof, [], _} = tokenize(L1, S1#decoder{state=trim}),
- Res.
-
-decode1(L, S=#decoder{state=null}) ->
- case tokenize(L, S#decoder{state=any}) of
- {{const, C}, L1, S1} ->
- {C, L1, S1};
- {start_array, L1, S1} ->
- decode_array(L1, S1#decoder{state=any}, []);
- {start_object, L1, S1} ->
- decode_object(L1, S1#decoder{state=key}, [])
- end.
-
-make_object(V, #decoder{object_hook=null}) ->
- V;
-make_object(V, #decoder{object_hook=Hook}) ->
- Hook(V).
-
-decode_object(L, S=#decoder{state=key}, Acc) ->
- case tokenize(L, S) of
- {end_object, Rest, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, Rest, S1#decoder{state=null}};
- {{const, K}, Rest, S1} when is_list(K) ->
- {colon, L2, S2} = tokenize(Rest, S1),
- {V, L3, S3} = decode1(L2, S2#decoder{state=null}),
- decode_object(L3, S3#decoder{state=comma}, [{K, V} | Acc])
- end;
-decode_object(L, S=#decoder{state=comma}, Acc) ->
- case tokenize(L, S) of
- {end_object, Rest, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, Rest, S1#decoder{state=null}};
- {comma, Rest, S1} ->
- decode_object(Rest, S1#decoder{state=key}, Acc)
- end.
-
-decode_array(L, S=#decoder{state=any}, Acc) ->
- case tokenize(L, S) of
- {end_array, Rest, S1} ->
- {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
- {start_array, Rest, S1} ->
- {Array, Rest1, S2} = decode_array(Rest, S1#decoder{state=any}, []),
- decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
- {start_object, Rest, S1} ->
- {Array, Rest1, S2} = decode_object(Rest, S1#decoder{state=key}, []),
- decode_array(Rest1, S2#decoder{state=comma}, [Array | Acc]);
- {{const, Const}, Rest, S1} ->
- decode_array(Rest, S1#decoder{state=comma}, [Const | Acc])
- end;
-decode_array(L, S=#decoder{state=comma}, Acc) ->
- case tokenize(L, S) of
- {end_array, Rest, S1} ->
- {{array, lists:reverse(Acc)}, Rest, S1#decoder{state=null}};
- {comma, Rest, S1} ->
- decode_array(Rest, S1#decoder{state=any}, Acc)
- end.
-
-tokenize_string(IoList=[C | _], S=#decoder{input_encoding=utf8}, Acc)
- when is_list(C); is_binary(C); C >= 16#7f ->
- List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
- tokenize_string(List, S#decoder{input_encoding=unicode}, Acc);
-tokenize_string("\"" ++ Rest, S, Acc) ->
- {lists:reverse(Acc), Rest, ?INC_COL(S)};
-tokenize_string("\\\"" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\" | Acc]);
-tokenize_string("\\\\" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\\ | Acc]);
-tokenize_string("\\/" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$/ | Acc]);
-tokenize_string("\\b" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\b | Acc]);
-tokenize_string("\\f" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\f | Acc]);
-tokenize_string("\\n" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\n | Acc]);
-tokenize_string("\\r" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\r | Acc]);
-tokenize_string("\\t" ++ Rest, S, Acc) ->
- tokenize_string(Rest, ?ADV_COL(S, 2), [$\t | Acc]);
-tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
- % coalesce UTF-16 surrogate pair?
- C = dehex(C0) bor
- (dehex(C1) bsl 4) bor
- (dehex(C2) bsl 8) bor
- (dehex(C3) bsl 12),
- tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
-tokenize_string([C | Rest], S, Acc) when C >= $\s, C =< 16#10FFFF ->
- tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
-
-tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
- when is_list(C); is_binary(C); C >= 16#7f ->
- List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
- tokenize_number(List, Mode, S#decoder{input_encoding=unicode}, Acc);
-tokenize_number([$- | Rest], sign, S, []) ->
- tokenize_number(Rest, int, ?INC_COL(S), [$-]);
-tokenize_number(Rest, sign, S, []) ->
- tokenize_number(Rest, int, S, []);
-tokenize_number([$0 | Rest], int, S, Acc) ->
- tokenize_number(Rest, frac, ?INC_COL(S), [$0 | Acc]);
-tokenize_number([C | Rest], int, S, Acc) when C >= $1, C =< $9 ->
- tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
-tokenize_number([C | Rest], int1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, int1, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, int1, S, Acc) ->
- tokenize_number(Rest, frac, S, Acc);
-tokenize_number([$., C | Rest], frac, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
-tokenize_number([E | Rest], frac, S, Acc) when E == $e; E == $E ->
- tokenize_number(Rest, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
-tokenize_number(Rest, frac, S, Acc) ->
- {{int, lists:reverse(Acc)}, Rest, S};
-tokenize_number([C | Rest], frac1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, frac1, ?INC_COL(S), [C | Acc]);
-tokenize_number([E | Rest], frac1, S, Acc) when E == $e; E == $E ->
- tokenize_number(Rest, esign, ?INC_COL(S), [$e | Acc]);
-tokenize_number(Rest, frac1, S, Acc) ->
- {{float, lists:reverse(Acc)}, Rest, S};
-tokenize_number([C | Rest], esign, S, Acc) when C == $-; C == $+ ->
- tokenize_number(Rest, eint, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, esign, S, Acc) ->
- tokenize_number(Rest, eint, S, Acc);
-tokenize_number([C | Rest], eint, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
-tokenize_number([C | Rest], eint1, S, Acc) when C >= $0, C =< $9 ->
- tokenize_number(Rest, eint1, ?INC_COL(S), [C | Acc]);
-tokenize_number(Rest, eint1, S, Acc) ->
- {{float, lists:reverse(Acc)}, Rest, S}.
-
-tokenize([], S=#decoder{state=trim}) ->
- {eof, [], S};
-tokenize([L | Rest], S) when is_list(L) ->
- tokenize(L ++ Rest, S);
-tokenize([B | Rest], S) when is_binary(B) ->
- tokenize(xmerl_ucs:from_utf8(B) ++ Rest, S);
-tokenize("\r\n" ++ Rest, S) ->
- tokenize(Rest, ?INC_LINE(S));
-tokenize("\n" ++ Rest, S) ->
- tokenize(Rest, ?INC_LINE(S));
-tokenize([C | Rest], S) when C == $\s; C == $\t ->
- tokenize(Rest, ?INC_COL(S));
-tokenize("{" ++ Rest, S) ->
- {start_object, Rest, ?INC_COL(S)};
-tokenize("}" ++ Rest, S) ->
- {end_object, Rest, ?INC_COL(S)};
-tokenize("[" ++ Rest, S) ->
- {start_array, Rest, ?INC_COL(S)};
-tokenize("]" ++ Rest, S) ->
- {end_array, Rest, ?INC_COL(S)};
-tokenize("," ++ Rest, S) ->
- {comma, Rest, ?INC_COL(S)};
-tokenize(":" ++ Rest, S) ->
- {colon, Rest, ?INC_COL(S)};
-tokenize("null" ++ Rest, S) ->
- {{const, null}, Rest, ?ADV_COL(S, 4)};
-tokenize("true" ++ Rest, S) ->
- {{const, true}, Rest, ?ADV_COL(S, 4)};
-tokenize("false" ++ Rest, S) ->
- {{const, false}, Rest, ?ADV_COL(S, 5)};
-tokenize("\"" ++ Rest, S) ->
- {String, Rest1, S1} = tokenize_string(Rest, ?INC_COL(S), []),
- {{const, String}, Rest1, S1};
-tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
- case tokenize_number(L, sign, S, []) of
- {{int, Int}, Rest, S1} ->
- {{const, list_to_integer(Int)}, Rest, S1};
- {{float, Float}, Rest, S1} ->
- {{const, list_to_float(Float)}, Rest, S1}
- end.
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-%% testing constructs borrowed from the Yaws JSON implementation.
-
-%% Create an object from a list of Key/Value pairs.
-
-obj_new() ->
- {struct, []}.
-
-is_obj({struct, Props}) ->
- F = fun ({K, _}) when is_list(K) ->
- true;
- (_) ->
- false
- end,
- lists:all(F, Props).
-
-obj_from_list(Props) ->
- Obj = {struct, Props},
- case is_obj(Obj) of
- true -> Obj;
- false -> exit(json_bad_object)
- end.
-
-%% Test for equivalence of Erlang terms.
-%% Due to arbitrary order of construction, equivalent objects might
-%% compare unequal as erlang terms, so we need to carefully recurse
-%% through aggregates (tuples and objects).
-
-equiv({struct, Props1}, {struct, Props2}) ->
- equiv_object(Props1, Props2);
-equiv({array, L1}, {array, L2}) ->
- equiv_list(L1, L2);
-equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
-equiv(S1, S2) when is_list(S1), is_list(S2) -> S1 == S2;
-equiv(true, true) -> true;
-equiv(false, false) -> true;
-equiv(null, null) -> true.
-
-%% Object representation and traversal order is unknown.
-%% Use the sledgehammer and sort property lists.
-
-equiv_object(Props1, Props2) ->
- L1 = lists:keysort(1, Props1),
- L2 = lists:keysort(1, Props2),
- Pairs = lists:zip(L1, L2),
- true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
- equiv(K1, K2) and equiv(V1, V2)
- end, Pairs).
-
-%% Recursively compare tuple elements for equivalence.
-
-equiv_list([], []) ->
- true;
-equiv_list([V1 | L1], [V2 | L2]) ->
- equiv(V1, V2) andalso equiv_list(L1, L2).
-
-e2j_vec_test() ->
- test_one(e2j_test_vec(utf8), 1).
-
-issue33_test() ->
- %% http://code.google.com/p/mochiweb/issues/detail?id=33
- Js = {struct, [{"key", [194, 163]}]},
- Encoder = encoder([{input_encoding, utf8}]),
- "{\"key\":\"\\u00a3\"}" = lists:flatten(Encoder(Js)).
-
-test_one([], _N) ->
- %% io:format("~p tests passed~n", [N-1]),
- ok;
-test_one([{E, J} | Rest], N) ->
- %% io:format("[~p] ~p ~p~n", [N, E, J]),
- true = equiv(E, decode(J)),
- true = equiv(E, decode(encode(E))),
- test_one(Rest, 1+N).
-
-e2j_test_vec(utf8) ->
- [
- {1, "1"},
-    {3.1416, "3.14160"}, % text representation may truncate or add trailing zeroes
- {-1, "-1"},
- {-3.1416, "-3.14160"},
- {12.0e10, "1.20000e+11"},
- {1.234E+10, "1.23400e+10"},
- {-1.234E-10, "-1.23400e-10"},
- {10.0, "1.0e+01"},
- {123.456, "1.23456E+2"},
- {10.0, "1e1"},
- {"foo", "\"foo\""},
- {"foo" ++ [5] ++ "bar", "\"foo\\u0005bar\""},
- {"", "\"\""},
- {"\"", "\"\\\"\""},
- {"\n\n\n", "\"\\n\\n\\n\""},
- {"\\", "\"\\\\\""},
- {"\" \b\f\r\n\t\"", "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
- {obj_new(), "{}"},
- {obj_from_list([{"foo", "bar"}]), "{\"foo\":\"bar\"}"},
- {obj_from_list([{"foo", "bar"}, {"baz", 123}]),
- "{\"foo\":\"bar\",\"baz\":123}"},
- {{array, []}, "[]"},
- {{array, [{array, []}]}, "[[]]"},
- {{array, [1, "foo"]}, "[1,\"foo\"]"},
-
- % json array in a json object
- {obj_from_list([{"foo", {array, [123]}}]),
- "{\"foo\":[123]}"},
-
- % json object in a json object
- {obj_from_list([{"foo", obj_from_list([{"bar", true}])}]),
- "{\"foo\":{\"bar\":true}}"},
-
- % fold evaluation order
- {obj_from_list([{"foo", {array, []}},
- {"bar", obj_from_list([{"baz", true}])},
- {"alice", "bob"}]),
- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
-
- % json object in a json array
- {{array, [-123, "foo", obj_from_list([{"bar", {array, []}}]), null]},
- "[-123,\"foo\",{\"bar\":[]},null]"}
- ].
-
--endif.
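To contrast with mochijson2 below: this module decodes JSON strings to Erlang lists and tags arrays with {array, _}. An illustrative round trip in the shell:

1> lists:flatten(mochijson:encode({struct, [{"id", 1}, {"tags", {array, ["a", "b"]}}]})).
"{\"id\":1,\"tags\":[\"a\",\"b\"]}"
2> mochijson:decode("{\"id\":1,\"tags\":[\"a\",\"b\"]}").
{struct,[{"id",1},{"tags",{array,["a","b"]}}]}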
diff --git a/1.1.x/src/mochiweb/mochijson2.erl b/1.1.x/src/mochiweb/mochijson2.erl
deleted file mode 100644
index 64cabc86..00000000
--- a/1.1.x/src/mochiweb/mochijson2.erl
+++ /dev/null
@@ -1,802 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
-%% with binaries as strings, arrays as lists (without an {array, _}
-%% wrapper), and it only knows how to decode UTF-8 (and ASCII).
-
--module(mochijson2).
--author('bob@mochimedia.com').
--export([encoder/1, encode/1]).
--export([decoder/1, decode/1]).
-
-% This is a macro to placate syntax highlighters.
--define(Q, $\").
--define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
- column=N+S#decoder.column}).
--define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
- column=1+S#decoder.column}).
--define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
- column=1,
- line=1+S#decoder.line}).
--define(INC_CHAR(S, C),
- case C of
- $\n ->
- S#decoder{column=1,
- line=1+S#decoder.line,
- offset=1+S#decoder.offset};
- _ ->
- S#decoder{column=1+S#decoder.column,
- offset=1+S#decoder.offset}
- end).
--define(IS_WHITESPACE(C),
- (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
-
-%% @type iolist() = [char() | binary() | iolist()]
-%% @type iodata() = iolist() | binary()
-%% @type json_string() = atom | binary()
-%% @type json_number() = integer() | float()
-%% @type json_array() = [json_term()]
-%% @type json_object() = {struct, [{json_string(), json_term()}]}
-%% @type json_iolist() = {json, iolist()}
-%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object() | json_iolist()
-
--record(encoder, {handler=null,
- utf8=false}).
-
--record(decoder, {object_hook=null,
- offset=0,
- line=1,
- column=1,
- state=null}).
-
-%% @spec encoder([encoder_option()]) -> function()
-%% @doc Create an encoder/1 with the given options.
-%% @type encoder_option() = handler_option() | utf8_option()
-%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false)
-encoder(Options) ->
- State = parse_encoder_options(Options, #encoder{}),
- fun (O) -> json_encode(O, State) end.
-
-%% @spec encode(json_term()) -> iolist()
-%% @doc Encode the given term as JSON to an iolist.
-encode(Any) ->
- json_encode(Any, #encoder{}).
-
-%% @spec decoder([decoder_option()]) -> function()
-%% @doc Create a decoder/1 with the given options.
-decoder(Options) ->
- State = parse_decoder_options(Options, #decoder{}),
- fun (O) -> json_decode(O, State) end.
-
-%% @spec decode(iolist()) -> json_term()
-%% @doc Decode the given iolist to Erlang terms.
-decode(S) ->
- json_decode(S, #decoder{}).
-
-%% Internal API
-
-parse_encoder_options([], State) ->
- State;
-parse_encoder_options([{handler, Handler} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{handler=Handler});
-parse_encoder_options([{utf8, Switch} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{utf8=Switch}).
-
-parse_decoder_options([], State) ->
- State;
-parse_decoder_options([{object_hook, Hook} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
-
-json_encode(true, _State) ->
- <<"true">>;
-json_encode(false, _State) ->
- <<"false">>;
-json_encode(null, _State) ->
- <<"null">>;
-json_encode(I, _State) when is_integer(I) ->
- integer_to_list(I);
-json_encode(F, _State) when is_float(F) ->
- mochinum:digits(F);
-json_encode(S, State) when is_binary(S); is_atom(S) ->
- json_encode_string(S, State);
-json_encode(Array, State) when is_list(Array) ->
- json_encode_array(Array, State);
-json_encode({struct, Props}, State) when is_list(Props) ->
- json_encode_proplist(Props, State);
-json_encode({json, IoList}, _State) ->
- IoList;
-json_encode(Bad, #encoder{handler=null}) ->
- exit({json_encode, {bad_term, Bad}});
-json_encode(Bad, State=#encoder{handler=Handler}) ->
- json_encode(Handler(Bad), State).
-
-json_encode_array([], _State) ->
- <<"[]">>;
-json_encode_array(L, State) ->
- F = fun (O, Acc) ->
- [$,, json_encode(O, State) | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "[", L),
- lists:reverse([$\] | Acc1]).
-
-json_encode_proplist([], _State) ->
- <<"{}">>;
-json_encode_proplist(Props, State) ->
- F = fun ({K, V}, Acc) ->
- KS = json_encode_string(K, State),
- VS = json_encode(V, State),
- [$,, VS, $:, KS | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "{", Props),
- lists:reverse([$\} | Acc1]).
-
-json_encode_string(A, State) when is_atom(A) ->
- L = atom_to_list(A),
- case json_string_is_safe(L) of
- true ->
- [?Q, L, ?Q];
- false ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q])
- end;
-json_encode_string(B, State) when is_binary(B) ->
- case json_bin_is_safe(B) of
- true ->
- [?Q, B, ?Q];
- false ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q])
- end;
-json_encode_string(I, _State) when is_integer(I) ->
- [?Q, integer_to_list(I), ?Q];
-json_encode_string(L, State) when is_list(L) ->
- case json_string_is_safe(L) of
- true ->
- [?Q, L, ?Q];
- false ->
- json_encode_string_unicode(L, State, [?Q])
- end.
-
-json_string_is_safe([]) ->
- true;
-json_string_is_safe([C | Rest]) ->
- case C of
- ?Q ->
- false;
- $\\ ->
- false;
- $\b ->
- false;
- $\f ->
- false;
- $\n ->
- false;
- $\r ->
- false;
- $\t ->
- false;
- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
- false;
- C when C < 16#7f ->
- json_string_is_safe(Rest);
- _ ->
- false
- end.
-
-json_bin_is_safe(<<>>) ->
- true;
-json_bin_is_safe(<<C, Rest/binary>>) ->
- case C of
- ?Q ->
- false;
- $\\ ->
- false;
- $\b ->
- false;
- $\f ->
- false;
- $\n ->
- false;
- $\r ->
- false;
- $\t ->
- false;
- C when C >= 0, C < $\s; C >= 16#7f ->
- false;
- C when C < 16#7f ->
- json_bin_is_safe(Rest)
- end.
-
-json_encode_string_unicode([], _State, Acc) ->
- lists:reverse([$\" | Acc]);
-json_encode_string_unicode([C | Cs], State, Acc) ->
- Acc1 = case C of
- ?Q ->
- [?Q, $\\ | Acc];
- %% Escaping solidus is only useful when trying to protect
- %% against "</script>" injection attacks which are only
-%% possible when JSON is inserted into an HTML document
- %% in-line. mochijson2 does not protect you from this, so
- %% if you do insert directly into HTML then you need to
- %% uncomment the following case or escape the output of encode.
- %%
- %% $/ ->
- %% [$/, $\\ | Acc];
- %%
- $\\ ->
- [$\\, $\\ | Acc];
- $\b ->
- [$b, $\\ | Acc];
- $\f ->
- [$f, $\\ | Acc];
- $\n ->
- [$n, $\\ | Acc];
- $\r ->
- [$r, $\\ | Acc];
- $\t ->
- [$t, $\\ | Acc];
- C when C >= 0, C < $\s ->
- [unihex(C) | Acc];
- C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 ->
- [xmerl_ucs:to_utf8(C) | Acc];
- C when C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 ->
- [unihex(C) | Acc];
- C when C < 16#7f ->
- [C | Acc];
- _ ->
- exit({json_encode, {bad_char, C}})
- end,
- json_encode_string_unicode(Cs, State, Acc1).
-
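The utf8 flag in #encoder{} (default false, see the record definition above) picks between the two clauses above for code points at or above 16#7f: escape as \uXXXX, or emit raw UTF-8. An illustrative shell comparison (<<194,163>> is the UTF-8 encoding of U+00A3; the exact shell rendering of the second result may vary):

1> iolist_to_binary(mochijson2:encode(<<194,163>>)).
<<"\"\\u00a3\"">>
2> iolist_to_binary((mochijson2:encoder([{utf8, true}]))(<<194,163>>)).
<<34,194,163,34>>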
-hexdigit(C) when C >= 0, C =< 9 ->
- C + $0;
-hexdigit(C) when C =< 15 ->
- C + $a - 10.
-
-unihex(C) when C < 16#10000 ->
- <<D3:4, D2:4, D1:4, D0:4>> = <<C:16>>,
- Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
- [$\\, $u | Digits];
-unihex(C) when C =< 16#10FFFF ->
- N = C - 16#10000,
- S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
- S2 = 16#dc00 bor (N band 16#3ff),
- [unihex(S1), unihex(S2)].
-
-json_decode(L, S) when is_list(L) ->
- json_decode(iolist_to_binary(L), S);
-json_decode(B, S) ->
- {Res, S1} = decode1(B, S),
- {eof, _} = tokenize(B, S1#decoder{state=trim}),
- Res.
-
-decode1(B, S=#decoder{state=null}) ->
- case tokenize(B, S#decoder{state=any}) of
- {{const, C}, S1} ->
- {C, S1};
- {start_array, S1} ->
- decode_array(B, S1);
- {start_object, S1} ->
- decode_object(B, S1)
- end.
-
-make_object(V, #decoder{object_hook=null}) ->
- V;
-make_object(V, #decoder{object_hook=Hook}) ->
- Hook(V).
-
-decode_object(B, S) ->
- decode_object(B, S#decoder{state=key}, []).
-
-decode_object(B, S=#decoder{state=key}, Acc) ->
- case tokenize(B, S) of
- {end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, S1#decoder{state=null}};
- {{const, K}, S1} ->
- {colon, S2} = tokenize(B, S1),
- {V, S3} = decode1(B, S2#decoder{state=null}),
- decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc])
- end;
-decode_object(B, S=#decoder{state=comma}, Acc) ->
- case tokenize(B, S) of
- {end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, S1#decoder{state=null}};
- {comma, S1} ->
- decode_object(B, S1#decoder{state=key}, Acc)
- end.
-
-decode_array(B, S) ->
- decode_array(B, S#decoder{state=any}, []).
-
-decode_array(B, S=#decoder{state=any}, Acc) ->
- case tokenize(B, S) of
- {end_array, S1} ->
- {lists:reverse(Acc), S1#decoder{state=null}};
- {start_array, S1} ->
- {Array, S2} = decode_array(B, S1),
- decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
- {start_object, S1} ->
- {Array, S2} = decode_object(B, S1),
- decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
- {{const, Const}, S1} ->
- decode_array(B, S1#decoder{state=comma}, [Const | Acc])
- end;
-decode_array(B, S=#decoder{state=comma}, Acc) ->
- case tokenize(B, S) of
- {end_array, S1} ->
- {lists:reverse(Acc), S1#decoder{state=null}};
- {comma, S1} ->
- decode_array(B, S1#decoder{state=any}, Acc)
- end.
-
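-%% Illustrative walk-through (not part of the original module): decoding
-%% <<"[1,{\"a\":true}]">> yields the token stream start_array, {const,1},
-%% comma, start_object, {const,<<"a">>}, colon, {const,true}, end_object,
-%% end_array, producing [1, {struct, [{<<"a">>, true}]}].
-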
-tokenize_string(B, S=#decoder{offset=O}) ->
- case tokenize_string_fast(B, O) of
- {escape, O1} ->
- Length = O1 - O,
- S1 = ?ADV_COL(S, Length),
- <<_:O/binary, Head:Length/binary, _/binary>> = B,
- tokenize_string(B, S1, lists:reverse(binary_to_list(Head)));
- O1 ->
- Length = O1 - O,
- <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B,
- {{const, String}, ?ADV_COL(S, Length + 1)}
- end.
-
-tokenize_string_fast(B, O) ->
- case B of
- <<_:O/binary, ?Q, _/binary>> ->
- O;
- <<_:O/binary, $\\, _/binary>> ->
- {escape, O};
- <<_:O/binary, C1, _/binary>> when C1 < 128 ->
- tokenize_string_fast(B, 1 + O);
- <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
- C2 >= 128, C2 =< 191 ->
- tokenize_string_fast(B, 2 + O);
- <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191 ->
- tokenize_string_fast(B, 3 + O);
- <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191,
- C4 >= 128, C4 =< 191 ->
- tokenize_string_fast(B, 4 + O);
- _ ->
- throw(invalid_utf8)
- end.
-
-tokenize_string(B, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, ?Q, _/binary>> ->
- {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)};
- <<_:O/binary, "\\\"", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]);
- <<_:O/binary, "\\\\", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]);
- <<_:O/binary, "\\/", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]);
- <<_:O/binary, "\\b", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]);
- <<_:O/binary, "\\f", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]);
- <<_:O/binary, "\\n", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]);
- <<_:O/binary, "\\r", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]);
- <<_:O/binary, "\\t", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]);
- <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> ->
- C = erlang:list_to_integer([C3, C2, C1, C0], 16),
- if C > 16#D7FF, C < 16#DC00 ->
- %% coalesce UTF-16 surrogate pair
- <<"\\u", D3, D2, D1, D0, _/binary>> = Rest,
- D = erlang:list_to_integer([D3,D2,D1,D0], 16),
- [CodePoint] = xmerl_ucs:from_utf16be(<<C:16/big-unsigned-integer,
- D:16/big-unsigned-integer>>),
- Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc),
- tokenize_string(B, ?ADV_COL(S, 12), Acc1);
- true ->
- Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc),
- tokenize_string(B, ?ADV_COL(S, 6), Acc1)
- end;
- <<_:O/binary, C1, _/binary>> when C1 < 128 ->
- tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]);
- <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
- C2 >= 128, C2 =< 191 ->
- tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]);
- <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191 ->
- tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]);
- <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191,
- C4 >= 128, C4 =< 191 ->
- tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]);
- _ ->
- throw(invalid_utf8)
- end.
-
-tokenize_number(B, S) ->
- case tokenize_number(B, sign, S, []) of
- {{int, Int}, S1} ->
- {{const, list_to_integer(Int)}, S1};
- {{float, Float}, S1} ->
- {{const, list_to_float(Float)}, S1}
- end.
-
-tokenize_number(B, sign, S=#decoder{offset=O}, []) ->
- case B of
- <<_:O/binary, $-, _/binary>> ->
- tokenize_number(B, int, ?INC_COL(S), [$-]);
- _ ->
- tokenize_number(B, int, S, [])
- end;
-tokenize_number(B, int, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, $0, _/binary>> ->
- tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]);
- <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 ->
- tokenize_number(B, int1, ?INC_COL(S), [C | Acc])
- end;
-tokenize_number(B, int1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, int1, ?INC_COL(S), [C | Acc]);
- _ ->
- tokenize_number(B, frac, S, Acc)
- end;
-tokenize_number(B, frac, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 ->
- tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
- tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
- _ ->
- {{int, lists:reverse(Acc)}, S}
- end;
-tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]);
- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
- tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]);
- _ ->
- {{float, lists:reverse(Acc)}, S}
- end;
-tokenize_number(B, esign, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ ->
- tokenize_number(B, eint, ?INC_COL(S), [C | Acc]);
- _ ->
- tokenize_number(B, eint, S, Acc)
- end;
-tokenize_number(B, eint, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc])
- end;
-tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]);
- _ ->
- {{float, lists:reverse(Acc)}, S}
- end.
-
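-%% Illustrative note (not part of the original module): Erlang's
-%% list_to_float/1 rejects "1e1", so when the frac state sees $e it
-%% pushes ".0e" rather than a bare "e"; tokenize_number therefore
-%% accumulates "1.0e1" for the input <<"1e1">>, and
-%% decode(<<"1e1">>) =:= 10.0 (see e2j_test_vec/1 below).
-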
-tokenize(B, S=#decoder{offset=O}) ->
- case B of
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- tokenize(B, ?INC_CHAR(S, C));
- <<_:O/binary, "{", _/binary>> ->
- {start_object, ?INC_COL(S)};
- <<_:O/binary, "}", _/binary>> ->
- {end_object, ?INC_COL(S)};
- <<_:O/binary, "[", _/binary>> ->
- {start_array, ?INC_COL(S)};
- <<_:O/binary, "]", _/binary>> ->
- {end_array, ?INC_COL(S)};
- <<_:O/binary, ",", _/binary>> ->
- {comma, ?INC_COL(S)};
- <<_:O/binary, ":", _/binary>> ->
- {colon, ?INC_COL(S)};
- <<_:O/binary, "null", _/binary>> ->
- {{const, null}, ?ADV_COL(S, 4)};
- <<_:O/binary, "true", _/binary>> ->
- {{const, true}, ?ADV_COL(S, 4)};
- <<_:O/binary, "false", _/binary>> ->
- {{const, false}, ?ADV_COL(S, 5)};
- <<_:O/binary, "\"", _/binary>> ->
- tokenize_string(B, ?INC_COL(S));
- <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9)
- orelse C =:= $- ->
- tokenize_number(B, S);
- <<_:O/binary>> ->
- trim = S#decoder.state,
- {eof, S}
- end.
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-
-%% testing constructs borrowed from the Yaws JSON implementation.
-
-%% Create an object from a list of Key/Value pairs.
-
-obj_new() ->
- {struct, []}.
-
-is_obj({struct, Props}) ->
- F = fun ({K, _}) when is_binary(K) -> true;
-         (_) -> false
-     end,
- lists:all(F, Props).
-
-obj_from_list(Props) ->
- Obj = {struct, Props},
- ?assert(is_obj(Obj)),
- Obj.
-
-%% Test for equivalence of Erlang terms.
-%% Due to arbitrary order of construction, equivalent objects might
-%% compare unequal as erlang terms, so we need to carefully recurse
-%% through aggregates (tuples and objects).
-
-equiv({struct, Props1}, {struct, Props2}) ->
- equiv_object(Props1, Props2);
-equiv(L1, L2) when is_list(L1), is_list(L2) ->
- equiv_list(L1, L2);
-equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
-equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
-equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
-
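-%% For example (illustrative, not part of the original module):
-%% equiv({struct, [{<<"a">>, 1}, {<<"b">>, 2}]},
-%%       {struct, [{<<"b">>, 2}, {<<"a">>, 1}]}) =:= true,
-%% even though the two tuples differ as raw Erlang terms.
-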
-%% Object representation and traversal order is unknown.
-%% Use the sledgehammer and sort property lists.
-
-equiv_object(Props1, Props2) ->
- L1 = lists:keysort(1, Props1),
- L2 = lists:keysort(1, Props2),
- Pairs = lists:zip(L1, L2),
- true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
- equiv(K1, K2) and equiv(V1, V2)
- end, Pairs).
-
-%% Recursively compare list elements for equivalence.
-
-equiv_list([], []) ->
- true;
-equiv_list([V1 | L1], [V2 | L2]) ->
- equiv(V1, V2) andalso equiv_list(L1, L2).
-
-decode_test() ->
- [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
- <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
-
-e2j_vec_test() ->
- test_one(e2j_test_vec(utf8), 1).
-
-test_one([], _N) ->
- %% io:format("~p tests passed~n", [N-1]),
- ok;
-test_one([{E, J} | Rest], N) ->
- %% io:format("[~p] ~p ~p~n", [N, E, J]),
- true = equiv(E, decode(J)),
- true = equiv(E, decode(encode(E))),
- test_one(Rest, 1+N).
-
-e2j_test_vec(utf8) ->
- [
- {1, "1"},
- {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes
- {-1, "-1"},
- {-3.1416, "-3.14160"},
- {12.0e10, "1.20000e+11"},
- {1.234E+10, "1.23400e+10"},
- {-1.234E-10, "-1.23400e-10"},
- {10.0, "1.0e+01"},
- {123.456, "1.23456E+2"},
- {10.0, "1e1"},
- {<<"foo">>, "\"foo\""},
- {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""},
- {<<"">>, "\"\""},
- {<<"\n\n\n">>, "\"\\n\\n\\n\""},
- {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
- {obj_new(), "{}"},
- {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"},
- {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]),
- "{\"foo\":\"bar\",\"baz\":123}"},
- {[], "[]"},
- {[[]], "[[]]"},
- {[1, <<"foo">>], "[1,\"foo\"]"},
-
- %% json array in a json object
- {obj_from_list([{<<"foo">>, [123]}]),
- "{\"foo\":[123]}"},
-
- %% json object in a json object
- {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]),
- "{\"foo\":{\"bar\":true}}"},
-
- %% fold evaluation order
- {obj_from_list([{<<"foo">>, []},
- {<<"bar">>, obj_from_list([{<<"baz">>, true}])},
- {<<"alice">>, <<"bob">>}]),
- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
-
- %% json object in a json array
- {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null],
- "[-123,\"foo\",{\"bar\":[]},null]"}
- ].
-
-%% test utf8 encoding
-encoder_utf8_test() ->
- %% safe conversion case (default)
- [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] =
- encode(<<1,"\321\202\320\265\321\201\321\202">>),
-
- %% raw utf8 output (optional)
- Enc = mochijson2:encoder([{utf8, true}]),
- [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] =
- Enc(<<1,"\321\202\320\265\321\201\321\202">>).
-
-input_validation_test() ->
- Good = [
- {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, %% pound
- {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, %% euro
- {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} %% denarius
- ],
- lists:foreach(fun({CodePoint, UTF8}) ->
- Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)),
- Expect = decode(UTF8)
- end, Good),
-
- Bad = [
- %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
- <<?Q, 16#80, ?Q>>,
- %% missing continuations, last byte in each should be 80-BF
- <<?Q, 16#C2, 16#7F, ?Q>>,
- <<?Q, 16#E0, 16#80,16#7F, ?Q>>,
- <<?Q, 16#F0, 16#80, 16#80, 16#7F, ?Q>>,
- %% we don't support code points > 10FFFF per RFC 3629
- <<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>,
- %% escape characters trigger a different code path
- <<?Q, $\\, $\n, 16#80, ?Q>>
- ],
- lists:foreach(
- fun(X) ->
- ok = try decode(X) catch invalid_utf8 -> ok end,
- %% could be {ucs,{bad_utf8_character_code}} or
- %% {json_encode,{bad_char,_}}
- {'EXIT', _} = (catch encode(X))
- end, Bad).
-
-inline_json_test() ->
- ?assertEqual(<<"\"iodata iodata\"">>,
- iolist_to_binary(
- encode({json, [<<"\"iodata">>, " iodata\""]}))),
- ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
- decode(
- encode({struct,
- [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
- ok.
-
-big_unicode_test() ->
- UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)),
- ?assertEqual(
- <<"\"\\ud834\\udd20\"">>,
- iolist_to_binary(encode(UTF8Seq))),
- ?assertEqual(
- UTF8Seq,
- decode(iolist_to_binary(encode(UTF8Seq)))),
- ok.
-
-custom_decoder_test() ->
- ?assertEqual(
- {struct, [{<<"key">>, <<"value">>}]},
- (decoder([]))("{\"key\": \"value\"}")),
- F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
- ?assertEqual(
- win,
- (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
- ok.
-
-atom_test() ->
- %% JSON native atoms
- [begin
- ?assertEqual(A, decode(atom_to_list(A))),
- ?assertEqual(iolist_to_binary(atom_to_list(A)),
- iolist_to_binary(encode(A)))
- end || A <- [true, false, null]],
- %% Atom to string
- ?assertEqual(
- <<"\"foo\"">>,
- iolist_to_binary(encode(foo))),
- ?assertEqual(
- <<"\"\\ud834\\udd20\"">>,
- iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))),
- ok.
-
-key_encode_test() ->
- %% Some forms are accepted as keys that would not be strings in other
- %% cases
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{foo, 1}]}))),
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{"foo", 1}]}))),
- ?assertEqual(
- <<"{\"\\ud834\\udd20\":1}">>,
- iolist_to_binary(
- encode({struct, [{[16#0001d120], 1}]}))),
- ?assertEqual(
- <<"{\"1\":1}">>,
- iolist_to_binary(encode({struct, [{1, 1}]}))),
- ok.
-
-unsafe_chars_test() ->
- Chars = "\"\\\b\f\n\r\t",
- [begin
- ?assertEqual(false, json_string_is_safe([C])),
- ?assertEqual(false, json_bin_is_safe(<<C>>)),
- ?assertEqual(<<C>>, decode(encode(<<C>>)))
- end || C <- Chars],
- ?assertEqual(
- false,
- json_string_is_safe([16#0001d120])),
- ?assertEqual(
- false,
- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))),
- ?assertEqual(
- [16#0001d120],
- xmerl_ucs:from_utf8(
- binary_to_list(
- decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))),
- ?assertEqual(
- false,
- json_string_is_safe([16#110000])),
- ?assertEqual(
- false,
- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))),
- %% solidus can be escaped but isn't unsafe by default
- ?assertEqual(
- <<"/">>,
- decode(<<"\"\\/\"">>)),
- ok.
-
-int_test() ->
- ?assertEqual(0, decode("0")),
- ?assertEqual(1, decode("1")),
- ?assertEqual(11, decode("11")),
- ok.
-
-large_int_test() ->
- ?assertEqual(<<"-2147483649214748364921474836492147483649">>,
- iolist_to_binary(encode(-2147483649214748364921474836492147483649))),
- ?assertEqual(<<"2147483649214748364921474836492147483649">>,
- iolist_to_binary(encode(2147483649214748364921474836492147483649))),
- ok.
-
-float_test() ->
- ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))),
- ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))),
- ok.
-
-handler_test() ->
- ?assertEqual(
- {'EXIT',{json_encode,{bad_term,{}}}},
- catch encode({})),
- F = fun ({}) -> [] end,
- ?assertEqual(
- <<"[]">>,
- iolist_to_binary((encoder([{handler, F}]))({}))),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochilists.erl b/1.1.x/src/mochiweb/mochilists.erl
deleted file mode 100644
index 8981e7b6..00000000
--- a/1.1.x/src/mochiweb/mochilists.erl
+++ /dev/null
@@ -1,104 +0,0 @@
-%% @copyright Copyright (c) 2010 Mochi Media, Inc.
-%% @author David Reid <dreid@mochimedia.com>
-
-%% @doc Utility functions for dealing with proplists.
-
--module(mochilists).
--author("David Reid <dreid@mochimedia.com>").
--export([get_value/2, get_value/3, is_defined/2, set_default/2, set_defaults/2]).
-
-%% @spec set_default({Key::term(), Value::term()}, Proplist::list()) -> list()
-%%
-%% @doc Return new Proplist with {Key, Value} set if not is_defined(Key, Proplist).
-set_default({Key, Value}, Proplist) ->
- case is_defined(Key, Proplist) of
- true ->
- Proplist;
- false ->
- [{Key, Value} | Proplist]
- end.
-
-%% @spec set_defaults([{Key::term(), Value::term()}], Proplist::list()) -> list()
-%%
-%% @doc Return new Proplist with each {Key, Value} in DefaultProps set if not is_defined(Key, Proplist).
-set_defaults(DefaultProps, Proplist) ->
- lists:foldl(fun set_default/2, Proplist, DefaultProps).
-
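-%% Usage sketch (illustrative, not part of the original module; the
-%% port/ip keys are hypothetical):
-%%   set_defaults([{port, 80}], [{port, 8080}]) =:= [{port, 8080}]
-%%   set_defaults([{ip, any}], [{port, 8080}]) =:= [{ip, any}, {port, 8080}]
-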
-
-%% @spec is_defined(Key::term(), Proplist::list()) -> bool()
-%%
-%% @doc Returns true if Proplist contains at least one entry associated
-%% with Key, otherwise false.
-is_defined(Key, Proplist) ->
- lists:keyfind(Key, 1, Proplist) =/= false.
-
-
-%% @spec get_value(Key::term(), Proplist::list()) -> term() | undefined
-%%
-%% @doc Return the value of <code>Key</code> or undefined
-get_value(Key, Proplist) ->
- get_value(Key, Proplist, undefined).
-
-%% @spec get_value(Key::term(), Proplist::list(), Default::term()) -> term()
-%%
-%% @doc Return the value of <code>Key</code> or <code>Default</code>
-get_value(_Key, [], Default) ->
- Default;
-get_value(Key, Proplist, Default) ->
- case lists:keyfind(Key, 1, Proplist) of
- false ->
- Default;
- {Key, Value} ->
- Value
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-set_defaults_test() ->
- ?assertEqual(
- [{k, v}],
- set_defaults([{k, v}], [])),
- ?assertEqual(
- [{k, v}],
- set_defaults([{k, vee}], [{k, v}])),
- ?assertEqual(
- lists:sort([{kay, vee}, {k, v}]),
- lists:sort(set_defaults([{k, vee}, {kay, vee}], [{k, v}]))),
- ok.
-
-set_default_test() ->
- ?assertEqual(
- [{k, v}],
- set_default({k, v}, [])),
- ?assertEqual(
- [{k, v}],
- set_default({k, vee}, [{k, v}])),
- ok.
-
-get_value_test() ->
- ?assertEqual(
- undefined,
- get_value(foo, [])),
- ?assertEqual(
- undefined,
- get_value(foo, [{bar, baz}])),
- ?assertEqual(
- bar,
- get_value(foo, [{foo, bar}])),
- ?assertEqual(
- default,
- get_value(foo, [], default)),
- ?assertEqual(
- default,
- get_value(foo, [{bar, baz}], default)),
- ?assertEqual(
- bar,
- get_value(foo, [{foo, bar}], default)),
- ok.
-
--endif.
-
diff --git a/1.1.x/src/mochiweb/mochilogfile2.erl b/1.1.x/src/mochiweb/mochilogfile2.erl
deleted file mode 100644
index c34ee73a..00000000
--- a/1.1.x/src/mochiweb/mochilogfile2.erl
+++ /dev/null
@@ -1,140 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc Write newline-delimited log files, ensuring that if a truncated
-%% entry is found on log open then it is fixed before writing. Uses
-%% delayed writes and raw files for performance.
--module(mochilogfile2).
--author('bob@mochimedia.com').
-
--export([open/1, write/2, close/1, name/1]).
-
-%% @spec open(Name) -> Handle
-%% @doc Open the log file Name, creating or appending as necessary. Any
-%% partial data after the last newline is truncated, to ensure that all
-%% records on disk are complete.
-open(Name) ->
- {ok, FD} = file:open(Name, [raw, read, write, delayed_write, binary]),
- fix_log(FD),
- {?MODULE, Name, FD}.
-
-%% @spec name(Handle) -> string()
-%% @doc Return the path of the log file.
-name({?MODULE, Name, _FD}) ->
- Name.
-
-%% @spec write(Handle, IoData) -> ok
-%% @doc Write IoData to the log file referenced by Handle.
-write({?MODULE, _Name, FD}, IoData) ->
- ok = file:write(FD, [IoData, $\n]),
- ok.
-
-%% @spec close(Handle) -> ok
-%% @doc Close the log file referenced by Handle.
-close({?MODULE, _Name, FD}) ->
- ok = file:sync(FD),
- ok = file:close(FD),
- ok.
-
-fix_log(FD) ->
- {ok, Location} = file:position(FD, eof),
- Seek = find_last_newline(FD, Location),
- {ok, Seek} = file:position(FD, Seek),
- ok = file:truncate(FD),
- ok.
-
-%% Seek backwards to the last valid log entry
-find_last_newline(_FD, N) when N =< 1 ->
- 0;
-find_last_newline(FD, Location) ->
- case file:pread(FD, Location - 1, 1) of
- {ok, <<$\n>>} ->
- Location;
- {ok, _} ->
- find_last_newline(FD, Location - 1)
- end.
-
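-%% Illustrative trace (not part of the original module): opening a file
-%% containing <<"good\nbad">> seeks back to the last $\n at offset 5 and
-%% truncates there, leaving <<"good\n">> before any new entry is written;
-%% fix_log_test/0 below exercises this repair.
-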
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-name_test() ->
- D = mochitemp:mkdtemp(),
- FileName = filename:join(D, "open_close_test.log"),
- H = open(FileName),
- ?assertEqual(
- FileName,
- name(H)),
- close(H),
- file:delete(FileName),
- file:del_dir(D),
- ok.
-
-open_close_test() ->
- D = mochitemp:mkdtemp(),
- FileName = filename:join(D, "open_close_test.log"),
- OpenClose = fun () ->
- H = open(FileName),
- ?assertEqual(
- true,
- filelib:is_file(FileName)),
- ok = close(H),
- ?assertEqual(
- {ok, <<>>},
- file:read_file(FileName)),
- ok
- end,
- OpenClose(),
- OpenClose(),
- file:delete(FileName),
- file:del_dir(D),
- ok.
-
-write_test() ->
- D = mochitemp:mkdtemp(),
- FileName = filename:join(D, "write_test.log"),
- F = fun () ->
- H = open(FileName),
- write(H, "test line"),
- close(H),
- ok
- end,
- F(),
- ?assertEqual(
- {ok, <<"test line\n">>},
- file:read_file(FileName)),
- F(),
- ?assertEqual(
- {ok, <<"test line\ntest line\n">>},
- file:read_file(FileName)),
- file:delete(FileName),
- file:del_dir(D),
- ok.
-
-fix_log_test() ->
- D = mochitemp:mkdtemp(),
- FileName = filename:join(D, "write_test.log"),
- file:write_file(FileName, <<"first line good\nsecond line bad">>),
- F = fun () ->
- H = open(FileName),
- write(H, "test line"),
- close(H),
- ok
- end,
- F(),
- ?assertEqual(
- {ok, <<"first line good\ntest line\n">>},
- file:read_file(FileName)),
- file:write_file(FileName, <<"first line bad">>),
- F(),
- ?assertEqual(
- {ok, <<"test line\n">>},
- file:read_file(FileName)),
- F(),
- ?assertEqual(
- {ok, <<"test line\ntest line\n">>},
- file:read_file(FileName)),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochinum.erl b/1.1.x/src/mochiweb/mochinum.erl
deleted file mode 100644
index a7e2bfbc..00000000
--- a/1.1.x/src/mochiweb/mochinum.erl
+++ /dev/null
@@ -1,331 +0,0 @@
-%% @copyright 2007 Mochi Media, Inc.
-%% @author Bob Ippolito <bob@mochimedia.com>
-
-%% @doc Useful numeric algorithms for floats that cover some deficiencies
-%% in the math module. More interesting is digits/1, which implements
-%% the algorithm from:
-%% http://www.cs.indiana.edu/~burger/fp/index.html
-%% See also "Printing Floating-Point Numbers Quickly and Accurately"
-%% in Proceedings of the SIGPLAN '96 Conference on Programming Language
-%% Design and Implementation.
-
--module(mochinum).
--author("Bob Ippolito <bob@mochimedia.com>").
--export([digits/1, frexp/1, int_pow/2, int_ceil/1]).
-
-%% IEEE 754 Float exponent bias
--define(FLOAT_BIAS, 1022).
--define(MIN_EXP, -1074).
--define(BIG_POW, 4503599627370496).
-
-%% External API
-
-%% @spec digits(number()) -> string()
-%% @doc Returns a string that accurately represents the given integer or float
-%% using a conservative amount of digits. Great for generating
-%% human-readable output, or compact ASCII serializations for floats.
-digits(N) when is_integer(N) ->
- integer_to_list(N);
-digits(0.0) ->
- "0.0";
-digits(Float) ->
- {Frac, Exp} = frexp(Float),
- Exp1 = Exp - 53,
- Frac1 = trunc(abs(Frac) * (1 bsl 53)),
- [Place | Digits] = digits1(Float, Exp1, Frac1),
- R = insert_decimal(Place, [$0 + D || D <- Digits]),
- case Float < 0 of
- true ->
- [$- | R];
- _ ->
- R
- end.
-
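-%% Usage sketch (illustrative; values mirror digits_test/0 below):
-%%   1> mochinum:digits(0.1).
-%%   "0.1"
-%%   2> mochinum:digits(1000000.0).
-%%   "1.0e+6"
-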
-%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()}
-%% @doc Return the fractional and exponent part of an IEEE 754 double,
-%% equivalent to the libc function of the same name.
-%% F = Frac * pow(2, Exp).
-frexp(F) ->
- frexp1(unpack(F)).
-
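-%% For example (illustrative): frexp(1.0) =:= {0.5, 1}, since
-%% 1.0 = 0.5 * math:pow(2, 1); see frexp_test/0 below.
-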
-%% @spec int_pow(X::integer(), N::integer()) -> Y::integer()
-%% @doc Moderately efficient way to exponentiate integers.
-%% int_pow(10, 2) = 100.
-int_pow(_X, 0) ->
- 1;
-int_pow(X, N) when N > 0 ->
- int_pow(X, N, 1).
-
-%% @spec int_ceil(F::float()) -> integer()
-%% @doc Return the ceiling of F as an integer. The ceiling is defined as
-%% F when F == trunc(F);
-%% trunc(F) when F &lt; 0;
-%% trunc(F) + 1 when F &gt; 0.
-int_ceil(X) ->
- T = trunc(X),
- case (X - T) of
- Neg when Neg < 0 -> T;
- Pos when Pos > 0 -> T + 1;
- _ -> T
- end.
-
-
-%% Internal API
-
-int_pow(X, N, R) when N < 2 ->
- R * X;
-int_pow(X, N, R) ->
- int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end).
-
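-%% Illustrative trace (not part of the original module): int_pow/3
-%% squares the base while halving the exponent, so int_pow(10, 3)
-%% evaluates as int_pow(10, 3, 1) -> int_pow(100, 1, 10) -> 10 * 100,
-%% i.e. O(log N) multiplications rather than N.
-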
-insert_decimal(0, S) ->
- "0." ++ S;
-insert_decimal(Place, S) when Place > 0 ->
- L = length(S),
- case Place - L of
- 0 ->
- S ++ ".0";
- N when N < 0 ->
- {S0, S1} = lists:split(L + N, S),
- S0 ++ "." ++ S1;
- N when N < 6 ->
- %% More places than digits
- S ++ lists:duplicate(N, $0) ++ ".0";
- _ ->
- insert_decimal_exp(Place, S)
- end;
-insert_decimal(Place, S) when Place > -6 ->
- "0." ++ lists:duplicate(abs(Place), $0) ++ S;
-insert_decimal(Place, S) ->
- insert_decimal_exp(Place, S).
-
-insert_decimal_exp(Place, S) ->
- [C | S0] = S,
- S1 = case S0 of
- [] ->
- "0";
- _ ->
- S0
- end,
- Exp = case Place < 0 of
- true ->
- "e-";
- false ->
- "e+"
- end,
- [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)).
-
-
-digits1(Float, Exp, Frac) ->
- Round = ((Frac band 1) =:= 0),
- case Exp >= 0 of
- true ->
- BExp = 1 bsl Exp,
- case (Frac =/= ?BIG_POW) of
- true ->
- scale((Frac * BExp * 2), 2, BExp, BExp,
- Round, Round, Float);
- false ->
- scale((Frac * BExp * 4), 4, (BExp * 2), BExp,
- Round, Round, Float)
- end;
- false ->
- case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of
- true ->
- scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
- Round, Round, Float);
- false ->
- scale((Frac * 4), 1 bsl (2 - Exp), 2, 1,
- Round, Round, Float)
- end
- end.
-
-scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) ->
- Est = int_ceil(math:log10(abs(Float)) - 1.0e-10),
- %% Note that the Scheme implementation uses a 326-element look-up table
- %% for int_pow(10, N), whereas we do not.
- case Est >= 0 of
- true ->
- fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est,
- LowOk, HighOk);
- false ->
- Scale = int_pow(10, -Est),
- fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est,
- LowOk, HighOk)
- end.
-
-fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) ->
- TooLow = case HighOk of
- true ->
- (R + MPlus) >= S;
- false ->
- (R + MPlus) > S
- end,
- case TooLow of
- true ->
- [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)];
- false ->
- [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)]
- end.
-
-generate(R0, S, MPlus, MMinus, LowOk, HighOk) ->
- D = R0 div S,
- R = R0 rem S,
- TC1 = case LowOk of
- true ->
- R =< MMinus;
- false ->
- R < MMinus
- end,
- TC2 = case HighOk of
- true ->
- (R + MPlus) >= S;
- false ->
- (R + MPlus) > S
- end,
- case TC1 of
- false ->
- case TC2 of
- false ->
- [D | generate(R * 10, S, MPlus * 10, MMinus * 10,
- LowOk, HighOk)];
- true ->
- [D + 1]
- end;
- true ->
- case TC2 of
- false ->
- [D];
- true ->
- case R * 2 < S of
- true ->
- [D];
- false ->
- [D + 1]
- end
- end
- end.
-
-unpack(Float) ->
- <<Sign:1, Exp:11, Frac:52>> = <<Float:64/float>>,
- {Sign, Exp, Frac}.
-
-frexp1({_Sign, 0, 0}) ->
- {0.0, 0};
-frexp1({Sign, 0, Frac}) ->
- Exp = log2floor(Frac),
- <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, (Frac-1):52>>,
- {Frac1, -(?FLOAT_BIAS) - 52 + Exp};
-frexp1({Sign, Exp, Frac}) ->
- <<Frac1:64/float>> = <<Sign:1, ?FLOAT_BIAS:11, Frac:52>>,
- {Frac1, Exp - ?FLOAT_BIAS}.
-
-log2floor(Int) ->
- log2floor(Int, 0).
-
-log2floor(0, N) ->
- N;
-log2floor(Int, N) ->
- log2floor(Int bsr 1, 1 + N).
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-int_ceil_test() ->
- 1 = int_ceil(0.0001),
- 0 = int_ceil(0.0),
- 1 = int_ceil(0.99),
- 1 = int_ceil(1.0),
- -1 = int_ceil(-1.5),
- -2 = int_ceil(-2.0),
- ok.
-
-int_pow_test() ->
- 1 = int_pow(1, 1),
- 1 = int_pow(1, 0),
- 1 = int_pow(10, 0),
- 10 = int_pow(10, 1),
- 100 = int_pow(10, 2),
- 1000 = int_pow(10, 3),
- ok.
-
-digits_test() ->
- ?assertEqual("0",
- digits(0)),
- ?assertEqual("0.0",
- digits(0.0)),
- ?assertEqual("1.0",
- digits(1.0)),
- ?assertEqual("-1.0",
- digits(-1.0)),
- ?assertEqual("0.1",
- digits(0.1)),
- ?assertEqual("0.01",
- digits(0.01)),
- ?assertEqual("0.001",
- digits(0.001)),
- ?assertEqual("1.0e+6",
- digits(1000000.0)),
- ?assertEqual("0.5",
- digits(0.5)),
- ?assertEqual("4503599627370496.0",
- digits(4503599627370496.0)),
- %% small denormalized number
- %% 4.94065645841246544177e-324
- <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
- ?assertEqual("4.9406564584124654e-324",
- digits(SmallDenorm)),
- ?assertEqual(SmallDenorm,
- list_to_float(digits(SmallDenorm))),
- %% large denormalized number
- %% 2.22507385850720088902e-308
- <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
- ?assertEqual("2.225073858507201e-308",
- digits(BigDenorm)),
- ?assertEqual(BigDenorm,
- list_to_float(digits(BigDenorm))),
- %% small normalized number
- %% 2.22507385850720138309e-308
- <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
- ?assertEqual("2.2250738585072014e-308",
- digits(SmallNorm)),
- ?assertEqual(SmallNorm,
- list_to_float(digits(SmallNorm))),
- %% large normalized number
- %% 1.79769313486231570815e+308
- <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
- ?assertEqual("1.7976931348623157e+308",
- digits(LargeNorm)),
- ?assertEqual(LargeNorm,
- list_to_float(digits(LargeNorm))),
- ok.
-
-frexp_test() ->
- %% zero
- {0.0, 0} = frexp(0.0),
- %% one
- {0.5, 1} = frexp(1.0),
- %% negative one
- {-0.5, 1} = frexp(-1.0),
- %% small denormalized number
- %% 4.94065645841246544177e-324
- <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
- {0.5, -1073} = frexp(SmallDenorm),
- %% large denormalized number
- %% 2.22507385850720088902e-308
- <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
- {0.99999999999999978, -1022} = frexp(BigDenorm),
- %% small normalized number
- %% 2.22507385850720138309e-308
- <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
- {0.5, -1021} = frexp(SmallNorm),
- %% large normalized number
- %% 1.79769313486231570815e+308
- <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
- {0.99999999999999989, 1024} = frexp(LargeNorm),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochitemp.erl b/1.1.x/src/mochiweb/mochitemp.erl
deleted file mode 100644
index bb23d2a6..00000000
--- a/1.1.x/src/mochiweb/mochitemp.erl
+++ /dev/null
@@ -1,310 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc Create temporary files and directories. Requires crypto to be started.
-
--module(mochitemp).
--export([gettempdir/0]).
--export([mkdtemp/0, mkdtemp/3]).
--export([rmtempdir/1]).
-%% -export([mkstemp/4]).
--define(SAFE_CHARS, {$a, $b, $c, $d, $e, $f, $g, $h, $i, $j, $k, $l, $m,
- $n, $o, $p, $q, $r, $s, $t, $u, $v, $w, $x, $y, $z,
- $A, $B, $C, $D, $E, $F, $G, $H, $I, $J, $K, $L, $M,
- $N, $O, $P, $Q, $R, $S, $T, $U, $V, $W, $X, $Y, $Z,
- $0, $1, $2, $3, $4, $5, $6, $7, $8, $9, $_}).
--define(TMP_MAX, 10000).
-
--include_lib("kernel/include/file.hrl").
-
-%% TODO: An ugly wrapper over the mktemp tool with open_port and sadness?
-%% We can't implement this race-free in Erlang without the ability
-%% to issue O_CREAT|O_EXCL. I suppose we could hack something with
-%% mkdtemp, del_dir, open.
-%% mkstemp(Suffix, Prefix, Dir, Options) ->
-%% ok.
-
-rmtempdir(Dir) ->
- case file:del_dir(Dir) of
- {error, eexist} ->
- ok = rmtempdirfiles(Dir),
- ok = file:del_dir(Dir);
- ok ->
- ok
- end.
-
-rmtempdirfiles(Dir) ->
- {ok, Files} = file:list_dir(Dir),
- ok = rmtempdirfiles(Dir, Files).
-
-rmtempdirfiles(_Dir, []) ->
- ok;
-rmtempdirfiles(Dir, [Basename | Rest]) ->
- Path = filename:join([Dir, Basename]),
- case filelib:is_dir(Path) of
- true ->
- ok = rmtempdir(Path);
- false ->
- ok = file:delete(Path)
- end,
- rmtempdirfiles(Dir, Rest).
-
-mkdtemp() ->
- mkdtemp("", "tmp", gettempdir()).
-
-mkdtemp(Suffix, Prefix, Dir) ->
- mkdtemp_n(rngpath_fun(Suffix, Prefix, Dir), ?TMP_MAX).
-
-
-
-mkdtemp_n(RngPath, 1) ->
- make_dir(RngPath());
-mkdtemp_n(RngPath, N) ->
- try make_dir(RngPath())
- catch throw:{error, eexist} ->
- mkdtemp_n(RngPath, N - 1)
- end.
-
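-%% Illustrative note (not part of the original module): mkdtemp/3 draws
-%% up to ?TMP_MAX (10000) random names, retrying on eexist, so a default
-%% mkdtemp() call tries paths like gettempdir() ++ "/tmpQx3b_Z" (random
-%% part hypothetical) until file:make_dir/1 succeeds.
-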
-make_dir(Path) ->
- case file:make_dir(Path) of
- ok ->
- ok;
- E={error, eexist} ->
- throw(E)
- end,
- %% Small window for a race condition here because dir is created 777
- ok = file:write_file_info(Path, #file_info{mode=8#0700}),
- Path.
-
-%% Note: the argument order follows the mkdtemp/3 call site above, which
-%% passes Suffix first; the generated name is Prefix ++ random ++ Suffix.
-rngpath_fun(Suffix, Prefix, Dir) ->
- fun () ->
- filename:join([Dir, Prefix ++ rngchars(6) ++ Suffix])
- end.
-
-rngchars(0) ->
- "";
-rngchars(N) ->
- [rngchar() | rngchars(N - 1)].
-
-rngchar() ->
- rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))).
-
-rngchar(C) ->
- element(1 + C, ?SAFE_CHARS).
-
-%% @spec gettempdir() -> string()
-%% @doc Get a usable temporary directory using the first of these that is a directory:
-%% $TMPDIR, $TMP, $TEMP, "/tmp", "/var/tmp", "/usr/tmp", ".".
-gettempdir() ->
- gettempdir(gettempdir_checks(), fun normalize_dir/1).
-
-gettempdir_checks() ->
- [{fun os:getenv/1, ["TMPDIR", "TMP", "TEMP"]},
- {fun gettempdir_identity/1, ["/tmp", "/var/tmp", "/usr/tmp"]},
- {fun gettempdir_cwd/1, [cwd]}].
-
-gettempdir_identity(L) ->
- L.
-
-gettempdir_cwd(cwd) ->
- {ok, L} = file:get_cwd(),
- L.
-
-gettempdir([{_F, []} | RestF], Normalize) ->
- gettempdir(RestF, Normalize);
-gettempdir([{F, [L | RestL]} | RestF], Normalize) ->
- case Normalize(F(L)) of
- false ->
- gettempdir([{F, RestL} | RestF], Normalize);
- Dir ->
- Dir
- end.
-
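-%% Illustrative walk (not part of the original module): with TMPDIR unset
-%% and TMP="/var/tmp", the os:getenv check yields false for TMPDIR (which
-%% normalize_dir/1 rejects) and then returns normalize_dir("/var/tmp"),
-%% assuming that path is a directory.
-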
-normalize_dir(False) when False =:= false orelse False =:= "" ->
- %% Erlang doesn't have an unsetenv, wtf.
- false;
-normalize_dir(L) ->
- Dir = filename:absname(L),
- case filelib:is_dir(Dir) of
- false ->
- false;
- true ->
- Dir
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-pushenv(L) ->
- [{K, os:getenv(K)} || K <- L].
-popenv(L) ->
- F = fun ({K, false}) ->
- %% Erlang doesn't have an unsetenv, wtf.
- os:putenv(K, "");
- ({K, V}) ->
- os:putenv(K, V)
- end,
- lists:foreach(F, L).
-
-gettempdir_fallback_test() ->
- ?assertEqual(
- "/",
- gettempdir([{fun gettempdir_identity/1, ["/--not-here--/"]},
- {fun gettempdir_identity/1, ["/"]}],
- fun normalize_dir/1)),
- ?assertEqual(
- "/",
- %% simulate os:getenv returning false for an unset variable
- gettempdir([{fun gettempdir_identity/1, [false]},
- {fun gettempdir_identity/1, ["/"]}],
- fun normalize_dir/1)),
- ok.
-
-gettempdir_identity_test() ->
- ?assertEqual(
- "/",
- gettempdir([{fun gettempdir_identity/1, ["/"]}], fun normalize_dir/1)),
- ok.
-
-gettempdir_cwd_test() ->
- {ok, Cwd} = file:get_cwd(),
- ?assertEqual(
- normalize_dir(Cwd),
- gettempdir([{fun gettempdir_cwd/1, [cwd]}], fun normalize_dir/1)),
- ok.
-
-rngchars_test() ->
- crypto:start(),
- ?assertEqual(
- "",
- rngchars(0)),
- ?assertEqual(
- 10,
- length(rngchars(10))),
- ok.
-
-rngchar_test() ->
- ?assertEqual(
- $a,
- rngchar(0)),
- ?assertEqual(
- $A,
- rngchar(26)),
- ?assertEqual(
- $_,
- rngchar(62)),
- ok.
-
-mkdtemp_n_failonce_test() ->
- crypto:start(),
- D = mkdtemp(),
- Path = filename:join([D, "testdir"]),
- %% Toggle the existence of a dir so that it fails
- %% the first time and succeeds the second.
- F = fun () ->
- case filelib:is_dir(Path) of
- true ->
- file:del_dir(Path);
- false ->
- file:make_dir(Path)
- end,
- Path
- end,
- try
- %% Fails the first time
- ?assertThrow(
- {error, eexist},
- mkdtemp_n(F, 1)),
- %% Reset state
- file:del_dir(Path),
- %% Succeeds the second time
- ?assertEqual(
- Path,
- mkdtemp_n(F, 2))
- after rmtempdir(D)
- end,
- ok.
-
-mkdtemp_n_fail_test() ->
- {ok, Cwd} = file:get_cwd(),
- ?assertThrow(
- {error, eexist},
- mkdtemp_n(fun () -> Cwd end, 1)),
- ?assertThrow(
- {error, eexist},
- mkdtemp_n(fun () -> Cwd end, 2)),
- ok.
-
-make_dir_fail_test() ->
- {ok, Cwd} = file:get_cwd(),
- ?assertThrow(
- {error, eexist},
- make_dir(Cwd)),
- ok.
-
-mkdtemp_test() ->
- crypto:start(),
- D = mkdtemp(),
- ?assertEqual(
- true,
- filelib:is_dir(D)),
- ?assertEqual(
- ok,
- file:del_dir(D)),
- ok.
-
-rmtempdir_test() ->
- crypto:start(),
- D1 = mkdtemp(),
- ?assertEqual(
- true,
- filelib:is_dir(D1)),
- ?assertEqual(
- ok,
- rmtempdir(D1)),
- D2 = mkdtemp(),
- ?assertEqual(
- true,
- filelib:is_dir(D2)),
- ok = file:write_file(filename:join([D2, "foo"]), <<"bytes">>),
- D3 = mkdtemp("suffix", "prefix", D2),
- ?assertEqual(
- true,
- filelib:is_dir(D3)),
- ok = file:write_file(filename:join([D3, "foo"]), <<"bytes">>),
- ?assertEqual(
- ok,
- rmtempdir(D2)),
- ?assertEqual(
- {error, enoent},
- file:consult(D3)),
- ?assertEqual(
- {error, enoent},
- file:consult(D2)),
- ok.
-
-gettempdir_env_test() ->
- Env = pushenv(["TMPDIR", "TEMP", "TMP"]),
- FalseEnv = [{"TMPDIR", false}, {"TEMP", false}, {"TMP", false}],
- try
- popenv(FalseEnv),
- popenv([{"TMPDIR", "/"}]),
- ?assertEqual(
- "/",
- os:getenv("TMPDIR")),
- ?assertEqual(
- "/",
- gettempdir()),
- {ok, Cwd} = file:get_cwd(),
- popenv(FalseEnv),
- popenv([{"TMP", Cwd}]),
- ?assertEqual(
- normalize_dir(Cwd),
- gettempdir())
- after popenv(Env)
- end,
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiutf8.erl b/1.1.x/src/mochiweb/mochiutf8.erl
deleted file mode 100644
index 206e1186..00000000
--- a/1.1.x/src/mochiweb/mochiutf8.erl
+++ /dev/null
@@ -1,316 +0,0 @@
-%% @copyright 2010 Mochi Media, Inc.
-%% @author Bob Ippolito <bob@mochimedia.com>
-
-%% @doc Algorithm to convert any binary to a valid UTF-8 sequence by ignoring
-%% invalid bytes.
-
--module(mochiutf8).
--export([valid_utf8_bytes/1, codepoint_to_bytes/1, bytes_to_codepoints/1]).
--export([bytes_foldl/3, codepoint_foldl/3, read_codepoint/1, len/1]).
-
-%% External API
-
--type unichar_low() :: 0..16#d7ff.
--type unichar_high() :: 16#e000..16#10ffff.
--type unichar() :: unichar_low() | unichar_high().
-
--spec codepoint_to_bytes(unichar()) -> binary().
-%% @doc Convert a unicode codepoint to UTF-8 bytes.
-codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) ->
- %% U+0000 - U+007F - 7 bits
- <<C>>;
-codepoint_to_bytes(C) when (C >= 16#080 andalso C =< 16#07FF) ->
- %% U+0080 - U+07FF - 11 bits
- <<0:5, B1:5, B0:6>> = <<C:16>>,
- <<2#110:3, B1:5,
- 2#10:2, B0:6>>;
-codepoint_to_bytes(C) when (C >= 16#0800 andalso C =< 16#FFFF) andalso
- (C < 16#D800 orelse C > 16#DFFF) ->
- %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
- <<B2:4, B1:6, B0:6>> = <<C:16>>,
- <<2#1110:4, B2:4,
- 2#10:2, B1:6,
- 2#10:2, B0:6>>;
-codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) ->
- %% U+10000 - U+10FFFF - 21 bits
- <<0:3, B3:3, B2:6, B1:6, B0:6>> = <<C:24>>,
- <<2#11110:5, B3:3,
- 2#10:2, B2:6,
- 2#10:2, B1:6,
- 2#10:2, B0:6>>.
-
--spec codepoints_to_bytes([unichar()]) -> binary().
-%% @doc Convert a list of codepoints to a UTF-8 binary.
-codepoints_to_bytes(L) ->
- <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>.
-
--spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
-read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) ->
- %% U+0000 - U+007F - 7 bits
- <<B:1/binary, _/binary>> = Bin,
- {C, B, Rest};
-read_codepoint(Bin = <<2#110:3, B1:5,
- 2#10:2, B0:6,
- Rest/binary>>) ->
- %% U+0080 - U+07FF - 11 bits
- case <<B1:5, B0:6>> of
- <<C:11>> when C >= 16#80 ->
- <<B:2/binary, _/binary>> = Bin,
- {C, B, Rest}
- end;
-read_codepoint(Bin = <<2#1110:4, B2:4,
- 2#10:2, B1:6,
- 2#10:2, B0:6,
- Rest/binary>>) ->
- %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
- case <<B2:4, B1:6, B0:6>> of
- <<C:16>> when (C >= 16#0800 andalso C =< 16#FFFF) andalso
- (C < 16#D800 orelse C > 16#DFFF) ->
- <<B:3/binary, _/binary>> = Bin,
- {C, B, Rest}
- end;
-read_codepoint(Bin = <<2#11110:5, B3:3,
- 2#10:2, B2:6,
- 2#10:2, B1:6,
- 2#10:2, B0:6,
- Rest/binary>>) ->
- %% U+10000 - U+10FFFF - 21 bits
- case <<B3:3, B2:6, B1:6, B0:6>> of
- <<C:21>> when (C >= 16#010000 andalso C =< 16#10FFFF) ->
- <<B:4/binary, _/binary>> = Bin,
- {C, B, Rest}
- end.
-
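-%% Worked example (illustrative, not part of the original module): the
-%% snowman U+2603 is encoded as <<226,152,131>>, so
-%% read_codepoint(<<226,152,131,"!">>) matches the three-byte clause and
-%% returns {16#2603, <<226,152,131>>, <<"!">>}.
-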
--spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
-codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) ->
- Acc;
-codepoint_foldl(F, Acc, Bin) ->
- {C, _, Rest} = read_codepoint(Bin),
- codepoint_foldl(F, F(C, Acc), Rest).
-
--spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
-bytes_foldl(F, Acc, <<>>) when is_function(F, 2) ->
- Acc;
-bytes_foldl(F, Acc, Bin) ->
- {_, B, Rest} = read_codepoint(Bin),
- bytes_foldl(F, F(B, Acc), Rest).
-
--spec bytes_to_codepoints(binary()) -> [unichar()].
-bytes_to_codepoints(B) ->
- lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)).
-
--spec len(binary()) -> non_neg_integer().
-len(<<>>) ->
- 0;
-len(B) ->
- {_, _, Rest} = read_codepoint(B),
- 1 + len(Rest).
-
--spec valid_utf8_bytes(B::binary()) -> binary().
-%% @doc Return only the bytes in B that represent valid UTF-8. Uses
-%% the following recursive algorithm: skip one byte if B does not
-%% follow UTF-8 syntax (a 1-4 byte encoding of some number),
-%% skip a sequence of 2-4 bytes if it represents an overlong encoding
-%% or bad code point (surrogate U+D800 - U+DFFF or > U+10FFFF).
-valid_utf8_bytes(B) when is_binary(B) ->
- binary_skip_bytes(B, invalid_utf8_indexes(B)).
-
-%% Internal API
-
--spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
-%% @doc Return B, but skipping the 0-based indexes in L.
-binary_skip_bytes(B, []) ->
- B;
-binary_skip_bytes(B, L) ->
- binary_skip_bytes(B, L, 0, []).
-
-%% @private
--spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
-binary_skip_bytes(B, [], _N, Acc) ->
- iolist_to_binary(lists:reverse([B | Acc]));
-binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
- binary_skip_bytes(RestB, RestL, 1 + N, Acc);
-binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) ->
- binary_skip_bytes(RestB, L, 1 + N, [C | Acc]).
-
--spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
-%% @doc Return the 0-based indexes in B that are not valid UTF-8.
-invalid_utf8_indexes(B) ->
- invalid_utf8_indexes(B, 0, []).
-
-%% @private
--spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
-invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 ->
- %% U+0000 - U+007F - 7 bits
- invalid_utf8_indexes(Rest, 1 + N, Acc);
-invalid_utf8_indexes(<<C1, C2, Rest/binary>>, N, Acc)
- when C1 band 16#E0 =:= 16#C0,
- C2 band 16#C0 =:= 16#80 ->
- %% U+0080 - U+07FF - 11 bits
- case ((C1 band 16#1F) bsl 6) bor (C2 band 16#3F) of
- C when C < 16#80 ->
- %% Overlong encoding.
- invalid_utf8_indexes(Rest, 2 + N, [1 + N, N | Acc]);
- _ ->
- %% Upper bound U+07FF does not need to be checked
- invalid_utf8_indexes(Rest, 2 + N, Acc)
- end;
-invalid_utf8_indexes(<<C1, C2, C3, Rest/binary>>, N, Acc)
- when C1 band 16#F0 =:= 16#E0,
- C2 band 16#C0 =:= 16#80,
- C3 band 16#C0 =:= 16#80 ->
- %% U+0800 - U+FFFF - 16 bits
- case ((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
- (C3 band 16#3F) of
- C when (C < 16#800) orelse (C >= 16#D800 andalso C =< 16#DFFF) ->
- %% Overlong encoding or surrogate.
- invalid_utf8_indexes(Rest, 3 + N, [2 + N, 1 + N, N | Acc]);
- _ ->
- %% Upper bound U+FFFF does not need to be checked
- invalid_utf8_indexes(Rest, 3 + N, Acc)
- end;
-invalid_utf8_indexes(<<C1, C2, C3, C4, Rest/binary>>, N, Acc)
- when C1 band 16#F8 =:= 16#F0,
- C2 band 16#C0 =:= 16#80,
- C3 band 16#C0 =:= 16#80,
- C4 band 16#C0 =:= 16#80 ->
- %% U+10000 - U+10FFFF - 21 bits
- case ((((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
- (C3 band 16#3F)) bsl 6) bor (C4 band 16#3F) of
- C when (C < 16#10000) orelse (C > 16#10FFFF) ->
- %% Overlong encoding or invalid code point.
- invalid_utf8_indexes(Rest, 4 + N, [3 + N, 2 + N, 1 + N, N | Acc]);
- _ ->
- invalid_utf8_indexes(Rest, 4 + N, Acc)
- end;
-invalid_utf8_indexes(<<_, Rest/binary>>, N, Acc) ->
- %% Invalid char
- invalid_utf8_indexes(Rest, 1 + N, [N | Acc]);
-invalid_utf8_indexes(<<>>, _N, Acc) ->
- lists:reverse(Acc).
-
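-%% For example (illustrative, not part of the original module): the bare
-%% continuation byte in <<"a", 128, "b">> gives
-%% invalid_utf8_indexes(<<"a", 128, "b">>) =:= [1], so
-%% valid_utf8_bytes(<<"a", 128, "b">>) =:= <<"ab">>.
-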
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-binary_skip_bytes_test() ->
- ?assertEqual(<<"foo">>,
- binary_skip_bytes(<<"foo">>, [])),
- ?assertEqual(<<"foobar">>,
- binary_skip_bytes(<<"foo bar">>, [3])),
- ?assertEqual(<<"foo">>,
- binary_skip_bytes(<<"foo bar">>, [3, 4, 5, 6])),
- ?assertEqual(<<"oo bar">>,
- binary_skip_bytes(<<"foo bar">>, [0])),
- ok.
-
-invalid_utf8_indexes_test() ->
- ?assertEqual(
- [],
- invalid_utf8_indexes(<<"unicode snowman for you: ", 226, 152, 131>>)),
- ?assertEqual(
- [0],
- invalid_utf8_indexes(<<128>>)),
- ?assertEqual(
- [57,59,60,64,66,67],
- invalid_utf8_indexes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
- 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
- ok.
-
-codepoint_to_bytes_test() ->
- %% U+0000 - U+007F - 7 bits
- %% U+0080 - U+07FF - 11 bits
- %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
- %% U+10000 - U+10FFFF - 21 bits
- ?assertEqual(
- <<"a">>,
- codepoint_to_bytes($a)),
- ?assertEqual(
- <<16#c2, 16#80>>,
- codepoint_to_bytes(16#80)),
- ?assertEqual(
- <<16#df, 16#bf>>,
- codepoint_to_bytes(16#07ff)),
- ?assertEqual(
- <<16#ef, 16#bf, 16#bf>>,
- codepoint_to_bytes(16#ffff)),
- ?assertEqual(
- <<16#f4, 16#8f, 16#bf, 16#bf>>,
- codepoint_to_bytes(16#10ffff)),
- ok.
-
-bytes_foldl_test() ->
- ?assertEqual(
- <<"abc">>,
- bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>, <<"abc">>)),
- ?assertEqual(
- <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>,
- bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>,
- <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
- ok.
-
-bytes_to_codepoints_test() ->
- ?assertEqual(
- "abc" ++ [16#2603, 16#4e2d, 16#85, 16#10ffff],
- bytes_to_codepoints(<<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
- ok.
-
-codepoint_foldl_test() ->
- ?assertEqual(
- "cba",
- codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], <<"abc">>)),
- ?assertEqual(
- [16#10ffff, 16#85, 16#4e2d, 16#2603 | "cba"],
- codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [],
- <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
- ok.
-
-len_test() ->
- ?assertEqual(
- 29,
- len(<<"unicode snowman for you: ", 226, 152, 131, 228, 184, 173, 194, 133, 244, 143, 191, 191>>)),
- ok.
-
-codepoints_to_bytes_test() ->
- ?assertEqual(
- iolist_to_binary(lists:map(fun codepoint_to_bytes/1, lists:seq(1, 1000))),
- codepoints_to_bytes(lists:seq(1, 1000))),
- ok.
-
-valid_utf8_bytes_test() ->
- ?assertEqual(
- <<"invalid U+11ffff: ">>,
- valid_utf8_bytes(<<"invalid U+11ffff: ", 244, 159, 191, 191>>)),
- ?assertEqual(
- <<"U+10ffff: ", 244, 143, 191, 191>>,
- valid_utf8_bytes(<<"U+10ffff: ", 244, 143, 191, 191>>)),
- ?assertEqual(
- <<"overlong 2-byte encoding (a): ">>,
- valid_utf8_bytes(<<"overlong 2-byte encoding (a): ", 2#11000001, 2#10100001>>)),
- ?assertEqual(
- <<"overlong 2-byte encoding (!): ">>,
- valid_utf8_bytes(<<"overlong 2-byte encoding (!): ", 2#11000000, 2#10100001>>)),
- ?assertEqual(
- <<"mu: ", 194, 181>>,
- valid_utf8_bytes(<<"mu: ", 194, 181>>)),
- ?assertEqual(
- <<"bad coding bytes: ">>,
- valid_utf8_bytes(<<"bad coding bytes: ", 2#10011111, 2#10111111, 2#11111111>>)),
- ?assertEqual(
- <<"low surrogate (unpaired): ">>,
- valid_utf8_bytes(<<"low surrogate (unpaired): ", 237, 176, 128>>)),
- ?assertEqual(
- <<"high surrogate (unpaired): ">>,
- valid_utf8_bytes(<<"high surrogate (unpaired): ", 237, 191, 191>>)),
- ?assertEqual(
- <<"unicode snowman for you: ", 226, 152, 131>>,
- valid_utf8_bytes(<<"unicode snowman for you: ", 226, 152, 131>>)),
- ?assertEqual(
- <<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (AISPW))">>,
- valid_utf8_bytes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
- 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb.app.in b/1.1.x/src/mochiweb/mochiweb.app.in
deleted file mode 100644
index c6a2630b..00000000
--- a/1.1.x/src/mochiweb/mochiweb.app.in
+++ /dev/null
@@ -1,32 +0,0 @@
-{application, mochiweb,
- [{description, "MochiMedia Web Server"},
- {vsn, "7c2bc2"},
- {modules, [
- mochihex,
- mochijson,
- mochijson2,
- mochinum,
- mochiweb,
- mochiweb_app,
- mochiweb_charref,
- mochiweb_cookies,
- mochiweb_echo,
- mochiweb_headers,
- mochiweb_html,
- mochiweb_http,
- mochiweb_multipart,
- mochiweb_request,
- mochiweb_response,
- mochiweb_skel,
- mochiweb_socket_server,
- mochiweb_sup,
- mochiweb_util,
- reloader,
- mochifmt,
- mochifmt_std,
- mochifmt_records
- ]},
- {registered, []},
- {mod, {mochiweb_app, []}},
- {env, []},
- {applications, [kernel, stdlib]}]}.
diff --git a/1.1.x/src/mochiweb/mochiweb.app.src b/1.1.x/src/mochiweb/mochiweb.app.src
deleted file mode 100644
index a1c95aae..00000000
--- a/1.1.x/src/mochiweb/mochiweb.app.src
+++ /dev/null
@@ -1,9 +0,0 @@
-%% This is generated from src/mochiweb.app.src
-{application, mochiweb,
- [{description, "MochiMedia Web Server"},
- {vsn, "7c2bc2"},
- {modules, []},
- {registered, []},
- {mod, {mochiweb_app, []}},
- {env, []},
- {applications, [kernel, stdlib, crypto, inets]}]}.
diff --git a/1.1.x/src/mochiweb/mochiweb.erl b/1.1.x/src/mochiweb/mochiweb.erl
deleted file mode 100644
index 3118028b..00000000
--- a/1.1.x/src/mochiweb/mochiweb.erl
+++ /dev/null
@@ -1,289 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Start and stop the MochiWeb server.
-
--module(mochiweb).
--author('bob@mochimedia.com').
-
--export([start/0, stop/0]).
--export([new_request/1, new_response/1]).
--export([all_loaded/0, all_loaded/1, reload/0]).
-
-%% @spec start() -> ok
-%% @doc Start the MochiWeb server.
-start() ->
- ensure_started(crypto),
- application:start(mochiweb).
-
-%% @spec stop() -> ok
-%% @doc Stop the MochiWeb server.
-stop() ->
- Res = application:stop(mochiweb),
- application:stop(crypto),
- Res.
-
-reload() ->
- [c:l(Module) || Module <- all_loaded()].
-
-all_loaded() ->
- all_loaded(filename:dirname(code:which(?MODULE))).
-
-all_loaded(Base) when is_atom(Base) ->
- [];
-all_loaded(Base) ->
- FullBase = Base ++ "/",
- F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) ->
- Acc;
- ({Module, Loaded}, Acc) ->
- case lists:prefix(FullBase, Loaded) of
- true ->
- [Module | Acc];
- false ->
- Acc
- end
- end,
- lists:foldl(F, [], code:all_loaded()).
-
-
-%% @spec new_request({Socket, Request, Headers}) -> MochiWebRequest
-%% @doc Return a mochiweb_request data structure.
-new_request({Socket, {Method, {abs_path, Uri}, Version}, Headers}) ->
- mochiweb_request:new(Socket,
- Method,
- Uri,
- Version,
- mochiweb_headers:make(Headers));
-% this case probably doesn't "exist".
-new_request({Socket, {Method, {absoluteURI, _Protocol, _Host, _Port, Uri},
- Version}, Headers}) ->
- mochiweb_request:new(Socket,
- Method,
- Uri,
- Version,
- mochiweb_headers:make(Headers));
-%% Request-URI is "*"
-%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
-new_request({Socket, {Method, '*'=Uri, Version}, Headers}) ->
- mochiweb_request:new(Socket,
- Method,
- Uri,
- Version,
- mochiweb_headers:make(Headers)).
-
-%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse
-%% @doc Return a mochiweb_response data structure.
-new_response({Request, Code, Headers}) ->
- mochiweb_response:new(Request,
- Code,
- mochiweb_headers:make(Headers)).
-
-%% Internal API
-
-ensure_started(App) ->
- case application:start(App) of
- ok ->
- ok;
- {error, {already_started, App}} ->
- ok
- end.
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
--record(treq, {path, body= <<>>, xreply= <<>>}).
-
-ssl_cert_opts() ->
- EbinDir = filename:dirname(code:which(?MODULE)),
- CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
- CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
- KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
- [{certfile, CertFile}, {keyfile, KeyFile}].
-
-with_server(Transport, ServerFun, ClientFun) ->
- ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
- ServerOpts = case Transport of
- plain ->
- ServerOpts0;
- ssl ->
- ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
- end,
- {ok, Server} = mochiweb_http:start(ServerOpts),
- Port = mochiweb_socket_server:get(Server, port),
- Res = (catch ClientFun(Transport, Port)),
- mochiweb_http:stop(Server),
- Res.
-
-request_test() ->
- R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
- "/foo/bar/baz wibble quux" = R:get(path),
- ok.
-
-single_http_GET_test() ->
- do_GET(plain, 1).
-
-single_https_GET_test() ->
- do_GET(ssl, 1).
-
-multiple_http_GET_test() ->
- do_GET(plain, 3).
-
-multiple_https_GET_test() ->
- do_GET(ssl, 3).
-
-hundred_http_GET_test() ->
- do_GET(plain, 100).
-
-hundred_https_GET_test() ->
- do_GET(ssl, 100).
-
-single_128_http_POST_test() ->
- do_POST(plain, 128, 1).
-
-single_128_https_POST_test() ->
- do_POST(ssl, 128, 1).
-
-single_2k_http_POST_test() ->
- do_POST(plain, 2048, 1).
-
-single_2k_https_POST_test() ->
- do_POST(ssl, 2048, 1).
-
-single_100k_http_POST_test() ->
- do_POST(plain, 102400, 1).
-
-single_100k_https_POST_test() ->
- do_POST(ssl, 102400, 1).
-
-multiple_100k_http_POST_test() ->
- do_POST(plain, 102400, 3).
-
-multiple_100K_https_POST_test() ->
- do_POST(ssl, 102400, 3).
-
-hundred_128_http_POST_test() ->
- do_POST(plain, 128, 100).
-
-hundred_128_https_POST_test() ->
- do_POST(ssl, 128, 100).
-
-do_GET(Transport, Times) ->
- PathPrefix = "/whatever/",
- ReplyPrefix = "You requested: ",
- ServerFun = fun (Req) ->
- Reply = ReplyPrefix ++ Req:get(path),
- Req:ok({"text/plain", Reply})
- end,
- TestReqs = [begin
- Path = PathPrefix ++ integer_to_list(N),
- ExpectedReply = list_to_binary(ReplyPrefix ++ Path),
- #treq{path=Path, xreply=ExpectedReply}
- end || N <- lists:seq(1, Times)],
- ClientFun = new_client_fun('GET', TestReqs),
- ok = with_server(Transport, ServerFun, ClientFun),
- ok.
-
-do_POST(Transport, Size, Times) ->
- ServerFun = fun (Req) ->
- Body = Req:recv_body(),
- Headers = [{"Content-Type", "application/octet-stream"}],
- Req:respond({201, Headers, Body})
- end,
- TestReqs = [begin
- Path = "/stuff/" ++ integer_to_list(N),
- Body = crypto:rand_bytes(Size),
- #treq{path=Path, body=Body, xreply=Body}
- end || N <- lists:seq(1, Times)],
- ClientFun = new_client_fun('POST', TestReqs),
- ok = with_server(Transport, ServerFun, ClientFun),
- ok.
-
-new_client_fun(Method, TestReqs) ->
- fun (Transport, Port) ->
- client_request(Transport, Port, Method, TestReqs)
- end.
-
-client_request(Transport, Port, Method, TestReqs) ->
- Opts = [binary, {active, false}, {packet, http}],
- SockFun = case Transport of
- plain ->
- {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts),
- fun (recv) ->
- gen_tcp:recv(Socket, 0);
- ({recv, Length}) ->
- gen_tcp:recv(Socket, Length);
- ({send, Data}) ->
- gen_tcp:send(Socket, Data);
- ({setopts, L}) ->
- inet:setopts(Socket, L)
- end;
- ssl ->
- {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]),
- fun (recv) ->
- ssl:recv(Socket, 0);
- ({recv, Length}) ->
- ssl:recv(Socket, Length);
- ({send, Data}) ->
- ssl:send(Socket, Data);
- ({setopts, L}) ->
- ssl:setopts(Socket, L)
- end
- end,
- client_request(SockFun, Method, TestReqs).
-
-client_request(SockFun, _Method, []) ->
- {the_end, {error, closed}} = {the_end, SockFun(recv)},
- ok;
-client_request(SockFun, Method,
- [#treq{path=Path, body=Body, xreply=ExReply} | Rest]) ->
- Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n",
- client_headers(Body, Rest =:= []),
- "\r\n",
- Body],
- ok = SockFun({send, Request}),
- case Method of
- 'GET' ->
- {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv);
- 'POST' ->
- {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv)
- end,
- ok = SockFun({setopts, [{packet, httph}]}),
- {ok, {http_header, _, 'Server', _, "MochiWeb" ++ _}} = SockFun(recv),
- {ok, {http_header, _, 'Date', _, _}} = SockFun(recv),
- {ok, {http_header, _, 'Content-Type', _, _}} = SockFun(recv),
- {ok, {http_header, _, 'Content-Length', _, ConLenStr}} = SockFun(recv),
- ContentLength = list_to_integer(ConLenStr),
- {ok, http_eoh} = SockFun(recv),
- ok = SockFun({setopts, [{packet, raw}]}),
- {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)},
- ok = SockFun({setopts, [{packet, http}]}),
- client_request(SockFun, Method, Rest).
-
-client_headers(Body, IsLastRequest) ->
- ["Host: localhost\r\n",
- case Body of
- <<>> ->
- "";
- _ ->
- ["Content-Type: application/octet-stream\r\n",
- "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"]
- end,
- case IsLastRequest of
- true ->
- "Connection: close\r\n";
- false ->
- ""
- end].
-
-drain_reply(_SockFun, 0, Acc) ->
- Acc;
-drain_reply(SockFun, Length, Acc) ->
- Sz = erlang:min(Length, 1024),
- {ok, B} = SockFun({recv, Sz}),
- drain_reply(SockFun, Length - Sz, <<Acc/bytes, B/bytes>>).
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_acceptor.erl b/1.1.x/src/mochiweb/mochiweb_acceptor.erl
deleted file mode 100644
index 79d172c3..00000000
--- a/1.1.x/src/mochiweb/mochiweb_acceptor.erl
+++ /dev/null
@@ -1,48 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc MochiWeb acceptor.
-
--module(mochiweb_acceptor).
--author('bob@mochimedia.com').
-
--include("internal.hrl").
-
--export([start_link/3, init/3]).
-
-start_link(Server, Listen, Loop) ->
- proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop]).
-
-init(Server, Listen, Loop) ->
- T1 = now(),
- case catch mochiweb_socket:accept(Listen) of
- {ok, Socket} ->
- gen_server:cast(Server, {accepted, self(), timer:now_diff(now(), T1)}),
- call_loop(Loop, Socket);
- {error, closed} ->
- exit(normal);
- {error, timeout} ->
- exit(normal);
- {error, esslaccept} ->
- exit(normal);
- Other ->
- error_logger:error_report(
- [{application, mochiweb},
- "Accept failed error",
- lists:flatten(io_lib:format("~p", [Other]))]),
- exit({error, accept_failed})
- end.
-
-call_loop({M, F}, Socket) ->
- M:F(Socket);
-call_loop({M, F, A}, Socket) ->
- erlang:apply(M, F, [Socket | A]);
-call_loop(Loop, Socket) ->
- Loop(Socket).
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
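
call_loop/2 above accepts the connection loop in three shapes: {M, F}, {M, F, A}, or a bare fun, each ultimately receiving the accepted socket as its first argument. A hedged sketch of supplying each form (my_handler is an illustrative module name, not part of mochiweb):

    %% Three equivalent ways to hand a connection loop to the acceptor.
    spawn_acceptors(Server, Listen) ->
        mochiweb_acceptor:start_link(Server, Listen, {my_handler, loop}),
        mochiweb_acceptor:start_link(Server, Listen, {my_handler, loop, [extra_arg]}),
        mochiweb_acceptor:start_link(Server, Listen, fun my_handler:loop/1).
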
diff --git a/1.1.x/src/mochiweb/mochiweb_app.erl b/1.1.x/src/mochiweb/mochiweb_app.erl
deleted file mode 100644
index 5d67787b..00000000
--- a/1.1.x/src/mochiweb/mochiweb_app.erl
+++ /dev/null
@@ -1,27 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Callbacks for the mochiweb application.
-
--module(mochiweb_app).
--author('bob@mochimedia.com').
-
--behaviour(application).
--export([start/2,stop/1]).
-
-%% @spec start(_Type, _StartArgs) -> ServerRet
-%% @doc application start callback for mochiweb.
-start(_Type, _StartArgs) ->
- mochiweb_sup:start_link().
-
-%% @spec stop(_State) -> ServerRet
-%% @doc application stop callback for mochiweb.
-stop(_State) ->
- ok.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
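
With these callbacks in place, mochiweb boots like any OTP application; this assumes the compiled modules and the mochiweb .app file are on the code path:

    %% Start mochiweb_sup via the application controller.
    ok = application:start(mochiweb).
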
diff --git a/1.1.x/src/mochiweb/mochiweb_charref.erl b/1.1.x/src/mochiweb/mochiweb_charref.erl
deleted file mode 100644
index 99cd5502..00000000
--- a/1.1.x/src/mochiweb/mochiweb_charref.erl
+++ /dev/null
@@ -1,308 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Converts HTML 4 charrefs and entities to codepoints.
--module(mochiweb_charref).
--export([charref/1]).
-
-%% External API.
-
-%% @spec charref(S) -> integer() | undefined
-%% @doc Convert a decimal charref, hex charref, or HTML entity to a Unicode
-%% codepoint, or return undefined on failure.
-%% The input should not include an ampersand or semicolon.
-%% charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38.
-charref(B) when is_binary(B) ->
- charref(binary_to_list(B));
-charref([$#, C | L]) when C =:= $x orelse C =:= $X ->
- try erlang:list_to_integer(L, 16)
- catch
- error:badarg -> undefined
- end;
-charref([$# | L]) ->
- try list_to_integer(L)
- catch
- error:badarg -> undefined
- end;
-charref(L) ->
- entity(L).
-
-%% Internal API.
-
-entity("nbsp") -> 160;
-entity("iexcl") -> 161;
-entity("cent") -> 162;
-entity("pound") -> 163;
-entity("curren") -> 164;
-entity("yen") -> 165;
-entity("brvbar") -> 166;
-entity("sect") -> 167;
-entity("uml") -> 168;
-entity("copy") -> 169;
-entity("ordf") -> 170;
-entity("laquo") -> 171;
-entity("not") -> 172;
-entity("shy") -> 173;
-entity("reg") -> 174;
-entity("macr") -> 175;
-entity("deg") -> 176;
-entity("plusmn") -> 177;
-entity("sup2") -> 178;
-entity("sup3") -> 179;
-entity("acute") -> 180;
-entity("micro") -> 181;
-entity("para") -> 182;
-entity("middot") -> 183;
-entity("cedil") -> 184;
-entity("sup1") -> 185;
-entity("ordm") -> 186;
-entity("raquo") -> 187;
-entity("frac14") -> 188;
-entity("frac12") -> 189;
-entity("frac34") -> 190;
-entity("iquest") -> 191;
-entity("Agrave") -> 192;
-entity("Aacute") -> 193;
-entity("Acirc") -> 194;
-entity("Atilde") -> 195;
-entity("Auml") -> 196;
-entity("Aring") -> 197;
-entity("AElig") -> 198;
-entity("Ccedil") -> 199;
-entity("Egrave") -> 200;
-entity("Eacute") -> 201;
-entity("Ecirc") -> 202;
-entity("Euml") -> 203;
-entity("Igrave") -> 204;
-entity("Iacute") -> 205;
-entity("Icirc") -> 206;
-entity("Iuml") -> 207;
-entity("ETH") -> 208;
-entity("Ntilde") -> 209;
-entity("Ograve") -> 210;
-entity("Oacute") -> 211;
-entity("Ocirc") -> 212;
-entity("Otilde") -> 213;
-entity("Ouml") -> 214;
-entity("times") -> 215;
-entity("Oslash") -> 216;
-entity("Ugrave") -> 217;
-entity("Uacute") -> 218;
-entity("Ucirc") -> 219;
-entity("Uuml") -> 220;
-entity("Yacute") -> 221;
-entity("THORN") -> 222;
-entity("szlig") -> 223;
-entity("agrave") -> 224;
-entity("aacute") -> 225;
-entity("acirc") -> 226;
-entity("atilde") -> 227;
-entity("auml") -> 228;
-entity("aring") -> 229;
-entity("aelig") -> 230;
-entity("ccedil") -> 231;
-entity("egrave") -> 232;
-entity("eacute") -> 233;
-entity("ecirc") -> 234;
-entity("euml") -> 235;
-entity("igrave") -> 236;
-entity("iacute") -> 237;
-entity("icirc") -> 238;
-entity("iuml") -> 239;
-entity("eth") -> 240;
-entity("ntilde") -> 241;
-entity("ograve") -> 242;
-entity("oacute") -> 243;
-entity("ocirc") -> 244;
-entity("otilde") -> 245;
-entity("ouml") -> 246;
-entity("divide") -> 247;
-entity("oslash") -> 248;
-entity("ugrave") -> 249;
-entity("uacute") -> 250;
-entity("ucirc") -> 251;
-entity("uuml") -> 252;
-entity("yacute") -> 253;
-entity("thorn") -> 254;
-entity("yuml") -> 255;
-entity("fnof") -> 402;
-entity("Alpha") -> 913;
-entity("Beta") -> 914;
-entity("Gamma") -> 915;
-entity("Delta") -> 916;
-entity("Epsilon") -> 917;
-entity("Zeta") -> 918;
-entity("Eta") -> 919;
-entity("Theta") -> 920;
-entity("Iota") -> 921;
-entity("Kappa") -> 922;
-entity("Lambda") -> 923;
-entity("Mu") -> 924;
-entity("Nu") -> 925;
-entity("Xi") -> 926;
-entity("Omicron") -> 927;
-entity("Pi") -> 928;
-entity("Rho") -> 929;
-entity("Sigma") -> 931;
-entity("Tau") -> 932;
-entity("Upsilon") -> 933;
-entity("Phi") -> 934;
-entity("Chi") -> 935;
-entity("Psi") -> 936;
-entity("Omega") -> 937;
-entity("alpha") -> 945;
-entity("beta") -> 946;
-entity("gamma") -> 947;
-entity("delta") -> 948;
-entity("epsilon") -> 949;
-entity("zeta") -> 950;
-entity("eta") -> 951;
-entity("theta") -> 952;
-entity("iota") -> 953;
-entity("kappa") -> 954;
-entity("lambda") -> 955;
-entity("mu") -> 956;
-entity("nu") -> 957;
-entity("xi") -> 958;
-entity("omicron") -> 959;
-entity("pi") -> 960;
-entity("rho") -> 961;
-entity("sigmaf") -> 962;
-entity("sigma") -> 963;
-entity("tau") -> 964;
-entity("upsilon") -> 965;
-entity("phi") -> 966;
-entity("chi") -> 967;
-entity("psi") -> 968;
-entity("omega") -> 969;
-entity("thetasym") -> 977;
-entity("upsih") -> 978;
-entity("piv") -> 982;
-entity("bull") -> 8226;
-entity("hellip") -> 8230;
-entity("prime") -> 8242;
-entity("Prime") -> 8243;
-entity("oline") -> 8254;
-entity("frasl") -> 8260;
-entity("weierp") -> 8472;
-entity("image") -> 8465;
-entity("real") -> 8476;
-entity("trade") -> 8482;
-entity("alefsym") -> 8501;
-entity("larr") -> 8592;
-entity("uarr") -> 8593;
-entity("rarr") -> 8594;
-entity("darr") -> 8595;
-entity("harr") -> 8596;
-entity("crarr") -> 8629;
-entity("lArr") -> 8656;
-entity("uArr") -> 8657;
-entity("rArr") -> 8658;
-entity("dArr") -> 8659;
-entity("hArr") -> 8660;
-entity("forall") -> 8704;
-entity("part") -> 8706;
-entity("exist") -> 8707;
-entity("empty") -> 8709;
-entity("nabla") -> 8711;
-entity("isin") -> 8712;
-entity("notin") -> 8713;
-entity("ni") -> 8715;
-entity("prod") -> 8719;
-entity("sum") -> 8721;
-entity("minus") -> 8722;
-entity("lowast") -> 8727;
-entity("radic") -> 8730;
-entity("prop") -> 8733;
-entity("infin") -> 8734;
-entity("ang") -> 8736;
-entity("and") -> 8743;
-entity("or") -> 8744;
-entity("cap") -> 8745;
-entity("cup") -> 8746;
-entity("int") -> 8747;
-entity("there4") -> 8756;
-entity("sim") -> 8764;
-entity("cong") -> 8773;
-entity("asymp") -> 8776;
-entity("ne") -> 8800;
-entity("equiv") -> 8801;
-entity("le") -> 8804;
-entity("ge") -> 8805;
-entity("sub") -> 8834;
-entity("sup") -> 8835;
-entity("nsub") -> 8836;
-entity("sube") -> 8838;
-entity("supe") -> 8839;
-entity("oplus") -> 8853;
-entity("otimes") -> 8855;
-entity("perp") -> 8869;
-entity("sdot") -> 8901;
-entity("lceil") -> 8968;
-entity("rceil") -> 8969;
-entity("lfloor") -> 8970;
-entity("rfloor") -> 8971;
-entity("lang") -> 9001;
-entity("rang") -> 9002;
-entity("loz") -> 9674;
-entity("spades") -> 9824;
-entity("clubs") -> 9827;
-entity("hearts") -> 9829;
-entity("diams") -> 9830;
-entity("quot") -> 34;
-entity("amp") -> 38;
-entity("lt") -> 60;
-entity("gt") -> 62;
-entity("OElig") -> 338;
-entity("oelig") -> 339;
-entity("Scaron") -> 352;
-entity("scaron") -> 353;
-entity("Yuml") -> 376;
-entity("circ") -> 710;
-entity("tilde") -> 732;
-entity("ensp") -> 8194;
-entity("emsp") -> 8195;
-entity("thinsp") -> 8201;
-entity("zwnj") -> 8204;
-entity("zwj") -> 8205;
-entity("lrm") -> 8206;
-entity("rlm") -> 8207;
-entity("ndash") -> 8211;
-entity("mdash") -> 8212;
-entity("lsquo") -> 8216;
-entity("rsquo") -> 8217;
-entity("sbquo") -> 8218;
-entity("ldquo") -> 8220;
-entity("rdquo") -> 8221;
-entity("bdquo") -> 8222;
-entity("dagger") -> 8224;
-entity("Dagger") -> 8225;
-entity("permil") -> 8240;
-entity("lsaquo") -> 8249;
-entity("rsaquo") -> 8250;
-entity("euro") -> 8364;
-entity(_) -> undefined.
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-exhaustive_entity_test() ->
- T = mochiweb_cover:clause_lookup_table(?MODULE, entity),
- [?assertEqual(V, entity(K)) || {K, V} <- T].
-
-charref_test() ->
- 1234 = charref("#1234"),
- 255 = charref("#xfF"),
- 255 = charref(<<"#XFf">>),
- 38 = charref("amp"),
- 38 = charref(<<"amp">>),
- undefined = charref("not_an_entity"),
- undefined = charref("#not_an_entity"),
- undefined = charref("#xnot_an_entity"),
- ok.
-
--endif.
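
A few representative charref/1 calls, matching the forms documented and tested above:

    38        = mochiweb_charref:charref("amp"),   %% named entity
    38        = mochiweb_charref:charref("#38"),   %% decimal charref
    38        = mochiweb_charref:charref("#x26"),  %% hex charref
    undefined = mochiweb_charref:charref("bogus"). %% unknown name
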
diff --git a/1.1.x/src/mochiweb/mochiweb_cookies.erl b/1.1.x/src/mochiweb/mochiweb_cookies.erl
deleted file mode 100644
index c090b714..00000000
--- a/1.1.x/src/mochiweb/mochiweb_cookies.erl
+++ /dev/null
@@ -1,309 +0,0 @@
-%% @author Emad El-Haraty <emad@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965).
-
--module(mochiweb_cookies).
--export([parse_cookie/1, cookie/3, cookie/2]).
-
--define(QUOTE, $\").
-
--define(IS_WHITESPACE(C),
- (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
-
-%% RFC 2616 separators (called tspecials in RFC 2068)
--define(IS_SEPARATOR(C),
- (C < 32 orelse
- C =:= $\s orelse C =:= $\t orelse
- C =:= $( orelse C =:= $) orelse C =:= $< orelse C =:= $> orelse
- C =:= $@ orelse C =:= $, orelse C =:= $; orelse C =:= $: orelse
- C =:= $\\ orelse C =:= $\" orelse C =:= $/ orelse
- C =:= $[ orelse C =:= $] orelse C =:= $? orelse C =:= $= orelse
- C =:= ${ orelse C =:= $})).
-
-%% @type proplist() = [{Key::string(), Value::string()}].
-%% @type header() = {Name::string(), Value::string()}.
-
-%% @spec cookie(Key::string(), Value::string()) -> header()
-%% @doc Short-hand for <code>cookie(Key, Value, [])</code>.
-cookie(Key, Value) ->
- cookie(Key, Value, []).
-
-%% @spec cookie(Key::string(), Value::string(), Options::[Option]) -> header()
-%% where Option = {max_age, integer()} | {local_time, {date(), time()}}
-%% | {domain, string()} | {path, string()}
-%% | {secure, true | false} | {http_only, true | false}
-%%
-%% @doc Generate a Set-Cookie header field tuple.
-cookie(Key, Value, Options) ->
- Cookie = [any_to_list(Key), "=", quote(Value), "; Version=1"],
- %% Set-Cookie:
- %% Comment, Domain, Max-Age, Path, Secure, Version
- %% Set-Cookie2:
- %% Comment, CommentURL, Discard, Domain, Max-Age, Path, Port, Secure,
- %% Version
- ExpiresPart =
- case proplists:get_value(max_age, Options) of
- undefined ->
- "";
- RawAge ->
- When = case proplists:get_value(local_time, Options) of
- undefined ->
- calendar:local_time();
- LocalTime ->
- LocalTime
- end,
- Age = case RawAge < 0 of
- true ->
- 0;
- false ->
- RawAge
- end,
- ["; Expires=", age_to_cookie_date(Age, When),
- "; Max-Age=", quote(Age)]
- end,
- SecurePart =
- case proplists:get_value(secure, Options) of
- true ->
- "; Secure";
- _ ->
- ""
- end,
- DomainPart =
- case proplists:get_value(domain, Options) of
- undefined ->
- "";
- Domain ->
- ["; Domain=", quote(Domain)]
- end,
- PathPart =
- case proplists:get_value(path, Options) of
- undefined ->
- "";
- Path ->
- ["; Path=", quote(Path)]
- end,
- HttpOnlyPart =
- case proplists:get_value(http_only, Options) of
- true ->
- "; HttpOnly";
- _ ->
- ""
- end,
- CookieParts = [Cookie, ExpiresPart, SecurePart, DomainPart, PathPart, HttpOnlyPart],
- {"Set-Cookie", lists:flatten(CookieParts)}.
-
-
-%% Every major browser incorrectly handles quoted strings in a
-%% different and (worse) incompatible manner. Instead of wasting time
-%% writing redundant code for each browser, we restrict cookies to
-%% only contain characters that browsers handle compatibly.
-%%
-%% By replacing the definition of quote with this, we generate
-%% RFC-compliant cookies:
-%%
-%% quote(V) ->
-%% Fun = fun(?QUOTE, Acc) -> [$\\, ?QUOTE | Acc];
-%% (Ch, Acc) -> [Ch | Acc]
-%% end,
-%% [?QUOTE | lists:foldr(Fun, [?QUOTE], V)].
-
-%% Convert to a string and raise an error if quoting is required.
-quote(V0) ->
- V = any_to_list(V0),
- lists:all(fun(Ch) -> Ch =:= $/ orelse not ?IS_SEPARATOR(Ch) end, V)
- orelse erlang:error({cookie_quoting_required, V}),
- V.
-
-add_seconds(Secs, LocalTime) ->
- Greg = calendar:datetime_to_gregorian_seconds(LocalTime),
- calendar:gregorian_seconds_to_datetime(Greg + Secs).
-
-age_to_cookie_date(Age, LocalTime) ->
- httpd_util:rfc1123_date(add_seconds(Age, LocalTime)).
-
-%% @spec parse_cookie(string()) -> [{K::string(), V::string()}]
-%% @doc Parse the contents of a Cookie header field, ignoring cookie
-%% attributes, and return a simple property list.
-parse_cookie("") ->
- [];
-parse_cookie(Cookie) ->
- parse_cookie(Cookie, []).
-
-%% Internal API
-
-parse_cookie([], Acc) ->
- lists:reverse(Acc);
-parse_cookie(String, Acc) ->
- {{Token, Value}, Rest} = read_pair(String),
- Acc1 = case Token of
- "" ->
- Acc;
- "$" ++ _ ->
- Acc;
- _ ->
- [{Token, Value} | Acc]
- end,
- parse_cookie(Rest, Acc1).
-
-read_pair(String) ->
- {Token, Rest} = read_token(skip_whitespace(String)),
- {Value, Rest1} = read_value(skip_whitespace(Rest)),
- {{Token, Value}, skip_past_separator(Rest1)}.
-
-read_value([$= | Value]) ->
- Value1 = skip_whitespace(Value),
- case Value1 of
- [?QUOTE | _] ->
- read_quoted(Value1);
- _ ->
- read_token(Value1)
- end;
-read_value(String) ->
- {"", String}.
-
-read_quoted([?QUOTE | String]) ->
- read_quoted(String, []).
-
-read_quoted([], Acc) ->
- {lists:reverse(Acc), []};
-read_quoted([?QUOTE | Rest], Acc) ->
- {lists:reverse(Acc), Rest};
-read_quoted([$\\, Any | Rest], Acc) ->
- read_quoted(Rest, [Any | Acc]);
-read_quoted([C | Rest], Acc) ->
- read_quoted(Rest, [C | Acc]).
-
-skip_whitespace(String) ->
- F = fun (C) -> ?IS_WHITESPACE(C) end,
- lists:dropwhile(F, String).
-
-read_token(String) ->
- F = fun (C) -> not ?IS_SEPARATOR(C) end,
- lists:splitwith(F, String).
-
-skip_past_separator([]) ->
- [];
-skip_past_separator([$; | Rest]) ->
- Rest;
-skip_past_separator([$, | Rest]) ->
- Rest;
-skip_past_separator([_ | Rest]) ->
- skip_past_separator(Rest).
-
-any_to_list(V) when is_list(V) ->
- V;
-any_to_list(V) when is_atom(V) ->
- atom_to_list(V);
-any_to_list(V) when is_binary(V) ->
- binary_to_list(V);
-any_to_list(V) when is_integer(V) ->
- integer_to_list(V).
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-quote_test() ->
- %% ?assertError eunit macro is not compatible with coverage module
- try quote(":wq")
- catch error:{cookie_quoting_required, ":wq"} -> ok
- end,
- ?assertEqual(
- "foo",
- quote(foo)),
- ok.
-
-parse_cookie_test() ->
- %% RFC example
- C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
- Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
- Shipping=\"FedEx\"; $Path=\"/acme\"",
- ?assertEqual(
- [{"Customer","WILE_E_COYOTE"},
- {"Part_Number","Rocket_Launcher_0001"},
- {"Shipping","FedEx"}],
- parse_cookie(C1)),
- %% Potential edge cases
- ?assertEqual(
- [{"foo", "x"}],
- parse_cookie("foo=\"\\x\"")),
- ?assertEqual(
- [],
- parse_cookie("=")),
- ?assertEqual(
- [{"foo", ""}, {"bar", ""}],
- parse_cookie(" foo ; bar ")),
- ?assertEqual(
- [{"foo", ""}, {"bar", ""}],
- parse_cookie("foo=;bar=")),
- ?assertEqual(
- [{"foo", "\";"}, {"bar", ""}],
- parse_cookie("foo = \"\\\";\";bar ")),
- ?assertEqual(
- [{"foo", "\";bar"}],
- parse_cookie("foo=\"\\\";bar")),
- ?assertEqual(
- [],
- parse_cookie([])),
- ?assertEqual(
- [{"foo", "bar"}, {"baz", "wibble"}],
- parse_cookie("foo=bar , baz=wibble ")),
- ok.
-
-domain_test() ->
- ?assertEqual(
- {"Set-Cookie",
- "Customer=WILE_E_COYOTE; "
- "Version=1; "
- "Domain=acme.com; "
- "HttpOnly"},
- cookie("Customer", "WILE_E_COYOTE",
- [{http_only, true}, {domain, "acme.com"}])),
- ok.
-
-local_time_test() ->
- {"Set-Cookie", S} = cookie("Customer", "WILE_E_COYOTE",
- [{max_age, 111}, {secure, true}]),
- ?assertMatch(
- ["Customer=WILE_E_COYOTE",
- " Version=1",
- " Expires=" ++ _,
- " Max-Age=111",
- " Secure"],
- string:tokens(S, ";")),
- ok.
-
-cookie_test() ->
- C1 = {"Set-Cookie",
- "Customer=WILE_E_COYOTE; "
- "Version=1; "
- "Path=/acme"},
- C1 = cookie("Customer", "WILE_E_COYOTE", [{path, "/acme"}]),
- C1 = cookie("Customer", "WILE_E_COYOTE",
- [{path, "/acme"}, {badoption, "negatory"}]),
- C1 = cookie('Customer', 'WILE_E_COYOTE', [{path, '/acme'}]),
- C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]),
-
- {"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey", []),
- {"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey"),
- LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
- C2 = {"Set-Cookie",
- "Customer=WILE_E_COYOTE; "
- "Version=1; "
- "Expires=Tue, 15 May 2007 13:45:33 GMT; "
- "Max-Age=0"},
- C2 = cookie("Customer", "WILE_E_COYOTE",
- [{max_age, -111}, {local_time, LocalTime}]),
- C3 = {"Set-Cookie",
- "Customer=WILE_E_COYOTE; "
- "Version=1; "
- "Expires=Wed, 16 May 2007 13:45:50 GMT; "
- "Max-Age=86417"},
- C3 = cookie("Customer", "WILE_E_COYOTE",
- [{max_age, 86417}, {local_time, LocalTime}]),
- ok.
-
--endif.
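
Taken together, cookie/3 and parse_cookie/1 round-trip the common case; the expected values below mirror cookie_test and parse_cookie_test above:

    %% Build a Set-Cookie header tuple, then parse a Cookie header value.
    {"Set-Cookie", "Customer=WILE_E_COYOTE; Version=1; Path=/acme"} =
        mochiweb_cookies:cookie("Customer", "WILE_E_COYOTE", [{path, "/acme"}]),
    [{"Customer", "WILE_E_COYOTE"}] =
        mochiweb_cookies:parse_cookie("Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\"").

Note that "$"-prefixed attribute tokens such as $Path are dropped by the parser, per the RFC 2109 example exercised in parse_cookie_test.
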
diff --git a/1.1.x/src/mochiweb/mochiweb_cover.erl b/1.1.x/src/mochiweb/mochiweb_cover.erl
deleted file mode 100644
index 6a14ef51..00000000
--- a/1.1.x/src/mochiweb/mochiweb_cover.erl
+++ /dev/null
@@ -1,75 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc Workarounds for various cover deficiencies.
--module(mochiweb_cover).
--export([get_beam/1, get_abstract_code/1,
- get_clauses/2, clause_lookup_table/1]).
--export([clause_lookup_table/2]).
-
-%% Internal
-
-get_beam(Module) ->
- {Module, Beam, _Path} = code:get_object_code(Module),
- Beam.
-
-get_abstract_code(Beam) ->
- {ok, {_Module,
- [{abstract_code,
- {raw_abstract_v1, L}}]}} = beam_lib:chunks(Beam, [abstract_code]),
- L.
-
-get_clauses(Function, Code) ->
- [L] = [Clauses || {function, _, FName, _, Clauses}
- <- Code, FName =:= Function],
- L.
-
-clause_lookup_table(Module, Function) ->
- clause_lookup_table(
- get_clauses(Function,
- get_abstract_code(get_beam(Module)))).
-
-clause_lookup_table(Clauses) ->
- lists:foldr(fun clause_fold/2, [], Clauses).
-
-clause_fold({clause, _,
- [InTerm],
- _Guards=[],
- [OutTerm]},
- Acc) ->
- try [{erl_parse:normalise(InTerm), erl_parse:normalise(OutTerm)} | Acc]
- catch error:_ -> Acc
- end;
-clause_fold(_, Acc) ->
- Acc.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-foo_table(a) -> b;
-foo_table("a") -> <<"b">>;
-foo_table(123) -> {4, 3, 2};
-foo_table([list]) -> [];
-foo_table([list1, list2]) -> [list1, list2, list3];
-foo_table(ignored) -> some, code, ignored;
-foo_table(Var) -> Var.
-
-foo_table_test() ->
- T = clause_lookup_table(?MODULE, foo_table),
- [?assertEqual(V, foo_table(K)) || {K, V} <- T].
-
-clause_lookup_table_test() ->
- ?assertEqual(b, foo_table(a)),
- ?assertEqual(ignored, foo_table(ignored)),
- ?assertEqual('Var', foo_table('Var')),
- ?assertEqual(
- [{a, b},
- {"a", <<"b">>},
- {123, {4, 3, 2}},
- {[list], []},
- {[list1, list2], [list1, list2, list3]}],
- clause_lookup_table(?MODULE, foo_table)).
-
--endif.
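
The same lookup-table trick powers exhaustive_entity_test in mochiweb_charref: pull the literal clauses of a function out of its abstract code and check the compiled function against every {Input, Output} pair. A close variant, driven through the exported charref/1 since entity/1 itself is not exported, and assuming mochiweb_charref was compiled with debug_info so its abstract code is available:

    %% Every literal entity clause should agree with the compiled module.
    T = mochiweb_cover:clause_lookup_table(mochiweb_charref, entity),
    [V = mochiweb_charref:charref(K) || {K, V} <- T].
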
diff --git a/1.1.x/src/mochiweb/mochiweb_echo.erl b/1.1.x/src/mochiweb/mochiweb_echo.erl
deleted file mode 100644
index 6f7872b9..00000000
--- a/1.1.x/src/mochiweb/mochiweb_echo.erl
+++ /dev/null
@@ -1,38 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Simple and stupid echo server to demo mochiweb_socket_server.
-
--module(mochiweb_echo).
--author('bob@mochimedia.com').
--export([start/0, stop/0, loop/1]).
-
-stop() ->
- mochiweb_socket_server:stop(?MODULE).
-
-start() ->
- mochiweb_socket_server:start([{name, ?MODULE},
- {port, 6789},
- {ip, "127.0.0.1"},
- {max, 1},
- {loop, {?MODULE, loop}}]).
-
-loop(Socket) ->
- case mochiweb_socket:recv(Socket, 0, 30000) of
- {ok, Data} ->
- case mochiweb_socket:send(Socket, Data) of
- ok ->
- loop(Socket);
- _ ->
- exit(normal)
- end;
- _Other ->
- exit(normal)
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
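
A quick manual exercise of the echo server, assuming 127.0.0.1:6789 is free and that mochiweb_socket_server:start/1 returns {ok, Pid} as a gen_server does (gen_tcp:recv/2 may in principle return partial data, but a five-byte loopback echo will arrive whole in practice):

    {ok, _Pid} = mochiweb_echo:start(),
    {ok, Sock} = gen_tcp:connect("127.0.0.1", 6789, [binary, {active, false}]),
    ok = gen_tcp:send(Sock, <<"hello">>),
    {ok, <<"hello">>} = gen_tcp:recv(Sock, 0),
    ok = gen_tcp:close(Sock),
    mochiweb_echo:stop().
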
diff --git a/1.1.x/src/mochiweb/mochiweb_headers.erl b/1.1.x/src/mochiweb/mochiweb_headers.erl
deleted file mode 100644
index 4fce9838..00000000
--- a/1.1.x/src/mochiweb/mochiweb_headers.erl
+++ /dev/null
@@ -1,299 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Case-preserving (but case-insensitive) HTTP header dictionary.
-
--module(mochiweb_headers).
--author('bob@mochimedia.com').
--export([empty/0, from_list/1, insert/3, enter/3, get_value/2, lookup/2]).
--export([delete_any/2, get_primary_value/2]).
--export([default/3, enter_from_list/2, default_from_list/2]).
--export([to_list/1, make/1]).
--export([from_binary/1]).
-
-%% @type headers().
-%% @type key() = atom() | binary() | string().
-%% @type value() = atom() | binary() | string() | integer().
-
-%% @spec empty() -> headers()
-%% @doc Create an empty headers structure.
-empty() ->
- gb_trees:empty().
-
-%% @spec make(headers() | [{key(), value()}]) -> headers()
-%% @doc Construct a headers() from the given list.
-make(L) when is_list(L) ->
- from_list(L);
-%% Assume a tuple is already a mochiweb_headers structure.
-make(T) when is_tuple(T) ->
- T.
-
-%% @spec from_binary(iolist()) -> headers()
-%% @doc Transforms a raw HTTP header into a mochiweb headers structure.
-%%
-%% The given raw HTTP header can be one of the following:
-%%
-%% 1) A string or a binary representing a full HTTP header ending with
-%% double CRLF.
-%% Examples:
-%% ```
-%% "Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n"
-%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>'''
-%%
-%% 2) A list of binaries or strings where each element represents a raw
-%% HTTP header line ending with a single CRLF.
-%% Examples:
-%% ```
-%% [<<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">>]
-%% ["Content-Length: 47\r\n", "Content-Type: text/plain\r\n"]
-%% ["Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">>]'''
-%%
-from_binary(RawHttpHeader) when is_binary(RawHttpHeader) ->
- from_binary(RawHttpHeader, []);
-from_binary(RawHttpHeaderList) ->
- from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])).
-
-from_binary(RawHttpHeader, Acc) ->
- case erlang:decode_packet(httph, RawHttpHeader, []) of
- {ok, {http_header, _, H, _, V}, Rest} ->
- from_binary(Rest, [{H, V} | Acc]);
- _ ->
- make(Acc)
- end.
-
-%% @spec from_list([{key(), value()}]) -> headers()
-%% @doc Construct a headers() from the given list.
-from_list(List) ->
- lists:foldl(fun ({K, V}, T) -> insert(K, V, T) end, empty(), List).
-
-%% @spec enter_from_list([{key(), value()}], headers()) -> headers()
-%% @doc Insert pairs into the headers, replacing any values for existing keys.
-enter_from_list(List, T) ->
- lists:foldl(fun ({K, V}, T1) -> enter(K, V, T1) end, T, List).
-
-%% @spec default_from_list([{key(), value()}], headers()) -> headers()
-%% @doc Insert pairs into the headers for keys that do not already exist.
-default_from_list(List, T) ->
- lists:foldl(fun ({K, V}, T1) -> default(K, V, T1) end, T, List).
-
-%% @spec to_list(headers()) -> [{key(), string()}]
-%% @doc Return the contents of the headers. Each key will be the exact key
-%% that was first inserted (it may be an atom or binary; case is
-%% preserved).
-to_list(T) ->
- F = fun ({K, {array, L}}, Acc) ->
- L1 = lists:reverse(L),
- lists:foldl(fun (V, Acc1) -> [{K, V} | Acc1] end, Acc, L1);
- (Pair, Acc) ->
- [Pair | Acc]
- end,
- lists:reverse(lists:foldl(F, [], gb_trees:values(T))).
-
-%% @spec get_value(key(), headers()) -> string() | undefined
-%% @doc Return the value of the given header using a case insensitive search.
-%% undefined will be returned for keys that are not present.
-get_value(K, T) ->
- case lookup(K, T) of
- {value, {_, V}} ->
- expand(V);
- none ->
- undefined
- end.
-
-%% @spec get_primary_value(key(), headers()) -> string() | undefined
-%% @doc Return the value of the given header up to the first semicolon using
-%% a case insensitive search. undefined will be returned for keys
-%% that are not present.
-get_primary_value(K, T) ->
- case get_value(K, T) of
- undefined ->
- undefined;
- V ->
- lists:takewhile(fun (C) -> C =/= $; end, V)
- end.
-
-%% @spec lookup(key(), headers()) -> {value, {key(), string()}} | none
-%% @doc Return the case preserved key and value for the given header using
-%% a case insensitive search. none will be returned for keys that are
-%% not present.
-lookup(K, T) ->
- case gb_trees:lookup(normalize(K), T) of
- {value, {K0, V}} ->
- {value, {K0, expand(V)}};
- none ->
- none
- end.
-
-%% @spec default(key(), value(), headers()) -> headers()
-%% @doc Insert the pair into the headers if it does not already exist.
-default(K, V, T) ->
- K1 = normalize(K),
- V1 = any_to_list(V),
- try gb_trees:insert(K1, {K, V1}, T)
- catch
- error:{key_exists, _} ->
- T
- end.
-
-%% @spec enter(key(), value(), headers()) -> headers()
-%% @doc Insert the pair into the headers, replacing any pre-existing key.
-enter(K, V, T) ->
- K1 = normalize(K),
- V1 = any_to_list(V),
- gb_trees:enter(K1, {K, V1}, T).
-
-%% @spec insert(key(), value(), headers()) -> headers()
-%% @doc Insert the pair into the headers, merging with any pre-existing key.
-%% A merge is done with Value = V0 ++ ", " ++ V1.
-insert(K, V, T) ->
- K1 = normalize(K),
- V1 = any_to_list(V),
- try gb_trees:insert(K1, {K, V1}, T)
- catch
- error:{key_exists, _} ->
- {K0, V0} = gb_trees:get(K1, T),
- V2 = merge(K1, V1, V0),
- gb_trees:update(K1, {K0, V2}, T)
- end.
-
-%% @spec delete_any(key(), headers()) -> headers()
-%% @doc Delete the header corresponding to key if it is present.
-delete_any(K, T) ->
- K1 = normalize(K),
- gb_trees:delete_any(K1, T).
-
-%% Internal API
-
-expand({array, L}) ->
- mochiweb_util:join(lists:reverse(L), ", ");
-expand(V) ->
- V.
-
-merge("set-cookie", V1, {array, L}) ->
- {array, [V1 | L]};
-merge("set-cookie", V1, V0) ->
- {array, [V1, V0]};
-merge(_, V1, V0) ->
- V0 ++ ", " ++ V1.
-
-normalize(K) when is_list(K) ->
- string:to_lower(K);
-normalize(K) when is_atom(K) ->
- normalize(atom_to_list(K));
-normalize(K) when is_binary(K) ->
- normalize(binary_to_list(K)).
-
-any_to_list(V) when is_list(V) ->
- V;
-any_to_list(V) when is_atom(V) ->
- atom_to_list(V);
-any_to_list(V) when is_binary(V) ->
- binary_to_list(V);
-any_to_list(V) when is_integer(V) ->
- integer_to_list(V).
-
-%%
-%% Tests.
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-make_test() ->
- Identity = make([{hdr, foo}]),
- ?assertEqual(
- Identity,
- make(Identity)).
-
-enter_from_list_test() ->
- H = make([{hdr, foo}]),
- ?assertEqual(
- [{baz, "wibble"}, {hdr, "foo"}],
- to_list(enter_from_list([{baz, wibble}], H))),
- ?assertEqual(
- [{hdr, "bar"}],
- to_list(enter_from_list([{hdr, bar}], H))),
- ok.
-
-default_from_list_test() ->
- H = make([{hdr, foo}]),
- ?assertEqual(
- [{baz, "wibble"}, {hdr, "foo"}],
- to_list(default_from_list([{baz, wibble}], H))),
- ?assertEqual(
- [{hdr, "foo"}],
- to_list(default_from_list([{hdr, bar}], H))),
- ok.
-
-get_primary_value_test() ->
- H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]),
- ?assertEqual(
- "foo",
- get_primary_value(hdr, H)),
- ?assertEqual(
- undefined,
- get_primary_value(bar, H)),
- ?assertEqual(
- "wibble",
- get_primary_value(<<"baz">>, H)),
- ok.
-
-set_cookie_test() ->
- H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]),
- ?assertEqual(
- [{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}],
- to_list(H)),
- ok.
-
-headers_test() ->
- H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
- [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
- H1 = ?MODULE:insert(taco, grande, H),
- [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
- H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
- [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
- H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
- [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
- "foo, bar" = ?MODULE:get_value("set-cookie", H3),
- {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
- undefined = ?MODULE:get_value("shibby", H3),
- none = ?MODULE:lookup("shibby", H3),
- H4 = ?MODULE:insert("content-type",
- "application/x-www-form-urlencoded; charset=utf8",
- H3),
- "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
- "content-type", H4),
- H4 = ?MODULE:delete_any("nonexistent-header", H4),
- H3 = ?MODULE:delete_any("content-type", H4),
- HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
- H_HB = ?MODULE:from_binary(HB),
- H_HB = ?MODULE:from_binary(binary_to_list(HB)),
- "47" = ?MODULE:get_value("Content-Length", H_HB),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HB),
- L_H_HB = ?MODULE:to_list(H_HB),
- 2 = length(L_H_HB),
- true = lists:member({'Content-Length', "47"}, L_H_HB),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
- HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
- HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
- HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
- H_HL = ?MODULE:from_binary(HL),
- H_HL = ?MODULE:from_binary(HL2),
- H_HL = ?MODULE:from_binary(HL3),
- "47" = ?MODULE:get_value("Content-Length", H_HL),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HL),
- L_H_HL = ?MODULE:to_list(H_HL),
- 2 = length(L_H_HL),
- true = lists:member({'Content-Length', "47"}, L_H_HL),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary("")),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
- ok.
-
--endif.
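
In short: keys are normalized for lookup but stored as first written, and insert/3 merges values for a repeated key with ", " (except set-cookie, which is kept as separate headers). A small example in the spirit of headers_test above:

    H0 = mochiweb_headers:make([{"Content-Type", "text/html"}]),
    H1 = mochiweb_headers:insert("Accept", "text/plain", H0),
    "text/html" = mochiweb_headers:get_value("content-type", H1),
    {value, {"Content-Type", "text/html"}} =
        mochiweb_headers:lookup("CONTENT-TYPE", H1).
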
diff --git a/1.1.x/src/mochiweb/mochiweb_html.erl b/1.1.x/src/mochiweb/mochiweb_html.erl
deleted file mode 100644
index a15c359c..00000000
--- a/1.1.x/src/mochiweb/mochiweb_html.erl
+++ /dev/null
@@ -1,1061 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Loosely tokenizes and generates parse trees for HTML 4.
--module(mochiweb_html).
--export([tokens/1, parse/1, parse_tokens/1, to_tokens/1, escape/1,
- escape_attr/1, to_html/1]).
-
-%% This is a macro to placate syntax highlighters.
--define(QUOTE, $\").
--define(SQUOTE, $\').
--define(ADV_COL(S, N),
- S#decoder{column=N+S#decoder.column,
- offset=N+S#decoder.offset}).
--define(INC_COL(S),
- S#decoder{column=1+S#decoder.column,
- offset=1+S#decoder.offset}).
--define(INC_LINE(S),
- S#decoder{column=1,
- line=1+S#decoder.line,
- offset=1+S#decoder.offset}).
--define(INC_CHAR(S, C),
- case C of
- $\n ->
- S#decoder{column=1,
- line=1+S#decoder.line,
- offset=1+S#decoder.offset};
- _ ->
- S#decoder{column=1+S#decoder.column,
- offset=1+S#decoder.offset}
- end).
-
--define(IS_WHITESPACE(C),
- (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
--define(IS_LITERAL_SAFE(C),
- ((C >= $A andalso C =< $Z) orelse (C >= $a andalso C =< $z)
- orelse (C >= $0 andalso C =< $9))).
--define(PROBABLE_CLOSE(C),
- (C =:= $> orelse ?IS_WHITESPACE(C))).
-
--record(decoder, {line=1,
- column=1,
- offset=0}).
-
-%% @type html_node() = {string(), [html_attr()], [html_node() | string()]}
-%% @type html_attr() = {string(), string()}
-%% @type html_token() = html_data() | start_tag() | end_tag() | inline_html() | html_comment() | html_doctype()
-%% @type html_data() = {data, string(), Whitespace::boolean()}
-%% @type start_tag() = {start_tag, Name, [html_attr()], Singleton::boolean()}
-%% @type end_tag() = {end_tag, Name}
-%% @type html_comment() = {comment, Comment}
-%% @type html_doctype() = {doctype, [Doctype]}
-%% @type inline_html() = {'=', iolist()}
-
-%% External API.
-
-%% @spec parse(string() | binary()) -> html_node()
-%% @doc Tokenize and then transform the token stream into an HTML tree.
-parse(Input) ->
- parse_tokens(tokens(Input)).
-
-%% @spec parse_tokens([html_token()]) -> html_node()
-%% @doc Transform the output of tokens(Doc) into an HTML tree.
-parse_tokens(Tokens) when is_list(Tokens) ->
- %% Skip over doctype, processing instructions
- F = fun (X) ->
- case X of
- {start_tag, _, _, false} ->
- false;
- _ ->
- true
- end
- end,
- [{start_tag, Tag, Attrs, false} | Rest] = lists:dropwhile(F, Tokens),
- {Tree, _} = tree(Rest, [norm({Tag, Attrs})]),
- Tree.
-
-%% @spec tokens(StringOrBinary) -> [html_token()]
-%% @doc Transform the input UTF-8 HTML into a token stream.
-tokens(Input) ->
- tokens(iolist_to_binary(Input), #decoder{}, []).
-
-%% @spec to_tokens(html_node()) -> [html_token()]
-%% @doc Convert an html_node() tree to a list of tokens.
-to_tokens({Tag0}) ->
- to_tokens({Tag0, [], []});
-to_tokens(T={'=', _}) ->
- [T];
-to_tokens(T={doctype, _}) ->
- [T];
-to_tokens(T={comment, _}) ->
- [T];
-to_tokens({Tag0, Acc}) ->
- %% This is only allowed in sub-tags: {p, [{"class", "foo"}]}
- to_tokens({Tag0, [], Acc});
-to_tokens({Tag0, Attrs, Acc}) ->
- Tag = to_tag(Tag0),
- to_tokens([{Tag, Acc}], [{start_tag, Tag, Attrs, is_singleton(Tag)}]).
-
-%% @spec to_html([html_token()] | html_node()) -> iolist()
-%% @doc Convert a list of html_token() to an HTML document.
-to_html(Node) when is_tuple(Node) ->
- to_html(to_tokens(Node));
-to_html(Tokens) when is_list(Tokens) ->
- to_html(Tokens, []).
-
-%% @spec escape(string() | atom() | binary()) -> binary()
-%% @doc Escape a string such that it's safe for HTML (amp; lt; gt;).
-escape(B) when is_binary(B) ->
- escape(binary_to_list(B), []);
-escape(A) when is_atom(A) ->
- escape(atom_to_list(A), []);
-escape(S) when is_list(S) ->
- escape(S, []).
-
-%% @spec escape_attr(string() | binary() | atom() | integer() | float()) -> binary()
-%% @doc Escape a string such that it's safe for HTML attrs
-%% (amp; lt; gt; quot;).
-escape_attr(B) when is_binary(B) ->
- escape_attr(binary_to_list(B), []);
-escape_attr(A) when is_atom(A) ->
- escape_attr(atom_to_list(A), []);
-escape_attr(S) when is_list(S) ->
- escape_attr(S, []);
-escape_attr(I) when is_integer(I) ->
- escape_attr(integer_to_list(I), []);
-escape_attr(F) when is_float(F) ->
- escape_attr(mochinum:digits(F), []).
-
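
That completes the external API: parse/1 builds an html_node() tree, and to_html/1 renders a tree (or token list) back to an iolist. A small round-trip, patterned on parse_test and to_html_test further down in this module:

    {<<"html">>, [], [{<<"p">>, [], [<<"hi">>]}]} =
        mochiweb_html:parse(<<"<html><p>hi</p></html>">>),
    <<"<html><p>hi</p></html>">> = iolist_to_binary(
        mochiweb_html:to_html({<<"html">>, [], [{<<"p">>, [], [<<"hi">>]}]})).
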
-to_html([], Acc) ->
- lists:reverse(Acc);
-to_html([{'=', Content} | Rest], Acc) ->
- to_html(Rest, [Content | Acc]);
-to_html([{pi, Tag, Attrs} | Rest], Acc) ->
- Open = [<<"<?">>,
- Tag,
- attrs_to_html(Attrs, []),
- <<"?>">>],
- to_html(Rest, [Open | Acc]);
-to_html([{comment, Comment} | Rest], Acc) ->
- to_html(Rest, [[<<"<!--">>, Comment, <<"-->">>] | Acc]);
-to_html([{doctype, Parts} | Rest], Acc) ->
- Inside = doctype_to_html(Parts, Acc),
- to_html(Rest, [[<<"<!DOCTYPE">>, Inside, <<">">>] | Acc]);
-to_html([{data, Data, _Whitespace} | Rest], Acc) ->
- to_html(Rest, [escape(Data) | Acc]);
-to_html([{start_tag, Tag, Attrs, Singleton} | Rest], Acc) ->
- Open = [<<"<">>,
- Tag,
- attrs_to_html(Attrs, []),
- case Singleton of
- true -> <<" />">>;
- false -> <<">">>
- end],
- to_html(Rest, [Open | Acc]);
-to_html([{end_tag, Tag} | Rest], Acc) ->
- to_html(Rest, [[<<"</">>, Tag, <<">">>] | Acc]).
-
-doctype_to_html([], Acc) ->
- lists:reverse(Acc);
-doctype_to_html([Word | Rest], Acc) ->
- case lists:all(fun (C) -> ?IS_LITERAL_SAFE(C) end,
- binary_to_list(iolist_to_binary(Word))) of
- true ->
- doctype_to_html(Rest, [[<<" ">>, Word] | Acc]);
- false ->
- doctype_to_html(Rest, [[<<" \"">>, escape_attr(Word), ?QUOTE] | Acc])
- end.
-
-attrs_to_html([], Acc) ->
- lists:reverse(Acc);
-attrs_to_html([{K, V} | Rest], Acc) ->
- attrs_to_html(Rest,
- [[<<" ">>, escape(K), <<"=\"">>,
- escape_attr(V), <<"\"">>] | Acc]).
-
-escape([], Acc) ->
- list_to_binary(lists:reverse(Acc));
-escape("<" ++ Rest, Acc) ->
- escape(Rest, lists:reverse("&lt;", Acc));
-escape(">" ++ Rest, Acc) ->
- escape(Rest, lists:reverse("&gt;", Acc));
-escape("&" ++ Rest, Acc) ->
- escape(Rest, lists:reverse("&amp;", Acc));
-escape([C | Rest], Acc) ->
- escape(Rest, [C | Acc]).
-
-escape_attr([], Acc) ->
- list_to_binary(lists:reverse(Acc));
-escape_attr("<" ++ Rest, Acc) ->
- escape_attr(Rest, lists:reverse("&lt;", Acc));
-escape_attr(">" ++ Rest, Acc) ->
- escape_attr(Rest, lists:reverse("&gt;", Acc));
-escape_attr("&" ++ Rest, Acc) ->
- escape_attr(Rest, lists:reverse("&amp;", Acc));
-escape_attr([?QUOTE | Rest], Acc) ->
- escape_attr(Rest, lists:reverse("&quot;", Acc));
-escape_attr([C | Rest], Acc) ->
- escape_attr(Rest, [C | Acc]).
-
-to_tag(A) when is_atom(A) ->
- norm(atom_to_list(A));
-to_tag(L) ->
- norm(L).
-
-to_tokens([], Acc) ->
- lists:reverse(Acc);
-to_tokens([{Tag, []} | Rest], Acc) ->
- to_tokens(Rest, [{end_tag, to_tag(Tag)} | Acc]);
-to_tokens([{Tag0, [{T0} | R1]} | Rest], Acc) ->
- %% Allow {br}
- to_tokens([{Tag0, [{T0, [], []} | R1]} | Rest], Acc);
-to_tokens([{Tag0, [T0={'=', _C0} | R1]} | Rest], Acc) ->
- %% Allow {'=', iolist()}
- to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
-to_tokens([{Tag0, [T0={comment, _C0} | R1]} | Rest], Acc) ->
- %% Allow {comment, iolist()}
- to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
-to_tokens([{Tag0, [T0={pi, _S0, _A0} | R1]} | Rest], Acc) ->
- %% Allow {pi, binary(), list()}
- to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
-to_tokens([{Tag0, [{T0, A0=[{_, _} | _]} | R1]} | Rest], Acc) ->
- %% Allow {p, [{"class", "foo"}]}
- to_tokens([{Tag0, [{T0, A0, []} | R1]} | Rest], Acc);
-to_tokens([{Tag0, [{T0, C0} | R1]} | Rest], Acc) ->
- %% Allow {p, "content"} and {p, <<"content">>}
- to_tokens([{Tag0, [{T0, [], C0} | R1]} | Rest], Acc);
-to_tokens([{Tag0, [{T0, A1, C0} | R1]} | Rest], Acc) when is_binary(C0) ->
- %% Allow {"p", [{"class", "foo"}], <<"content">>}
- to_tokens([{Tag0, [{T0, A1, binary_to_list(C0)} | R1]} | Rest], Acc);
-to_tokens([{Tag0, [{T0, A1, C0=[C | _]} | R1]} | Rest], Acc)
- when is_integer(C) ->
- %% Allow {"p", [{"class", "foo"}], "content"}
- to_tokens([{Tag0, [{T0, A1, [C0]} | R1]} | Rest], Acc);
-to_tokens([{Tag0, [{T0, A1, C1} | R1]} | Rest], Acc) ->
- %% Native {"p", [{"class", "foo"}], ["content"]}
- Tag = to_tag(Tag0),
- T1 = to_tag(T0),
- case is_singleton(norm(T1)) of
- true ->
- to_tokens([{Tag, R1} | Rest], [{start_tag, T1, A1, true} | Acc]);
- false ->
- to_tokens([{T1, C1}, {Tag, R1} | Rest],
- [{start_tag, T1, A1, false} | Acc])
- end;
-to_tokens([{Tag0, [L | R1]} | Rest], Acc) when is_list(L) ->
- %% List text
- Tag = to_tag(Tag0),
- to_tokens([{Tag, R1} | Rest], [{data, iolist_to_binary(L), false} | Acc]);
-to_tokens([{Tag0, [B | R1]} | Rest], Acc) when is_binary(B) ->
- %% Binary text
- Tag = to_tag(Tag0),
- to_tokens([{Tag, R1} | Rest], [{data, B, false} | Acc]).
-
-tokens(B, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary>> ->
- lists:reverse(Acc);
- _ ->
- {Tag, S1} = tokenize(B, S),
- case parse_flag(Tag) of
- script ->
- {Tag2, S2} = tokenize_script(B, S1),
- tokens(B, S2, [Tag2, Tag | Acc]);
- textarea ->
- {Tag2, S2} = tokenize_textarea(B, S1),
- tokens(B, S2, [Tag2, Tag | Acc]);
- none ->
- tokens(B, S1, [Tag | Acc])
- end
- end.
-
-parse_flag({start_tag, B, _, false}) ->
- case string:to_lower(binary_to_list(B)) of
- "script" ->
- script;
- "textarea" ->
- textarea;
- _ ->
- none
- end;
-parse_flag(_) ->
- none.
-
-tokenize(B, S=#decoder{offset=O}) ->
- case B of
- <<_:O/binary, "<!--", _/binary>> ->
- tokenize_comment(B, ?ADV_COL(S, 4));
- <<_:O/binary, "<!DOCTYPE", _/binary>> ->
- tokenize_doctype(B, ?ADV_COL(S, 10));
- <<_:O/binary, "<![CDATA[", _/binary>> ->
- tokenize_cdata(B, ?ADV_COL(S, 9));
- <<_:O/binary, "<?", _/binary>> ->
- {Tag, S1} = tokenize_literal(B, ?ADV_COL(S, 2)),
- {Attrs, S2} = tokenize_attributes(B, S1),
- S3 = find_qgt(B, S2),
- {{pi, Tag, Attrs}, S3};
- <<_:O/binary, "&", _/binary>> ->
- tokenize_charref(B, ?INC_COL(S));
- <<_:O/binary, "</", _/binary>> ->
- {Tag, S1} = tokenize_literal(B, ?ADV_COL(S, 2)),
- {S2, _} = find_gt(B, S1),
- {{end_tag, Tag}, S2};
- <<_:O/binary, "<", C, _/binary>> when ?IS_WHITESPACE(C) ->
- %% This isn't really strict HTML
- {{data, Data, _Whitespace}, S1} = tokenize_data(B, ?INC_COL(S)),
- {{data, <<$<, Data/binary>>, false}, S1};
- <<_:O/binary, "<", _/binary>> ->
- {Tag, S1} = tokenize_literal(B, ?INC_COL(S)),
- {Attrs, S2} = tokenize_attributes(B, S1),
- {S3, HasSlash} = find_gt(B, S2),
- Singleton = HasSlash orelse is_singleton(norm(binary_to_list(Tag))),
- {{start_tag, Tag, Attrs, Singleton}, S3};
- _ ->
- tokenize_data(B, S)
- end.
-
-tree_data([{data, Data, Whitespace} | Rest], AllWhitespace, Acc) ->
- tree_data(Rest, (Whitespace andalso AllWhitespace), [Data | Acc]);
-tree_data(Rest, AllWhitespace, Acc) ->
- {iolist_to_binary(lists:reverse(Acc)), AllWhitespace, Rest}.
-
-tree([], Stack) ->
- {destack(Stack), []};
-tree([{end_tag, Tag} | Rest], Stack) ->
- case destack(norm(Tag), Stack) of
- S when is_list(S) ->
- tree(Rest, S);
- Result ->
- {Result, []}
- end;
-tree([{start_tag, Tag, Attrs, true} | Rest], S) ->
- tree(Rest, append_stack_child(norm({Tag, Attrs}), S));
-tree([{start_tag, Tag, Attrs, false} | Rest], S) ->
- tree(Rest, stack(norm({Tag, Attrs}), S));
-tree([T={pi, _Tag, _Attrs} | Rest], S) ->
- tree(Rest, append_stack_child(T, S));
-tree([T={comment, _Comment} | Rest], S) ->
- tree(Rest, append_stack_child(T, S));
-tree(L=[{data, _Data, _Whitespace} | _], S) ->
- case tree_data(L, true, []) of
- {_, true, Rest} ->
- tree(Rest, S);
- {Data, false, Rest} ->
- tree(Rest, append_stack_child(Data, S))
- end;
-tree([{doctype, _} | Rest], Stack) ->
- tree(Rest, Stack).
-
-norm({Tag, Attrs}) ->
- {norm(Tag), [{norm(K), iolist_to_binary(V)} || {K, V} <- Attrs], []};
-norm(Tag) when is_binary(Tag) ->
- Tag;
-norm(Tag) ->
- list_to_binary(string:to_lower(Tag)).
-
-stack(T1={TN, _, _}, Stack=[{TN, _, _} | _Rest])
- when TN =:= <<"li">> orelse TN =:= <<"option">> ->
- [T1 | destack(TN, Stack)];
-stack(T1={TN0, _, _}, Stack=[{TN1, _, _} | _Rest])
- when (TN0 =:= <<"dd">> orelse TN0 =:= <<"dt">>) andalso
- (TN1 =:= <<"dd">> orelse TN1 =:= <<"dt">>) ->
- [T1 | destack(TN1, Stack)];
-stack(T1, Stack) ->
- [T1 | Stack].
-
-append_stack_child(StartTag, [{Name, Attrs, Acc} | Stack]) ->
- [{Name, Attrs, [StartTag | Acc]} | Stack].
-
-destack(TagName, Stack) when is_list(Stack) ->
- F = fun (X) ->
- case X of
- {TagName, _, _} ->
- false;
- _ ->
- true
- end
- end,
- case lists:splitwith(F, Stack) of
- {_, []} ->
- %% If we're parsing something like XML we might find
- %% a <link>tag</link> that is normally a singleton
- %% in HTML but isn't here
- case {is_singleton(TagName), Stack} of
- {true, [{T0, A0, Acc0} | Post0]} ->
- case lists:splitwith(F, Acc0) of
- {_, []} ->
- %% Actually was a singleton
- Stack;
- {Pre, [{T1, A1, []} | Post1]} ->
- [{T0, A0, [{T1, A1, lists:reverse(Pre)} | Post1]}
- | Post0]
- end;
- _ ->
- %% No match, no state change
- Stack
- end;
- {_Pre, [_T]} ->
- %% Unfurl the whole stack, we're done
- destack(Stack);
- {Pre, [T, {T0, A0, Acc0} | Post]} ->
- %% Unfurl up to the tag, then accumulate it
- [{T0, A0, [destack(Pre ++ [T]) | Acc0]} | Post]
- end.
-
-destack([{Tag, Attrs, Acc}]) ->
- {Tag, Attrs, lists:reverse(Acc)};
-destack([{T1, A1, Acc1}, {T0, A0, Acc0} | Rest]) ->
- destack([{T0, A0, [{T1, A1, lists:reverse(Acc1)} | Acc0]} | Rest]).
-
-is_singleton(<<"br">>) -> true;
-is_singleton(<<"hr">>) -> true;
-is_singleton(<<"img">>) -> true;
-is_singleton(<<"input">>) -> true;
-is_singleton(<<"base">>) -> true;
-is_singleton(<<"meta">>) -> true;
-is_singleton(<<"link">>) -> true;
-is_singleton(<<"area">>) -> true;
-is_singleton(<<"param">>) -> true;
-is_singleton(<<"col">>) -> true;
-is_singleton(_) -> false.
-
-tokenize_data(B, S=#decoder{offset=O}) ->
- tokenize_data(B, S, O, true).
-
-tokenize_data(B, S=#decoder{offset=O}, Start, Whitespace) ->
- case B of
- <<_:O/binary, C, _/binary>> when (C =/= $< andalso C =/= $&) ->
- tokenize_data(B, ?INC_CHAR(S, C), Start,
- (Whitespace andalso ?IS_WHITESPACE(C)));
- _ ->
- Len = O - Start,
- <<_:Start/binary, Data:Len/binary, _/binary>> = B,
- {{data, Data, Whitespace}, S}
- end.
-
-tokenize_attributes(B, S) ->
- tokenize_attributes(B, S, []).
-
-tokenize_attributes(B, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary>> ->
- {lists:reverse(Acc), S};
- <<_:O/binary, C, _/binary>> when (C =:= $> orelse C =:= $/) ->
- {lists:reverse(Acc), S};
- <<_:O/binary, "?>", _/binary>> ->
- {lists:reverse(Acc), S};
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- tokenize_attributes(B, ?INC_CHAR(S, C), Acc);
- _ ->
- {Attr, S1} = tokenize_literal(B, S),
- {Value, S2} = tokenize_attr_value(Attr, B, S1),
- tokenize_attributes(B, S2, [{Attr, Value} | Acc])
- end.
-
-tokenize_attr_value(Attr, B, S) ->
- S1 = skip_whitespace(B, S),
- O = S1#decoder.offset,
- case B of
- <<_:O/binary, "=", _/binary>> ->
- S2 = skip_whitespace(B, ?INC_COL(S1)),
- tokenize_word_or_literal(B, S2);
- _ ->
- {Attr, S1}
- end.
-
-skip_whitespace(B, S=#decoder{offset=O}) ->
- case B of
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- skip_whitespace(B, ?INC_CHAR(S, C));
- _ ->
- S
- end.
-
-tokenize_literal(Bin, S) ->
- tokenize_literal(Bin, S, []).
-
-tokenize_literal(Bin, S=#decoder{offset=O}, Acc) ->
- case Bin of
- <<_:O/binary, $&, _/binary>> ->
- {{data, Data, false}, S1} = tokenize_charref(Bin, ?INC_COL(S)),
- tokenize_literal(Bin, S1, [Data | Acc]);
- <<_:O/binary, C, _/binary>> when not (?IS_WHITESPACE(C)
- orelse C =:= $>
- orelse C =:= $/
- orelse C =:= $=) ->
- tokenize_literal(Bin, ?INC_COL(S), [C | Acc]);
- _ ->
- {iolist_to_binary(lists:reverse(Acc)), S}
- end.
-
-find_qgt(Bin, S=#decoder{offset=O}) ->
- case Bin of
- <<_:O/binary, "?>", _/binary>> ->
- ?ADV_COL(S, 2);
- %% tokenize_attributes takes care of this state:
- %% <<_:O/binary, C, _/binary>> ->
- %% find_qgt(Bin, ?INC_CHAR(S, C));
- <<_:O/binary>> ->
- S
- end.
-
-find_gt(Bin, S) ->
- find_gt(Bin, S, false).
-
-find_gt(Bin, S=#decoder{offset=O}, HasSlash) ->
- case Bin of
- <<_:O/binary, $/, _/binary>> ->
- find_gt(Bin, ?INC_COL(S), true);
- <<_:O/binary, $>, _/binary>> ->
- {?INC_COL(S), HasSlash};
- <<_:O/binary, C, _/binary>> ->
- find_gt(Bin, ?INC_CHAR(S, C), HasSlash);
- _ ->
- {S, HasSlash}
- end.
-
-tokenize_charref(Bin, S=#decoder{offset=O}) ->
- tokenize_charref(Bin, S, O).
-
-tokenize_charref(Bin, S=#decoder{offset=O}, Start) ->
- case Bin of
- <<_:O/binary>> ->
- <<_:Start/binary, Raw/binary>> = Bin,
- {{data, Raw, false}, S};
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C)
- orelse C =:= ?SQUOTE
- orelse C =:= ?QUOTE
- orelse C =:= $/
- orelse C =:= $> ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- {{data, Raw, false}, S};
- <<_:O/binary, $;, _/binary>> ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- Data = case mochiweb_charref:charref(Raw) of
- undefined ->
- Start1 = Start - 1,
- Len1 = Len + 2,
- <<_:Start1/binary, R:Len1/binary, _/binary>> = Bin,
- R;
- Unichar ->
- mochiutf8:codepoint_to_bytes(Unichar)
- end,
- {{data, Data, false}, ?INC_COL(S)};
- _ ->
- tokenize_charref(Bin, ?INC_COL(S), Start)
- end.
-
-tokenize_doctype(Bin, S) ->
- tokenize_doctype(Bin, S, []).
-
-tokenize_doctype(Bin, S=#decoder{offset=O}, Acc) ->
- case Bin of
- <<_:O/binary>> ->
- {{doctype, lists:reverse(Acc)}, S};
- <<_:O/binary, $>, _/binary>> ->
- {{doctype, lists:reverse(Acc)}, ?INC_COL(S)};
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- tokenize_doctype(Bin, ?INC_CHAR(S, C), Acc);
- _ ->
- {Word, S1} = tokenize_word_or_literal(Bin, S),
- tokenize_doctype(Bin, S1, [Word | Acc])
- end.
-
-tokenize_word_or_literal(Bin, S=#decoder{offset=O}) ->
- case Bin of
- <<_:O/binary, C, _/binary>> when C =:= ?QUOTE orelse C =:= ?SQUOTE ->
- tokenize_word(Bin, ?INC_COL(S), C);
- <<_:O/binary, C, _/binary>> when not ?IS_WHITESPACE(C) ->
- %% Sanity check for whitespace
- tokenize_literal(Bin, S, [])
- end.
-
-tokenize_word(Bin, S, Quote) ->
- tokenize_word(Bin, S, Quote, []).
-
-tokenize_word(Bin, S=#decoder{offset=O}, Quote, Acc) ->
- case Bin of
- <<_:O/binary>> ->
- {iolist_to_binary(lists:reverse(Acc)), S};
- <<_:O/binary, Quote, _/binary>> ->
- {iolist_to_binary(lists:reverse(Acc)), ?INC_COL(S)};
- <<_:O/binary, $&, _/binary>> ->
- {{data, Data, false}, S1} = tokenize_charref(Bin, ?INC_COL(S)),
- tokenize_word(Bin, S1, Quote, [Data | Acc]);
- <<_:O/binary, C, _/binary>> ->
- tokenize_word(Bin, ?INC_CHAR(S, C), Quote, [C | Acc])
- end.
-
-tokenize_cdata(Bin, S=#decoder{offset=O}) ->
- tokenize_cdata(Bin, S, O).
-
-tokenize_cdata(Bin, S=#decoder{offset=O}, Start) ->
- case Bin of
- <<_:O/binary, "]]>", _/binary>> ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- {{data, Raw, false}, ?ADV_COL(S, 3)};
- <<_:O/binary, C, _/binary>> ->
- tokenize_cdata(Bin, ?INC_CHAR(S, C), Start);
- _ ->
- <<_:O/binary, Raw/binary>> = Bin,
- {{data, Raw, false}, S}
- end.
-
-tokenize_comment(Bin, S=#decoder{offset=O}) ->
- tokenize_comment(Bin, S, O).
-
-tokenize_comment(Bin, S=#decoder{offset=O}, Start) ->
- case Bin of
- <<_:O/binary, "-->", _/binary>> ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- {{comment, Raw}, ?ADV_COL(S, 3)};
- <<_:O/binary, C, _/binary>> ->
- tokenize_comment(Bin, ?INC_CHAR(S, C), Start);
- <<_:Start/binary, Raw/binary>> ->
- {{comment, Raw}, S}
- end.
-
-tokenize_script(Bin, S=#decoder{offset=O}) ->
- tokenize_script(Bin, S, O).
-
-tokenize_script(Bin, S=#decoder{offset=O}, Start) ->
- case Bin of
- %% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, SS, CC, RR, II, PP, TT, ZZ, _/binary>>
- when (SS =:= $s orelse SS =:= $S) andalso
- (CC =:= $c orelse CC =:= $C) andalso
- (RR =:= $r orelse RR =:= $R) andalso
- (II =:= $i orelse II =:= $I) andalso
- (PP =:= $p orelse PP =:= $P) andalso
- (TT=:= $t orelse TT =:= $T) andalso
- ?PROBABLE_CLOSE(ZZ) ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- {{data, Raw, false}, S};
- <<_:O/binary, C, _/binary>> ->
- tokenize_script(Bin, ?INC_CHAR(S, C), Start);
- <<_:Start/binary, Raw/binary>> ->
- {{data, Raw, false}, S}
- end.
-
-tokenize_textarea(Bin, S=#decoder{offset=O}) ->
- tokenize_textarea(Bin, S, O).
-
-tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
- case Bin of
- %% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, TT, EE, XX, TT2, AA, RR, EE2, AA2, ZZ, _/binary>>
- when (TT =:= $t orelse TT =:= $T) andalso
- (EE =:= $e orelse EE =:= $E) andalso
- (XX =:= $x orelse XX =:= $X) andalso
- (TT2 =:= $t orelse TT2 =:= $T) andalso
- (AA =:= $a orelse AA =:= $A) andalso
- (RR =:= $r orelse RR =:= $R) andalso
- (EE2 =:= $e orelse EE2 =:= $E) andalso
- (AA2 =:= $a orelse AA2 =:= $A) andalso
- ?PROBABLE_CLOSE(ZZ) ->
- Len = O - Start,
- <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
- {{data, Raw, false}, S};
- <<_:O/binary, C, _/binary>> ->
- tokenize_textarea(Bin, ?INC_CHAR(S, C), Start);
- <<_:Start/binary, Raw/binary>> ->
- {{data, Raw, false}, S}
- end.
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-to_html_test() ->
- ?assertEqual(
- <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div>RAW!<!-- comment! --></body></html>">>,
- iolist_to_binary(
- to_html({html, [],
- [{<<"head">>, [],
- [{title, <<"hey!">>}]},
- {body, [],
- [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
- {'div', <<"sucka">>},
- {'=', <<"RAW!">>},
- {comment, <<" comment! ">>}]}]}))),
- ?assertEqual(
- <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
- iolist_to_binary(
- to_html({doctype,
- [<<"html">>, <<"PUBLIC">>,
- <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
- <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]}))),
- ?assertEqual(
- <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>,
- iolist_to_binary(
- to_html({<<"html">>,[],
- [{pi, <<"xml:namespace">>,
- [{<<"prefix">>,<<"o">>},
- {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]}))),
- ok.
-
-escape_test() ->
- ?assertEqual(
- <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
- escape(<<"&quot;\"word ><<up!&quot;">>)),
- ?assertEqual(
- <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
- escape("&quot;\"word ><<up!&quot;")),
- ?assertEqual(
- <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
- escape('&quot;\"word ><<up!&quot;')),
- ok.
-
-escape_attr_test() ->
- ?assertEqual(
- <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
- escape_attr(<<"&quot;\"word ><<up!&quot;">>)),
- ?assertEqual(
- <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
- escape_attr("&quot;\"word ><<up!&quot;")),
- ?assertEqual(
- <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
- escape_attr('&quot;\"word ><<up!&quot;')),
- ?assertEqual(
- <<"12345">>,
- escape_attr(12345)),
- ?assertEqual(
- <<"1.5">>,
- escape_attr(1.5)),
- ok.
-
-tokens_test() ->
- ?assertEqual(
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}],
- tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>)),
- ?assertEqual(
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}],
- tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>)),
- ?assertEqual(
- [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}],
- tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>)),
- ?assertEqual(
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}],
- tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>)),
- ?assertEqual(
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}],
- tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>)),
- ?assertEqual(
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}],
- tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>)),
- ?assertEqual(
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}],
- tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>)),
- ?assertEqual(
- [{start_tag, <<"textarea">>, [], false},
- {data, <<"<html></body>">>, false},
- {end_tag, <<"textarea">>}],
- tokens(<<"<textarea><html></body></textarea>">>)),
- ?assertEqual(
- [{start_tag, <<"textarea">>, [], false},
- {data, <<"<html></body></textareaz>">>, false}],
- tokens(<<"<textarea ><html></body></textareaz>">>)),
- ?assertEqual(
- [{pi, <<"xml:namespace">>,
- [{<<"prefix">>,<<"o">>},
- {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
- tokens(<<"<?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?>">>)),
- ?assertEqual(
- [{pi, <<"xml:namespace">>,
- [{<<"prefix">>,<<"o">>},
- {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
- tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office \n?>">>)),
- ?assertEqual(
- [{pi, <<"xml:namespace">>,
- [{<<"prefix">>,<<"o">>},
- {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
- tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office">>)),
- ?assertEqual(
- [{data, <<"<">>, false}],
- tokens(<<"&lt;">>)),
- ?assertEqual(
- [{data, <<"not html ">>, false},
- {data, <<"< at all">>, false}],
- tokens(<<"not html < at all">>)),
- ok.
-
-parse_test() ->
- D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
-<html>
- <head>
- <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
- <title>Foo</title>
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
- <!--[if lt IE 7]>
- <style type=\"text/css\">
- .no_ie { display: none; }
- </style>
- <![endif]-->
- <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- </head>
- <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
-</html>">>,
- ?assertEqual(
- {<<"html">>, [],
- [{<<"head">>, [],
- [{<<"meta">>,
- [{<<"http-equiv">>,<<"Content-Type">>},
- {<<"content">>,<<"text/html; charset=UTF-8">>}],
- []},
- {<<"title">>,[],[<<"Foo">>]},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/foo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {comment,<<"[if lt IE 7]>\n <style type=\"text/css\">\n .no_ie { display: none; }\n </style>\n <![endif]">>},
- {<<"link">>,
- [{<<"rel">>,<<"icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"shortcut icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []}]},
- {<<"body">>,
- [{<<"id">>,<<"home">>},
- {<<"class">>,<<"tundra">>}],
- [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
- parse(D0)),
- ?assertEqual(
- {<<"html">>,[],
- [{pi, <<"xml:namespace">>,
- [{<<"prefix">>,<<"o">>},
- {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]},
- parse(
- <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>)),
- ?assertEqual(
- {<<"html">>, [],
- [{<<"dd">>, [], [<<"foo">>]},
- {<<"dt">>, [], [<<"bar">>]}]},
- parse(<<"<html><dd>foo<dt>bar</html>">>)),
- %% Singleton sadness
- ?assertEqual(
- {<<"html">>, [],
- [{<<"link">>, [], []},
- <<"foo">>,
- {<<"br">>, [], []},
- <<"bar">>]},
- parse(<<"<html><link>foo<br>bar</html>">>)),
- ?assertEqual(
- {<<"html">>, [],
- [{<<"link">>, [], [<<"foo">>,
- {<<"br">>, [], []},
- <<"bar">>]}]},
- parse(<<"<html><link>foo<br>bar</link></html>">>)),
- ok.
-
-exhaustive_is_singleton_test() ->
- T = mochiweb_cover:clause_lookup_table(?MODULE, is_singleton),
- [?assertEqual(V, is_singleton(K)) || {K, V} <- T].
-
-tokenize_attributes_test() ->
- ?assertEqual(
- {<<"foo">>,
- [{<<"bar">>, <<"b\"az">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"taco", 16#c2, 16#a9>>, <<"bell">>},
- {<<"quux">>, <<"quux">>}],
- []},
- parse(<<"<foo bar=\"b&quot;az\" wibble taco&copy;=bell quux">>)),
- ok.
-
-tokens2_test() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
- ?assertEqual(
- [{start_tag,<<"channel">>,[],false},
- {start_tag,<<"title">>,[],false},
- {data,<<"from __future__ import *">>,false},
- {end_tag,<<"title">>},
- {start_tag,<<"link">>,[],true},
- {data,<<"http://bob.pythonmac.org">>,false},
- {end_tag,<<"link">>},
- {start_tag,<<"description">>,[],false},
- {data,<<"Bob's Rants">>,false},
- {end_tag,<<"description">>},
- {end_tag,<<"channel">>}],
- tokens(D0)),
- ok.
-
-to_tokens_test() ->
- ?assertEqual(
- [{start_tag, <<"p">>, [{class, 1}], false},
- {end_tag, <<"p">>}],
- to_tokens({p, [{class, 1}], []})),
- ?assertEqual(
- [{start_tag, <<"p">>, [], false},
- {end_tag, <<"p">>}],
- to_tokens({p})),
- ?assertEqual(
- [{'=', <<"data">>}],
- to_tokens({'=', <<"data">>})),
- ?assertEqual(
- [{comment, <<"comment">>}],
- to_tokens({comment, <<"comment">>})),
-    %% The two-tuple {Tag, Attrs} shorthand is only recognized in sub-tags:
-    %% {p, [{"class", "foo"}]} expands to {p, [{"class", "foo"}], []}.
-    %% At the top level a two-tuple is read as {Tag, Body} instead, so
-    %% {p, [{"class", "foo"}]} would become {p, [], [{"class", "foo"}]}.
- ?assertEqual(
- [{start_tag, <<"html">>, [], false},
- {start_tag, <<"p">>, [{class, 1}], false},
- {end_tag, <<"p">>},
- {end_tag, <<"html">>}],
- to_tokens({html, [{p, [{class, 1}]}]})),
- ok.
-
-parse2_test() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
- ?assertEqual(
- {<<"channel">>,[],
- [{<<"title">>,[],[<<"from __future__ import *">>]},
- {<<"link">>,[],[
- <<"http://bob.pythonmac.org">>,
- {<<"br">>,[],[]},
- <<"foo">>]},
- {<<"description">>,[],[<<"Bob's Rants">>]}]},
- parse(D0)),
- ok.
-
-parse_tokens_test() ->
- D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
- {data,<<"\n">>,true},
- {start_tag,<<"html">>,[],false}],
- ?assertEqual(
- {<<"html">>, [], []},
- parse_tokens(D0)),
- D1 = D0 ++ [{end_tag, <<"html">>}],
- ?assertEqual(
- {<<"html">>, [], []},
- parse_tokens(D1)),
- D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
- ?assertEqual(
- {<<"html">>, [], [{<<"body">>, [], []}]},
- parse_tokens(D2)),
- D3 = D0 ++ [{start_tag, <<"head">>, [], false},
- {end_tag, <<"head">>},
- {start_tag, <<"body">>, [], false}],
- ?assertEqual(
- {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]},
- parse_tokens(D3)),
- D4 = D3 ++ [{data,<<"\n">>,true},
- {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
- {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
- {end_tag,<<"a">>},
- {end_tag,<<"div">>},
- {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
- {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
- {end_tag,<<"div">>},
- {end_tag,<<"div">>}],
- ?assertEqual(
- {<<"html">>, [],
- [{<<"head">>, [], []},
- {<<"body">>, [],
- [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
- {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
- ]}]},
- parse_tokens(D4)),
- D5 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"boo">>,false},
- {data,<<"hoo">>,false},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- ?assertEqual(
- {<<"html">>, [], [<<"\nboohoo\n">>]},
- parse_tokens(D5)),
- D6 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- ?assertEqual(
- {<<"html">>, [], []},
- parse_tokens(D6)),
- D7 = [{start_tag,<<"html">>,[],false},
- {start_tag,<<"ul">>,[],false},
- {start_tag,<<"li">>,[],false},
- {data,<<"word">>,false},
- {start_tag,<<"li">>,[],false},
- {data,<<"up">>,false},
- {end_tag,<<"li">>},
- {start_tag,<<"li">>,[],false},
- {data,<<"fdsa">>,false},
- {start_tag,<<"br">>,[],true},
- {data,<<"asdf">>,false},
- {end_tag,<<"ul">>},
- {end_tag,<<"html">>}],
- ?assertEqual(
- {<<"html">>, [],
- [{<<"ul">>, [],
- [{<<"li">>, [], [<<"word">>]},
- {<<"li">>, [], [<<"up">>]},
- {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]},
- parse_tokens(D7)),
- ok.
-
-destack_test() ->
- {<<"a">>, [], []} =
- destack([{<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], []}]} =
- destack([{<<"b">>, [], []}, {<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]} =
- destack([{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}] =
- destack(<<"b">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}] =
- destack(<<"c">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}]),
- ok.
-
-doctype_test() ->
- ?assertEqual(
- {<<"html">>,[],[{<<"head">>,[],[]}]},
- mochiweb_html:parse("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
- "<html><head></head></body></html>")),
- %% http://code.google.com/p/mochiweb/issues/detail?id=52
- ?assertEqual(
- {<<"html">>,[],[{<<"head">>,[],[]}]},
- mochiweb_html:parse("<html>"
- "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
- "<head></head></body></html>")),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_http.erl b/1.1.x/src/mochiweb/mochiweb_http.erl
deleted file mode 100644
index ab0af7e8..00000000
--- a/1.1.x/src/mochiweb/mochiweb_http.erl
+++ /dev/null
@@ -1,273 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc HTTP server.
-
--module(mochiweb_http).
--author('bob@mochimedia.com').
--export([start/0, start/1, stop/0, stop/1]).
--export([loop/2, default_body/1]).
--export([after_response/2, reentry/1]).
--export([parse_range_request/1, range_skip_length/2]).
-
--define(REQUEST_RECV_TIMEOUT, 300000). % timeout waiting for request line
--define(HEADERS_RECV_TIMEOUT, 30000). % timeout waiting for headers
-
--define(MAX_HEADERS, 1000).
--define(DEFAULTS, [{name, ?MODULE},
- {port, 8888}]).
-
-parse_options(Options) ->
- {loop, HttpLoop} = proplists:lookup(loop, Options),
- Loop = fun (S) ->
- ?MODULE:loop(S, HttpLoop)
- end,
- Options1 = [{loop, Loop} | proplists:delete(loop, Options)],
- mochilists:set_defaults(?DEFAULTS, Options1).
-
-stop() ->
- mochiweb_socket_server:stop(?MODULE).
-
-stop(Name) ->
- mochiweb_socket_server:stop(Name).
-
-start() ->
- start([{ip, "127.0.0.1"},
- {loop, {?MODULE, default_body}}]).
-
-start(Options) ->
- mochiweb_socket_server:start(parse_options(Options)).
-
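-%% A hedged usage sketch, not part of the original module: starting a
-%% server on a chosen port with a custom loop fun. The port number and
-%% handler body here are illustrative only; parse_options/1 above fills
-%% in the remaining ?DEFAULTS.
-%%
-%%   {ok, _Pid} = mochiweb_http:start(
-%%                  [{port, 8080},
-%%                   {loop, fun (Req) ->
-%%                              Req:ok({"text/plain", [], "hello"})
-%%                          end}]).
-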
-frm(Body) ->
- ["<html><head></head><body>"
- "<form method=\"POST\">"
- "<input type=\"hidden\" value=\"message\" name=\"hidden\"/>"
- "<input type=\"submit\" value=\"regular POST\">"
- "</form>"
- "<br />"
- "<form method=\"POST\" enctype=\"multipart/form-data\""
- " action=\"/multipart\">"
- "<input type=\"hidden\" value=\"multipart message\" name=\"hidden\"/>"
- "<input type=\"file\" name=\"file\"/>"
- "<input type=\"submit\" value=\"multipart POST\" />"
- "</form>"
- "<pre>", Body, "</pre>"
- "</body></html>"].
-
-default_body(Req, M, "/chunked") when M =:= 'GET'; M =:= 'HEAD' ->
- Res = Req:ok({"text/plain", [], chunked}),
- Res:write_chunk("First chunk\r\n"),
- timer:sleep(5000),
- Res:write_chunk("Last chunk\r\n"),
- Res:write_chunk("");
-default_body(Req, M, _Path) when M =:= 'GET'; M =:= 'HEAD' ->
- Body = io_lib:format("~p~n", [[{parse_qs, Req:parse_qs()},
- {parse_cookie, Req:parse_cookie()},
- Req:dump()]]),
- Req:ok({"text/html",
- [mochiweb_cookies:cookie("mochiweb_http", "test_cookie")],
- frm(Body)});
-default_body(Req, 'POST', "/multipart") ->
- Body = io_lib:format("~p~n", [[{parse_qs, Req:parse_qs()},
- {parse_cookie, Req:parse_cookie()},
- {body, Req:recv_body()},
- Req:dump()]]),
- Req:ok({"text/html", [], frm(Body)});
-default_body(Req, 'POST', _Path) ->
- Body = io_lib:format("~p~n", [[{parse_qs, Req:parse_qs()},
- {parse_cookie, Req:parse_cookie()},
- {parse_post, Req:parse_post()},
- Req:dump()]]),
- Req:ok({"text/html", [], frm(Body)});
-default_body(Req, _Method, _Path) ->
- Req:respond({501, [], []}).
-
-default_body(Req) ->
- default_body(Req, Req:get(method), Req:get(path)).
-
-loop(Socket, Body) ->
- mochiweb_socket:setopts(Socket, [{packet, http}]),
- request(Socket, Body).
-
-request(Socket, Body) ->
- case mochiweb_socket:recv(Socket, 0, ?REQUEST_RECV_TIMEOUT) of
- {ok, {http_request, Method, Path, Version}} ->
- mochiweb_socket:setopts(Socket, [{packet, httph}]),
- headers(Socket, {Method, Path, Version}, [], Body, 0);
- {error, {http_error, "\r\n"}} ->
- request(Socket, Body);
- {error, {http_error, "\n"}} ->
- request(Socket, Body);
- {error, closed} ->
- mochiweb_socket:close(Socket),
- exit(normal);
- {error, timeout} ->
- mochiweb_socket:close(Socket),
- exit(normal);
- _Other ->
- handle_invalid_request(Socket)
- end.
-
-reentry(Body) ->
- fun (Req) ->
- ?MODULE:after_response(Body, Req)
- end.
-
-headers(Socket, Request, Headers, _Body, ?MAX_HEADERS) ->
- %% Too many headers sent, bad request.
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- handle_invalid_request(Socket, Request, Headers);
-headers(Socket, Request, Headers, Body, HeaderCount) ->
- case mochiweb_socket:recv(Socket, 0, ?HEADERS_RECV_TIMEOUT) of
- {ok, http_eoh} ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- Req = mochiweb:new_request({Socket, Request,
- lists:reverse(Headers)}),
- call_body(Body, Req),
- ?MODULE:after_response(Body, Req);
- {ok, {http_header, _, Name, _, Value}} ->
- headers(Socket, Request, [{Name, Value} | Headers], Body,
- 1 + HeaderCount);
- {error, closed} ->
- mochiweb_socket:close(Socket),
- exit(normal);
- _Other ->
- handle_invalid_request(Socket, Request, Headers)
- end.
-
-call_body({M, F}, Req) ->
- M:F(Req);
-call_body(Body, Req) ->
- Body(Req).
-
-handle_invalid_request(Socket) ->
- handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []).
-
-handle_invalid_request(Socket, Request, RevHeaders) ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- Req = mochiweb:new_request({Socket, Request,
- lists:reverse(RevHeaders)}),
- Req:respond({400, [], []}),
- mochiweb_socket:close(Socket),
- exit(normal).
-
-after_response(Body, Req) ->
- Socket = Req:get(socket),
- case Req:should_close() of
- true ->
- mochiweb_socket:close(Socket),
- exit(normal);
- false ->
- Req:cleanup(),
- ?MODULE:loop(Socket, Body)
- end.
-
-parse_range_request("bytes=0-") ->
-    %% "bytes=0-" asks for the whole entity; report it as no range at
-    %% all, which is what range_test below expects.
-    undefined;
-parse_range_request(RawRange) when is_list(RawRange) ->
- try
- "bytes=" ++ RangeString = RawRange,
- Ranges = string:tokens(RangeString, ","),
- lists:map(fun ("-" ++ V) ->
- {none, list_to_integer(V)};
- (R) ->
- case string:tokens(R, "-") of
- [S1, S2] ->
- {list_to_integer(S1), list_to_integer(S2)};
- [S] ->
- {list_to_integer(S), none}
- end
- end,
- Ranges)
- catch
- _:_ ->
- fail
- end.
-
-range_skip_length(Spec, Size) ->
- case Spec of
- {none, R} when R =< Size, R >= 0 ->
- {Size - R, R};
- {none, _OutOfRange} ->
- {0, Size};
- {R, none} when R >= 0, R < Size ->
- {R, Size - R};
- {_OutOfRange, none} ->
- invalid_range;
- {Start, End} when 0 =< Start, Start =< End, End < Size ->
- {Start, End - Start + 1};
- {_OutOfRange, _End} ->
- invalid_range
- end.
-
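-%% A minimal sketch of how parse_range_request/1 and range_skip_length/2
-%% compose when serving a single-range request against an in-memory
-%% binary. The helper name range_slice/2 is illustrative only and is not
-%% part of the original module (nor exported).
-range_slice(RawRange, Body) when is_binary(Body) ->
-    case parse_range_request(RawRange) of
-        [Spec] ->
-            case range_skip_length(Spec, byte_size(Body)) of
-                {Skip, Length} ->
-                    %% Slice Length bytes starting at offset Skip.
-                    <<_:Skip/binary, Part:Length/binary, _/binary>> = Body,
-                    {ok, Part};
-                invalid_range ->
-                    invalid_range
-            end;
-        undefined ->
-            %% "bytes=0-": serve the whole entity.
-            {ok, Body};
-        _ ->
-            %% fail, or a multi-range request this sketch does not handle.
-            fail
-    end.
-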
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-range_test() ->
- %% valid, single ranges
- ?assertEqual([{20, 30}], parse_range_request("bytes=20-30")),
- ?assertEqual([{20, none}], parse_range_request("bytes=20-")),
- ?assertEqual([{none, 20}], parse_range_request("bytes=-20")),
-
-    %% trivial single range: "bytes=0-" covers the whole entity and is
-    %% reported as no range at all
-    ?assertEqual(undefined, parse_range_request("bytes=0-")),
-
- %% invalid, single ranges
- ?assertEqual(fail, parse_range_request("")),
- ?assertEqual(fail, parse_range_request("garbage")),
- ?assertEqual(fail, parse_range_request("bytes=-20-30")),
-
- %% valid, multiple range
- ?assertEqual(
- [{20, 30}, {50, 100}, {110, 200}],
- parse_range_request("bytes=20-30,50-100,110-200")),
- ?assertEqual(
- [{20, none}, {50, 100}, {none, 200}],
- parse_range_request("bytes=20-,50-100,-200")),
-
- %% no ranges
- ?assertEqual([], parse_range_request("bytes=")),
- ok.
-
-range_skip_length_test() ->
- Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
- BodySize = byte_size(Body), %% 60
- BodySize = 60,
-
- %% these values assume BodySize =:= 60
- ?assertEqual({1,9}, range_skip_length({1,9}, BodySize)), %% 1-9
- ?assertEqual({10,10}, range_skip_length({10,19}, BodySize)), %% 10-19
- ?assertEqual({40, 20}, range_skip_length({none, 20}, BodySize)), %% -20
- ?assertEqual({30, 30}, range_skip_length({30, none}, BodySize)), %% 30-
-
- %% valid edge cases for range_skip_length
- ?assertEqual({BodySize, 0}, range_skip_length({none, 0}, BodySize)),
- ?assertEqual({0, BodySize}, range_skip_length({none, BodySize}, BodySize)),
- ?assertEqual({0, BodySize}, range_skip_length({0, none}, BodySize)),
- BodySizeLess1 = BodySize - 1,
- ?assertEqual({BodySizeLess1, 1},
- range_skip_length({BodySize - 1, none}, BodySize)),
-
- %% out of range, return whole thing
- ?assertEqual({0, BodySize},
- range_skip_length({none, BodySize + 1}, BodySize)),
- ?assertEqual({0, BodySize},
- range_skip_length({none, -1}, BodySize)),
-
- %% invalid ranges
- ?assertEqual(invalid_range,
- range_skip_length({-1, 30}, BodySize)),
- ?assertEqual(invalid_range,
- range_skip_length({0, BodySize + 1}, BodySize)),
- ?assertEqual(invalid_range,
- range_skip_length({-1, BodySize + 1}, BodySize)),
- ?assertEqual(invalid_range,
- range_skip_length({BodySize, 40}, BodySize)),
- ?assertEqual(invalid_range,
- range_skip_length({-1, none}, BodySize)),
- ?assertEqual(invalid_range,
- range_skip_length({BodySize, none}, BodySize)),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_io.erl b/1.1.x/src/mochiweb/mochiweb_io.erl
deleted file mode 100644
index 6ce57ec8..00000000
--- a/1.1.x/src/mochiweb/mochiweb_io.erl
+++ /dev/null
@@ -1,46 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Utilities for dealing with IO devices (open files).
-
--module(mochiweb_io).
--author('bob@mochimedia.com').
-
--export([iodevice_stream/3, iodevice_stream/2]).
--export([iodevice_foldl/4, iodevice_foldl/3]).
--export([iodevice_size/1]).
--define(READ_SIZE, 8192).
-
-iodevice_foldl(F, Acc, IoDevice) ->
- iodevice_foldl(F, Acc, IoDevice, ?READ_SIZE).
-
-iodevice_foldl(F, Acc, IoDevice, BufferSize) ->
- case file:read(IoDevice, BufferSize) of
- eof ->
- Acc;
- {ok, Data} ->
- iodevice_foldl(F, F(Data, Acc), IoDevice, BufferSize)
- end.
-
-iodevice_stream(Callback, IoDevice) ->
- iodevice_stream(Callback, IoDevice, ?READ_SIZE).
-
-iodevice_stream(Callback, IoDevice, BufferSize) ->
- F = fun (Data, ok) -> Callback(Data) end,
- ok = iodevice_foldl(F, ok, IoDevice, BufferSize).
-
-iodevice_size(IoDevice) ->
- {ok, Size} = file:position(IoDevice, eof),
- {ok, 0} = file:position(IoDevice, bof),
- Size.
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-
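-%% A minimal eunit sketch, not part of the original source: it assumes a
-%% scratch file may be created in the current working directory, and the
-%% filename below is illustrative only.
-iodevice_size_and_stream_test() ->
-    Filename = "mochiweb_io_test.tmp",
-    ok = file:write_file(Filename, <<"0123456789">>),
-    {ok, IoDevice} = file:open(Filename, [read, binary]),
-    %% iodevice_size/1 measures the device and rewinds it to the start.
-    ?assertEqual(10, iodevice_size(IoDevice)),
-    %% iodevice_stream/2 feeds the contents to the callback in order;
-    %% the callback must return ok, per the fold in iodevice_stream/3.
-    Self = self(),
-    ok = iodevice_stream(fun (Data) -> Self ! {data, Data}, ok end,
-                         IoDevice),
-    receive {data, D} -> ?assertEqual(<<"0123456789">>, D) end,
-    ok = file:close(IoDevice),
-    ok = file:delete(Filename).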
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_mime.erl b/1.1.x/src/mochiweb/mochiweb_mime.erl
deleted file mode 100644
index 5344aee7..00000000
--- a/1.1.x/src/mochiweb/mochiweb_mime.erl
+++ /dev/null
@@ -1,94 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Gives a good MIME type guess based on file extension.
-
--module(mochiweb_mime).
--author('bob@mochimedia.com').
--export([from_extension/1]).
-
-%% @spec from_extension(S::string()) -> string() | undefined
-%% @doc Given a filename extension (e.g. ".html") return a guess for the MIME
-%% type such as "text/html". Will return the atom undefined if no good
-%% guess is available.
-from_extension(".html") ->
- "text/html";
-from_extension(".xhtml") ->
- "application/xhtml+xml";
-from_extension(".xml") ->
- "application/xml";
-from_extension(".css") ->
- "text/css";
-from_extension(".js") ->
- "application/x-javascript";
-from_extension(".jpg") ->
- "image/jpeg";
-from_extension(".gif") ->
- "image/gif";
-from_extension(".png") ->
- "image/png";
-from_extension(".swf") ->
- "application/x-shockwave-flash";
-from_extension(".zip") ->
- "application/zip";
-from_extension(".bz2") ->
- "application/x-bzip2";
-from_extension(".gz") ->
- "application/x-gzip";
-from_extension(".tar") ->
- "application/x-tar";
-from_extension(".tgz") ->
- "application/x-gzip";
-from_extension(".txt") ->
- "text/plain";
-from_extension(".doc") ->
- "application/msword";
-from_extension(".pdf") ->
- "application/pdf";
-from_extension(".xls") ->
- "application/vnd.ms-excel";
-from_extension(".rtf") ->
- "application/rtf";
-from_extension(".mov") ->
- "video/quicktime";
-from_extension(".mp3") ->
- "audio/mpeg";
-from_extension(".z") ->
- "application/x-compress";
-from_extension(".wav") ->
- "audio/x-wav";
-from_extension(".ico") ->
- "image/x-icon";
-from_extension(".bmp") ->
- "image/bmp";
-from_extension(".m4a") ->
- "audio/mpeg";
-from_extension(".m3u") ->
- "audio/x-mpegurl";
-from_extension(".exe") ->
- "application/octet-stream";
-from_extension(".csv") ->
- "text/csv";
-from_extension(_) ->
- undefined.
-
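-%% A small usage sketch, not part of the original module: pairing
-%% from_extension/1 with filename:extension/1 to guess a MIME type for a
-%% whole path, with a caller-supplied default for unknown extensions.
-%% The name guess_mime/2 is illustrative only (and unexported).
-guess_mime(Path, Default) ->
-    case from_extension(string:to_lower(filename:extension(Path))) of
-        undefined ->
-            Default;
-        Mime ->
-            Mime
-    end.
-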
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-exhaustive_from_extension_test() ->
- T = mochiweb_cover:clause_lookup_table(?MODULE, from_extension),
- [?assertEqual(V, from_extension(K)) || {K, V} <- T].
-
-from_extension_test() ->
- ?assertEqual("text/html",
- from_extension(".html")),
- ?assertEqual(undefined,
- from_extension("")),
- ?assertEqual(undefined,
- from_extension(".wtf")),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_multipart.erl b/1.1.x/src/mochiweb/mochiweb_multipart.erl
deleted file mode 100644
index 3069cf4d..00000000
--- a/1.1.x/src/mochiweb/mochiweb_multipart.erl
+++ /dev/null
@@ -1,824 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Utilities for parsing multipart/form-data.
-
--module(mochiweb_multipart).
--author('bob@mochimedia.com').
-
--export([parse_form/1, parse_form/2]).
--export([parse_multipart_request/2]).
--export([parts_to_body/3, parts_to_multipart_body/4]).
--export([default_file_handler/2]).
-
--define(CHUNKSIZE, 4096).
-
--record(mp, {state, boundary, length, buffer, callback, req}).
-
-%% TODO: DOCUMENT THIS MODULE.
-%% @type key() = atom() | string() | binary().
-%% @type value() = atom() | iolist() | integer().
-%% @type header() = {key(), value()}.
-%% @type bodypart() = {Start::integer(), End::integer(), Body::iolist()}.
-%% @type formfile() = {Name::string(), ContentType::string(), Content::binary()}.
-%% @type request().
-%% @type file_handler() = (Filename::string(), ContentType::string()) -> file_handler_callback().
-%% @type file_handler_callback() = (binary() | eof) -> file_handler_callback() | term().
-
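-%% A brief orientation, offered as a sketch rather than authoritative
-%% documentation (the module is marked TODO above): the streaming parser
-%% drives a callback fun through the sequence
-%%   {headers, [header()]}, {body, binary()}..., body_end
-%% for each part, and finally eof. Each invocation returns the fun to
-%% call next, except at eof, where it returns the final result. A
-%% do-nothing callback (the name ignore_parts is illustrative only):
-%%
-%%   ignore_parts(eof) -> ok;
-%%   ignore_parts(_Event) -> fun ignore_parts/1.
-%%
-%% parse_multipart_request(Req, fun ignore_parts/1) then consumes the
-%% request and returns {RemainingLength, Rest, ok}, as the tests below
-%% check with {0, <<>>, ok}.
-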
-%% @spec parts_to_body([bodypart()], ContentType::string(),
-%% Size::integer()) -> {[header()], iolist()}
-%% @doc Return {[header()], iolist()} representing the body for the given
-%% parts, may be a single part or multipart.
-parts_to_body([{Start, End, Body}], ContentType, Size) ->
- HeaderList = [{"Content-Type", ContentType},
- {"Content-Range",
- ["bytes ",
- mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
- "/", mochiweb_util:make_io(Size)]}],
- {HeaderList, Body};
-parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
- parts_to_multipart_body(BodyList, ContentType, Size,
- mochihex:to_hex(crypto:rand_bytes(8))).
-
-%% @spec parts_to_multipart_body([bodypart()], ContentType::string(),
-%% Size::integer(), Boundary::string()) ->
-%% {[header()], iolist()}
-%% @doc Return {[header()], iolist()} representing the body for the given
-%% parts, always a multipart response.
-parts_to_multipart_body(BodyList, ContentType, Size, Boundary) ->
- HeaderList = [{"Content-Type",
- ["multipart/byteranges; ",
- "boundary=", Boundary]}],
- MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
-
- {HeaderList, MultiPartBody}.
-
-%% @spec multipart_body([bodypart()], ContentType::string(),
-%% Boundary::string(), Size::integer()) -> iolist()
-%% @doc Return the representation of a multipart body for the given [bodypart()].
-multipart_body([], _ContentType, Boundary, _Size) ->
- ["--", Boundary, "--\r\n"];
-multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
- ["--", Boundary, "\r\n",
- "Content-Type: ", ContentType, "\r\n",
- "Content-Range: ",
- "bytes ", mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
- "/", mochiweb_util:make_io(Size), "\r\n\r\n",
- Body, "\r\n"
- | multipart_body(BodyList, ContentType, Boundary, Size)].
-
-%% @spec parse_form(request()) -> [{string(), string() | formfile()}]
-%% @doc Parse a multipart form from the given request using the in-memory
-%% default_file_handler/2.
-parse_form(Req) ->
- parse_form(Req, fun default_file_handler/2).
-
-%% @spec parse_form(request(), F::file_handler()) -> [{string(), string() | term()}]
-%% @doc Parse a multipart form from the given request using the given file_handler().
-parse_form(Req, FileHandler) ->
- Callback = fun (Next) -> parse_form_outer(Next, FileHandler, []) end,
- {_, _, Res} = parse_multipart_request(Req, Callback),
- Res.
-
-parse_form_outer(eof, _, Acc) ->
- lists:reverse(Acc);
-parse_form_outer({headers, H}, FileHandler, State) ->
- {"form-data", H1} = proplists:get_value("content-disposition", H),
- Name = proplists:get_value("name", H1),
- Filename = proplists:get_value("filename", H1),
- case Filename of
- undefined ->
- fun (Next) ->
- parse_form_value(Next, {Name, []}, FileHandler, State)
- end;
- _ ->
- ContentType = proplists:get_value("content-type", H),
- Handler = FileHandler(Filename, ContentType),
- fun (Next) ->
- parse_form_file(Next, {Name, Handler}, FileHandler, State)
- end
- end.
-
-parse_form_value(body_end, {Name, Acc}, FileHandler, State) ->
- Value = binary_to_list(iolist_to_binary(lists:reverse(Acc))),
- State1 = [{Name, Value} | State],
- fun (Next) -> parse_form_outer(Next, FileHandler, State1) end;
-parse_form_value({body, Data}, {Name, Acc}, FileHandler, State) ->
- Acc1 = [Data | Acc],
- fun (Next) -> parse_form_value(Next, {Name, Acc1}, FileHandler, State) end.
-
-parse_form_file(body_end, {Name, Handler}, FileHandler, State) ->
- Value = Handler(eof),
- State1 = [{Name, Value} | State],
- fun (Next) -> parse_form_outer(Next, FileHandler, State1) end;
-parse_form_file({body, Data}, {Name, Handler}, FileHandler, State) ->
- H1 = Handler(Data),
- fun (Next) -> parse_form_file(Next, {Name, H1}, FileHandler, State) end.
-
-default_file_handler(Filename, ContentType) ->
- default_file_handler_1(Filename, ContentType, []).
-
-default_file_handler_1(Filename, ContentType, Acc) ->
- fun(eof) ->
- Value = iolist_to_binary(lists:reverse(Acc)),
- {Filename, ContentType, Value};
- (Next) ->
- default_file_handler_1(Filename, ContentType, [Next | Acc])
- end.
-
-parse_multipart_request(Req, Callback) ->
- %% TODO: Support chunked?
- Length = list_to_integer(Req:get_header_value("content-length")),
- Boundary = iolist_to_binary(
- get_boundary(Req:get_header_value("content-type"))),
- Prefix = <<"\r\n--", Boundary/binary>>,
- BS = byte_size(Boundary),
- Chunk = read_chunk(Req, Length),
- Length1 = Length - byte_size(Chunk),
- <<"--", Boundary:BS/binary, "\r\n", Rest/binary>> = Chunk,
- feed_mp(headers, flash_multipart_hack(#mp{boundary=Prefix,
- length=Length1,
- buffer=Rest,
- callback=Callback,
- req=Req})).
-
-parse_headers(<<>>) ->
- [];
-parse_headers(Binary) ->
- parse_headers(Binary, []).
-
-parse_headers(Binary, Acc) ->
- case find_in_binary(<<"\r\n">>, Binary) of
- {exact, N} ->
- <<Line:N/binary, "\r\n", Rest/binary>> = Binary,
- parse_headers(Rest, [split_header(Line) | Acc]);
- not_found ->
- lists:reverse([split_header(Binary) | Acc])
- end.
-
-split_header(Line) ->
- {Name, [$: | Value]} = lists:splitwith(fun (C) -> C =/= $: end,
- binary_to_list(Line)),
- {string:to_lower(string:strip(Name)),
- mochiweb_util:parse_header(Value)}.
-
-read_chunk(Req, Length) when Length > 0 ->
-    %% Read at most ?CHUNKSIZE bytes at a time.
-    case Length < ?CHUNKSIZE of
-        true ->
-            Req:recv(Length);
-        false ->
-            Req:recv(?CHUNKSIZE)
-    end.
-
-read_more(State=#mp{length=Length, buffer=Buffer, req=Req}) ->
- Data = read_chunk(Req, Length),
- Buffer1 = <<Buffer/binary, Data/binary>>,
- flash_multipart_hack(State#mp{length=Length - byte_size(Data),
- buffer=Buffer1}).
-
-flash_multipart_hack(State=#mp{length=0, buffer=Buffer, boundary=Prefix}) ->
- %% http://code.google.com/p/mochiweb/issues/detail?id=22
- %% Flash doesn't terminate multipart with \r\n properly so we fix it up here
- PrefixSize = size(Prefix),
- case size(Buffer) - (2 + PrefixSize) of
- Seek when Seek >= 0 ->
- case Buffer of
- <<_:Seek/binary, Prefix:PrefixSize/binary, "--">> ->
- Buffer1 = <<Buffer/binary, "\r\n">>,
- State#mp{buffer=Buffer1};
- _ ->
- State
- end;
- _ ->
- State
- end;
-flash_multipart_hack(State) ->
- State.
-
-feed_mp(headers, State=#mp{buffer=Buffer, callback=Callback}) ->
- {State1, P} = case find_in_binary(<<"\r\n\r\n">>, Buffer) of
- {exact, N} ->
- {State, N};
- _ ->
- S1 = read_more(State),
- %% Assume headers must be less than ?CHUNKSIZE
- {exact, N} = find_in_binary(<<"\r\n\r\n">>,
- S1#mp.buffer),
- {S1, N}
- end,
- <<Headers:P/binary, "\r\n\r\n", Rest/binary>> = State1#mp.buffer,
- NextCallback = Callback({headers, parse_headers(Headers)}),
- feed_mp(body, State1#mp{buffer=Rest,
- callback=NextCallback});
-feed_mp(body, State=#mp{boundary=Prefix, buffer=Buffer, callback=Callback}) ->
- Boundary = find_boundary(Prefix, Buffer),
- case Boundary of
- {end_boundary, Start, Skip} ->
- <<Data:Start/binary, _:Skip/binary, Rest/binary>> = Buffer,
- C1 = Callback({body, Data}),
- C2 = C1(body_end),
- {State#mp.length, Rest, C2(eof)};
- {next_boundary, Start, Skip} ->
- <<Data:Start/binary, _:Skip/binary, Rest/binary>> = Buffer,
- C1 = Callback({body, Data}),
- feed_mp(headers, State#mp{callback=C1(body_end),
- buffer=Rest});
- {maybe, Start} ->
- <<Data:Start/binary, Rest/binary>> = Buffer,
- feed_mp(body, read_more(State#mp{callback=Callback({body, Data}),
- buffer=Rest}));
- not_found ->
- {Data, Rest} = {Buffer, <<>>},
- feed_mp(body, read_more(State#mp{callback=Callback({body, Data}),
- buffer=Rest}))
- end.
-
-get_boundary(ContentType) ->
- {"multipart/form-data", Opts} = mochiweb_util:parse_header(ContentType),
- case proplists:get_value("boundary", Opts) of
- S when is_list(S) ->
- S
- end.
-
-find_in_binary(B, Data) when size(B) > 0 ->
- case size(Data) - size(B) of
- Last when Last < 0 ->
- partial_find(B, Data, 0, size(Data));
- Last ->
- find_in_binary(B, size(B), Data, 0, Last)
- end.
-
-find_in_binary(B, BS, D, N, Last) when N =< Last ->
- case D of
- <<_:N/binary, B:BS/binary, _/binary>> ->
- {exact, N};
- _ ->
- find_in_binary(B, BS, D, 1 + N, Last)
- end;
-find_in_binary(B, BS, D, N, Last) when N =:= 1 + Last ->
- partial_find(B, D, N, BS - 1).
-
-partial_find(_B, _D, _N, 0) ->
- not_found;
-partial_find(B, D, N, K) ->
- <<B1:K/binary, _/binary>> = B,
- case D of
- <<_Skip:N/binary, B1:K/binary>> ->
- {partial, N, K};
- _ ->
- partial_find(B, D, 1 + N, K - 1)
- end.
-
-find_boundary(Prefix, Data) ->
- case find_in_binary(Prefix, Data) of
- {exact, Skip} ->
- PrefixSkip = Skip + size(Prefix),
- case Data of
- <<_:PrefixSkip/binary, "\r\n", _/binary>> ->
- {next_boundary, Skip, size(Prefix) + 2};
- <<_:PrefixSkip/binary, "--\r\n", _/binary>> ->
- {end_boundary, Skip, size(Prefix) + 4};
- _ when size(Data) < PrefixSkip + 4 ->
- %% Underflow
- {maybe, Skip};
- _ ->
- %% False positive
- not_found
- end;
- {partial, Skip, Length} when (Skip + Length) =:= size(Data) ->
- %% Underflow
- {maybe, Skip};
- _ ->
- not_found
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-ssl_cert_opts() ->
- EbinDir = filename:dirname(code:which(?MODULE)),
- CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
- CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
- KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
- [{certfile, CertFile}, {keyfile, KeyFile}].
-
-with_socket_server(Transport, ServerFun, ClientFun) ->
- ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
- ServerOpts = case Transport of
- plain ->
- ServerOpts0;
- ssl ->
- ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
- end,
- {ok, Server} = mochiweb_socket_server:start(ServerOpts),
- Port = mochiweb_socket_server:get(Server, port),
- ClientOpts = [binary, {active, false}],
- {ok, Client} = case Transport of
- plain ->
- gen_tcp:connect("127.0.0.1", Port, ClientOpts);
- ssl ->
- ClientOpts1 = [{ssl_imp, new} | ClientOpts],
- {ok, SslSocket} = ssl:connect("127.0.0.1", Port, ClientOpts1),
- {ok, {ssl, SslSocket}}
- end,
- Res = (catch ClientFun(Client)),
- mochiweb_socket_server:stop(Server),
- Res.
-
-fake_request(Socket, ContentType, Length) ->
- mochiweb_request:new(Socket,
- 'POST',
- "/multipart",
- {1,1},
- mochiweb_headers:make(
- [{"content-type", ContentType},
- {"content-length", Length}])).
-
-test_callback({body, <<>>}, Rest=[body_end | _]) ->
- %% When expecting the body_end we might get an empty binary
- fun (Next) -> test_callback(Next, Rest) end;
-test_callback({body, Got}, [{body, Expect} | Rest]) when Got =/= Expect ->
- %% Partial response
- GotSize = size(Got),
- <<Got:GotSize/binary, Expect1/binary>> = Expect,
- fun (Next) -> test_callback(Next, [{body, Expect1} | Rest]) end;
-test_callback(Got, [Expect | Rest]) ->
-    ?assertEqual(Expect, Got),
- case Rest of
- [] ->
- ok;
- _ ->
- fun (Next) -> test_callback(Next, Rest) end
- end.
-
-parse3_http_test() ->
- parse3(plain).
-
-parse3_https_test() ->
- parse3(ssl).
-
-parse3(Transport) ->
- ContentType = "multipart/form-data; boundary=---------------------------7386909285754635891697677882",
- BinContent = <<"-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"file\"; filename=\"test_file.txt\"\r\nContent-Type: text/plain\r\n\r\nWoo multiline text file\n\nLa la la\r\n-----------------------------7386909285754635891697677882--\r\n">>,
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "hidden"}]}}]},
- {body, <<"multipart message">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "file"}, {"filename", "test_file.txt"}]}},
- {"content-type", {"text/plain", []}}]},
- {body, <<"Woo multiline text file\n\nLa la la">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse2_http_test() ->
- parse2(plain).
-
-parse2_https_test() ->
- parse2(ssl).
-
-parse2(Transport) ->
- ContentType = "multipart/form-data; boundary=---------------------------6072231407570234361599764024",
- BinContent = <<"-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"file\"; filename=\"\"\r\nContent-Type: application/octet-stream\r\n\r\n\r\n-----------------------------6072231407570234361599764024--\r\n">>,
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "hidden"}]}}]},
- {body, <<"multipart message">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "file"}, {"filename", ""}]}},
- {"content-type", {"application/octet-stream", []}}]},
- {body, <<>>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse_form_http_test() ->
- do_parse_form(plain).
-
-parse_form_https_test() ->
- do_parse_form(ssl).
-
-do_parse_form(Transport) ->
- ContentType = "multipart/form-data; boundary=AaB03x",
- "AaB03x" = get_boundary(ContentType),
- Content = mochiweb_util:join(
- ["--AaB03x",
- "Content-Disposition: form-data; name=\"submit-name\"",
- "",
- "Larry",
- "--AaB03x",
- "Content-Disposition: form-data; name=\"files\";"
- ++ "filename=\"file1.txt\"",
- "Content-Type: text/plain",
- "",
- "... contents of file1.txt ...",
- "--AaB03x--",
- ""], "\r\n"),
- BinContent = iolist_to_binary(Content),
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_form(Req),
- [{"submit-name", "Larry"},
- {"files", {"file1.txt", {"text/plain",[]},
- <<"... contents of file1.txt ...">>}
- }] = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse_http_test() ->
- do_parse(plain).
-
-parse_https_test() ->
- do_parse(ssl).
-
-do_parse(Transport) ->
- ContentType = "multipart/form-data; boundary=AaB03x",
- "AaB03x" = get_boundary(ContentType),
- Content = mochiweb_util:join(
- ["--AaB03x",
- "Content-Disposition: form-data; name=\"submit-name\"",
- "",
- "Larry",
- "--AaB03x",
- "Content-Disposition: form-data; name=\"files\";"
- ++ "filename=\"file1.txt\"",
- "Content-Type: text/plain",
- "",
- "... contents of file1.txt ...",
- "--AaB03x--",
- ""], "\r\n"),
- BinContent = iolist_to_binary(Content),
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "submit-name"}]}}]},
- {body, <<"Larry">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
- {"content-type", {"text/plain", []}}]},
- {body, <<"... contents of file1.txt ...">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse_partial_body_boundary_http_test() ->
- parse_partial_body_boundary(plain).
-
-parse_partial_body_boundary_https_test() ->
- parse_partial_body_boundary(ssl).
-
-parse_partial_body_boundary(Transport) ->
- Boundary = string:copies("$", 2048),
- ContentType = "multipart/form-data; boundary=" ++ Boundary,
- ?assertEqual(Boundary, get_boundary(ContentType)),
- Content = mochiweb_util:join(
- ["--" ++ Boundary,
- "Content-Disposition: form-data; name=\"submit-name\"",
- "",
- "Larry",
- "--" ++ Boundary,
- "Content-Disposition: form-data; name=\"files\";"
- ++ "filename=\"file1.txt\"",
- "Content-Type: text/plain",
- "",
- "... contents of file1.txt ...",
- "--" ++ Boundary ++ "--",
- ""], "\r\n"),
- BinContent = iolist_to_binary(Content),
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "submit-name"}]}}]},
- {body, <<"Larry">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
- {"content-type", {"text/plain", []}}
- ]},
- {body, <<"... contents of file1.txt ...">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse_large_header_http_test() ->
- parse_large_header(plain).
-
-parse_large_header_https_test() ->
- parse_large_header(ssl).
-
-parse_large_header(Transport) ->
- ContentType = "multipart/form-data; boundary=AaB03x",
- "AaB03x" = get_boundary(ContentType),
- Content = mochiweb_util:join(
- ["--AaB03x",
- "Content-Disposition: form-data; name=\"submit-name\"",
- "",
- "Larry",
- "--AaB03x",
- "Content-Disposition: form-data; name=\"files\";"
- ++ "filename=\"file1.txt\"",
- "Content-Type: text/plain",
- "x-large-header: " ++ string:copies("%", 4096),
- "",
- "... contents of file1.txt ...",
- "--AaB03x--",
- ""], "\r\n"),
- BinContent = iolist_to_binary(Content),
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "submit-name"}]}}]},
- {body, <<"Larry">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
- {"content-type", {"text/plain", []}},
- {"x-large-header", {string:copies("%", 4096), []}}
- ]},
- {body, <<"... contents of file1.txt ...">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-find_boundary_test() ->
- B = <<"\r\n--X">>,
- {next_boundary, 0, 7} = find_boundary(B, <<"\r\n--X\r\nRest">>),
- {next_boundary, 1, 7} = find_boundary(B, <<"!\r\n--X\r\nRest">>),
- {end_boundary, 0, 9} = find_boundary(B, <<"\r\n--X--\r\nRest">>),
- {end_boundary, 1, 9} = find_boundary(B, <<"!\r\n--X--\r\nRest">>),
- not_found = find_boundary(B, <<"--X\r\nRest">>),
- {maybe, 0} = find_boundary(B, <<"\r\n--X\r">>),
- {maybe, 1} = find_boundary(B, <<"!\r\n--X\r">>),
- P = <<"\r\n-----------------------------16037454351082272548568224146">>,
- B0 = <<55,212,131,77,206,23,216,198,35,87,252,118,252,8,25,211,132,229,
- 182,42,29,188,62,175,247,243,4,4,0,59, 13,10,45,45,45,45,45,45,45,
- 45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,
- 49,54,48,51,55,52,53,52,51,53,49>>,
- {maybe, 30} = find_boundary(P, B0),
- not_found = find_boundary(B, <<"\r\n--XJOPKE">>),
- ok.
-
-find_in_binary_test() ->
- {exact, 0} = find_in_binary(<<"foo">>, <<"foobarbaz">>),
- {exact, 1} = find_in_binary(<<"oo">>, <<"foobarbaz">>),
- {exact, 8} = find_in_binary(<<"z">>, <<"foobarbaz">>),
- not_found = find_in_binary(<<"q">>, <<"foobarbaz">>),
- {partial, 7, 2} = find_in_binary(<<"azul">>, <<"foobarbaz">>),
- {exact, 0} = find_in_binary(<<"foobarbaz">>, <<"foobarbaz">>),
- {partial, 0, 3} = find_in_binary(<<"foobar">>, <<"foo">>),
- {partial, 1, 3} = find_in_binary(<<"foobar">>, <<"afoo">>),
- ok.
-
-flash_parse_http_test() ->
- flash_parse(plain).
-
-flash_parse_https_test() ->
- flash_parse(ssl).
-
-flash_parse(Transport) ->
- ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
- "----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
- BinContent = <<"------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Filename\"\r\n\r\nhello.txt\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"success_action_status\"\r\n\r\n201\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"\r\nContent-Type: application/octet-stream\r\n\r\nhello\n\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Upload\"\r\n\r\nSubmit Query\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5--">>,
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "Filename"}]}}]},
- {body, <<"hello.txt">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "success_action_status"}]}}]},
- {body, <<"201">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "file"}, {"filename", "hello.txt"}]}},
- {"content-type", {"application/octet-stream", []}}]},
- {body, <<"hello\n">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "Upload"}]}}]},
- {body, <<"Submit Query">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-flash_parse2_http_test() ->
- flash_parse2(plain).
-
-flash_parse2_https_test() ->
- flash_parse2(ssl).
-
-flash_parse2(Transport) ->
- ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
- "----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
- Chunk = iolist_to_binary(string:copies("%", 4096)),
- BinContent = <<"------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Filename\"\r\n\r\nhello.txt\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"success_action_status\"\r\n\r\n201\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"\r\nContent-Type: application/octet-stream\r\n\r\n", Chunk/binary, "\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Upload\"\r\n\r\nSubmit Query\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5--">>,
- Expect = [{headers,
- [{"content-disposition",
- {"form-data", [{"name", "Filename"}]}}]},
- {body, <<"hello.txt">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "success_action_status"}]}}]},
- {body, <<"201">>},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "file"}, {"filename", "hello.txt"}]}},
- {"content-type", {"application/octet-stream", []}}]},
- {body, Chunk},
- body_end,
- {headers,
- [{"content-disposition",
- {"form-data", [{"name", "Upload"}]}}]},
- {body, <<"Submit Query">>},
- body_end,
- eof],
- TestCallback = fun (Next) -> test_callback(Next, Expect) end,
- ServerFun = fun (Socket) ->
- ok = mochiweb_socket:send(Socket, BinContent),
- exit(normal)
- end,
- ClientFun = fun (Socket) ->
- Req = fake_request(Socket, ContentType,
- byte_size(BinContent)),
- Res = parse_multipart_request(Req, TestCallback),
- {0, <<>>, ok} = Res,
- ok
- end,
- ok = with_socket_server(Transport, ServerFun, ClientFun),
- ok.
-
-parse_headers_test() ->
- ?assertEqual([], parse_headers(<<>>)).
-
-flash_multipart_hack_test() ->
- Buffer = <<"prefix-">>,
- Prefix = <<"prefix">>,
- State = #mp{length=0, buffer=Buffer, boundary=Prefix},
- ?assertEqual(State,
- flash_multipart_hack(State)).
-
-parts_to_body_single_test() ->
- {HL, B} = parts_to_body([{0, 5, <<"01234">>}],
- "text/plain",
- 10),
- [{"Content-Range", Range},
- {"Content-Type", Type}] = lists:sort(HL),
- ?assertEqual(
- <<"bytes 0-5/10">>,
- iolist_to_binary(Range)),
- ?assertEqual(
- <<"text/plain">>,
- iolist_to_binary(Type)),
- ?assertEqual(
- <<"01234">>,
- iolist_to_binary(B)),
- ok.
-
-parts_to_body_multi_test() ->
- {[{"Content-Type", Type}],
- _B} = parts_to_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
- "text/plain",
- 10),
- ?assertMatch(
- <<"multipart/byteranges; boundary=", _/binary>>,
- iolist_to_binary(Type)),
- ok.
-
-parts_to_multipart_body_test() ->
- {[{"Content-Type", V}], B} = parts_to_multipart_body(
- [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
- "text/plain",
- 10,
- "BOUNDARY"),
- MB = multipart_body(
- [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
- "text/plain",
- "BOUNDARY",
- 10),
- ?assertEqual(
- <<"multipart/byteranges; boundary=BOUNDARY">>,
- iolist_to_binary(V)),
- ?assertEqual(
- iolist_to_binary(MB),
- iolist_to_binary(B)),
- ok.
-
-multipart_body_test() ->
- ?assertEqual(
- <<"--BOUNDARY--\r\n">>,
- iolist_to_binary(multipart_body([], "text/plain", "BOUNDARY", 0))),
- ?assertEqual(
- <<"--BOUNDARY\r\n"
- "Content-Type: text/plain\r\n"
- "Content-Range: bytes 0-5/10\r\n\r\n"
- "01234\r\n"
- "--BOUNDARY\r\n"
- "Content-Type: text/plain\r\n"
- "Content-Range: bytes 5-10/10\r\n\r\n"
- "56789\r\n"
- "--BOUNDARY--\r\n">>,
- iolist_to_binary(multipart_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
- "text/plain",
- "BOUNDARY",
- 10))),
- ok.
-
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_request.erl b/1.1.x/src/mochiweb/mochiweb_request.erl
deleted file mode 100644
index ffe4e9eb..00000000
--- a/1.1.x/src/mochiweb/mochiweb_request.erl
+++ /dev/null
@@ -1,768 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc MochiWeb HTTP Request abstraction.
-
--module(mochiweb_request, [Socket, Method, RawPath, Version, Headers]).
--author('bob@mochimedia.com').
-
--include_lib("kernel/include/file.hrl").
--include("internal.hrl").
-
--define(QUIP, "Any of you quaids got a smint?").
-
--export([get_header_value/1, get_primary_header_value/1, get/1, dump/0]).
--export([send/1, recv/1, recv/2, recv_body/0, recv_body/1, stream_body/3]).
--export([start_response/1, start_response_length/1, start_raw_response/1]).
--export([respond/1, ok/1]).
--export([not_found/0, not_found/1]).
--export([parse_post/0, parse_qs/0]).
--export([should_close/0, cleanup/0]).
--export([parse_cookie/0, get_cookie_value/1]).
--export([serve_file/2, serve_file/3]).
--export([accepted_encodings/1]).
--export([accepts_content_type/1]).
-
--define(SAVE_QS, mochiweb_request_qs).
--define(SAVE_PATH, mochiweb_request_path).
--define(SAVE_RECV, mochiweb_request_recv).
--define(SAVE_BODY, mochiweb_request_body).
--define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
--define(SAVE_POST, mochiweb_request_post).
--define(SAVE_COOKIE, mochiweb_request_cookie).
--define(SAVE_FORCE_CLOSE, mochiweb_request_force_close).
-
-%% @type iolist() = [iolist() | binary() | char()].
-%% @type iodata() = binary() | iolist().
-%% @type key() = atom() | string() | binary().
-%% @type value() = atom() | string() | binary() | integer().
-%% @type headers(). A mochiweb_headers structure.
-%% @type response(). A mochiweb_response parameterized module instance.
-%% @type ioheaders() = headers() | [{key(), value()}].
-
-% 10 second default idle timeout
--define(IDLE_TIMEOUT, 10000).
-
-% Maximum recv_body() length of 1MB
--define(MAX_RECV_BODY, (1024*1024)).
-
-%% @spec get_header_value(K) -> undefined | Value
-%% @doc Get the value of a given request header.
-get_header_value(K) ->
- mochiweb_headers:get_value(K, Headers).
-
-get_primary_header_value(K) ->
- mochiweb_headers:get_primary_value(K, Headers).
-
-%% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
-
-%% @spec get(field()) -> term()
-%% @doc Return the internal representation of the given field. If
-%% <code>socket</code> is requested on a HTTPS connection, then
-%% an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
-%% You can use <code>SslSocket</code> with the <code>ssl</code>
-%% application, eg: <code>ssl:peercert(SslSocket)</code>.
-get(socket) ->
- Socket;
-get(scheme) ->
- case mochiweb_socket:type(Socket) of
- plain ->
- http;
- ssl ->
- https
- end;
-get(method) ->
- Method;
-get(raw_path) ->
- RawPath;
-get(version) ->
- Version;
-get(headers) ->
- Headers;
-get(peer) ->
- case mochiweb_socket:peername(Socket) of
- {ok, {Addr={10, _, _, _}, _Port}} ->
- case get_header_value("x-forwarded-for") of
- undefined ->
- inet_parse:ntoa(Addr);
- Hosts ->
- string:strip(lists:last(string:tokens(Hosts, ",")))
- end;
- {ok, {{127, 0, 0, 1}, _Port}} ->
- case get_header_value("x-forwarded-for") of
- undefined ->
- "127.0.0.1";
- Hosts ->
- string:strip(lists:last(string:tokens(Hosts, ",")))
- end;
- {ok, {Addr, _Port}} ->
- inet_parse:ntoa(Addr);
- {error, enotconn} ->
- exit(normal)
- end;
-get(path) ->
- case erlang:get(?SAVE_PATH) of
- undefined ->
- {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
- Path = mochiweb_util:unquote(Path0),
- put(?SAVE_PATH, Path),
- Path;
- Cached ->
- Cached
- end;
-get(body_length) ->
- case erlang:get(?SAVE_BODY_LENGTH) of
- undefined ->
- BodyLength = body_length(),
- put(?SAVE_BODY_LENGTH, {cached, BodyLength}),
- BodyLength;
- {cached, Cached} ->
- Cached
- end;
-get(range) ->
- case get_header_value(range) of
- undefined ->
- undefined;
- RawRange ->
- mochiweb_http:parse_range_request(RawRange)
- end.
-
-%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
-%% @doc Dump the internal representation to a "human readable" set of terms
-%% for debugging/inspection purposes.
-dump() ->
- {?MODULE, [{method, Method},
- {version, Version},
- {raw_path, RawPath},
- {headers, mochiweb_headers:to_list(Headers)}]}.
-
-%% @spec send(iodata()) -> ok
-%% @doc Send data over the socket.
-send(Data) ->
- case mochiweb_socket:send(Socket, Data) of
- ok ->
- ok;
- _ ->
- exit(normal)
- end.
-
-%% @spec recv(integer()) -> binary()
-%% @doc Receive Length bytes from the client as a binary, with the default
-%% idle timeout.
-recv(Length) ->
- recv(Length, ?IDLE_TIMEOUT).
-
-%% @spec recv(integer(), integer()) -> binary()
-%% @doc Receive Length bytes from the client as a binary, with the given
-%% Timeout in msec.
-recv(Length, Timeout) ->
- case mochiweb_socket:recv(Socket, Length, Timeout) of
- {ok, Data} ->
- put(?SAVE_RECV, true),
- Data;
- _ ->
- exit(normal)
- end.
-
-%% @spec body_length() -> undefined | chunked | {unknown_transfer_encoding, string()} | integer()
-%% @doc Infer body length from transfer-encoding and content-length headers.
-body_length() ->
- case get_header_value("transfer-encoding") of
- undefined ->
- case get_header_value("content-length") of
- undefined ->
- undefined;
- Length ->
- list_to_integer(Length)
- end;
- "chunked" ->
- chunked;
- Unknown ->
- {unknown_transfer_encoding, Unknown}
- end.
-
-
-%% @spec recv_body() -> binary()
-%% @doc Receive the body of the HTTP request (defined by Content-Length).
-%% Will only receive up to the default max-body length of 1MB.
-recv_body() ->
- recv_body(?MAX_RECV_BODY).
-
-%% @spec recv_body(integer()) -> binary()
-%% @doc Receive the body of the HTTP request (defined by Content-Length).
-%% Will receive up to MaxBody bytes.
-recv_body(MaxBody) ->
- case erlang:get(?SAVE_BODY) of
- undefined ->
- % we could use a sane constant for max chunk size
- Body = stream_body(?MAX_RECV_BODY, fun
- ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
- iolist_to_binary(lists:reverse(BinAcc));
- ({Length, Bin}, {LengthAcc, BinAcc}) ->
- NewLength = Length + LengthAcc,
- if NewLength > MaxBody ->
- exit({body_too_large, chunked});
- true ->
- {NewLength, [Bin | BinAcc]}
- end
- end, {0, []}, MaxBody),
- put(?SAVE_BODY, Body),
- Body;
- Cached -> Cached
- end.
-
-stream_body(MaxChunkSize, ChunkFun, FunState) ->
- stream_body(MaxChunkSize, ChunkFun, FunState, undefined).
-
-stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength) ->
- Expect = case get_header_value("expect") of
- undefined ->
- undefined;
- Value when is_list(Value) ->
- string:to_lower(Value)
- end,
- case Expect of
- "100-continue" ->
- start_raw_response({100, gb_trees:empty()});
- _Else ->
- ok
- end,
- case body_length() of
- undefined ->
- undefined;
- {unknown_transfer_encoding, Unknown} ->
- exit({unknown_transfer_encoding, Unknown});
- chunked ->
- % In this case the MaxBody is actually used to
- % determine the maximum allowed size of a single
- % chunk.
- stream_chunked_body(MaxChunkSize, ChunkFun, FunState);
- 0 ->
- <<>>;
- Length when is_integer(Length) ->
- case MaxBodyLength of
- MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
- exit({body_too_large, content_length});
- _ ->
- stream_unchunked_body(Length, ChunkFun, FunState)
- end;
- Length ->
- exit({length_not_integer, Length})
- end.
-
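-%% A hedged illustration, not part of the original module: streaming a
-%% large body straight to disk instead of buffering it, with `Fd' an io
-%% device opened by the caller (illustrative name). The {0, _Footer}
-%% event marks the end of the body, as the fun in recv_body/1 above
-%% also relies on.
-%%
-%%   stream_body(8192,
-%%               fun ({0, _Footer}, ok) -> ok;
-%%                   ({_Length, Bin}, ok) -> ok = file:write(Fd, Bin)
-%%               end, ok, ?MAX_RECV_BODY).
-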
-
-%% @spec start_response({integer(), ioheaders()}) -> response()
-%% @doc Start the HTTP response by sending the Code HTTP response and
-%% ResponseHeaders. The server will set header defaults such as Server
-%% and Date if not present in ResponseHeaders.
-start_response({Code, ResponseHeaders}) ->
- HResponse = mochiweb_headers:make(ResponseHeaders),
- HResponse1 = mochiweb_headers:default_from_list(server_headers(),
- HResponse),
- start_raw_response({Code, HResponse1}).
-
-%% @spec start_raw_response({integer(), headers()}) -> response()
-%% @doc Start the HTTP response by sending the Code HTTP response and
-%% ResponseHeaders.
-start_raw_response({Code, ResponseHeaders}) ->
- F = fun ({K, V}, Acc) ->
- [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
- end,
- End = lists:foldl(F, [<<"\r\n">>],
- mochiweb_headers:to_list(ResponseHeaders)),
- send([make_version(Version), make_code(Code), <<"\r\n">> | End]),
- mochiweb:new_response({THIS, Code, ResponseHeaders}).
-
-
-%% @spec start_response_length({integer(), ioheaders(), integer()}) -> response()
-%% @doc Start the HTTP response by sending the Code HTTP response and
-%% ResponseHeaders including a Content-Length of Length. The server
-%% will set header defaults such as Server
-%% and Date if not present in ResponseHeaders.
-start_response_length({Code, ResponseHeaders, Length}) ->
- HResponse = mochiweb_headers:make(ResponseHeaders),
- HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
- start_response({Code, HResponse1}).
-
-%% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}) -> response()
-%% @doc Start the HTTP response with start_response, and send Body to the
-%% client (unless get(method) =:= 'HEAD'). The Content-Length header
-%% is set from the Body length, and the server will insert header
-%% defaults.
-respond({Code, ResponseHeaders, {file, IoDevice}}) ->
- Length = mochiweb_io:iodevice_size(IoDevice),
- Response = start_response_length({Code, ResponseHeaders, Length}),
- case Method of
- 'HEAD' ->
- ok;
- _ ->
- mochiweb_io:iodevice_stream(fun send/1, IoDevice)
- end,
- Response;
-respond({Code, ResponseHeaders, chunked}) ->
- HResponse = mochiweb_headers:make(ResponseHeaders),
- HResponse1 = case Method of
- 'HEAD' ->
- %% This is what Google does, http://www.google.com/
- %% is chunked but HEAD gets Content-Length: 0.
- %% The RFC is ambiguous so emulating Google is smart.
- mochiweb_headers:enter("Content-Length", "0",
- HResponse);
- _ when Version >= {1, 1} ->
- %% Only use chunked encoding for HTTP/1.1
- mochiweb_headers:enter("Transfer-Encoding", "chunked",
- HResponse);
- _ ->
- %% For pre-1.1 clients we send the data as-is
- %% without a Content-Length header and without
- %% chunk delimiters. Since the end of the document
- %% is now ambiguous we must force a close.
- put(?SAVE_FORCE_CLOSE, true),
- HResponse
- end,
- start_response({Code, HResponse1});
-respond({Code, ResponseHeaders, Body}) ->
- Response = start_response_length({Code, ResponseHeaders, iolist_size(Body)}),
- case Method of
- 'HEAD' ->
- ok;
- _ ->
- send(Body)
- end,
- Response.
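-
-%% Example (editor's sketch): a plain-text reply via respond/1 on a
-%% request instance Req:
-%%   Req:respond({200, [{"Content-Type", "text/plain"}], <<"hello">>})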
-
-%% @spec not_found() -> response()
-%% @doc Alias for <code>not_found([])</code>.
-not_found() ->
- not_found([]).
-
-%% @spec not_found(ExtraHeaders) -> response()
-%% @doc Alias for <code>respond({404, [{"Content-Type", "text/plain"}
-%% | ExtraHeaders], &lt;&lt;"Not found."&gt;&gt;})</code>.
-not_found(ExtraHeaders) ->
- respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
- <<"Not found.">>}).
-
-%% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}) ->
-%% response()
-%% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}).
-ok({ContentType, Body}) ->
- ok({ContentType, [], Body});
-ok({ContentType, ResponseHeaders, Body}) ->
- HResponse = mochiweb_headers:make(ResponseHeaders),
- case THIS:get(range) of
- X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
- %% http://code.google.com/p/mochiweb/issues/detail?id=54
- %% Range header not supported when chunked, return 200 and provide
- %% full response.
- HResponse1 = mochiweb_headers:enter("Content-Type", ContentType,
- HResponse),
- respond({200, HResponse1, Body});
- Ranges ->
- {PartList, Size} = range_parts(Body, Ranges),
- case PartList of
- [] -> %% no valid ranges
- HResponse1 = mochiweb_headers:enter("Content-Type",
- ContentType,
- HResponse),
- %% could be 416, for now we'll just return 200
- respond({200, HResponse1, Body});
- PartList ->
- {RangeHeaders, RangeBody} =
- mochiweb_multipart:parts_to_body(PartList, ContentType, Size),
- HResponse1 = mochiweb_headers:enter_from_list(
- [{"Accept-Ranges", "bytes"} |
- RangeHeaders],
- HResponse),
- respond({206, HResponse1, RangeBody})
- end
- end.
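-
-%% Example (editor's sketch): ok/1 is the common shortcut; it fills in
-%% the 200 status and the Content-Type header:
-%%   Req:ok({"text/plain", <<"hello">>})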
-
-%% @spec should_close() -> bool()
-%% @doc Return true if the connection must be closed. If false, using
-%% Keep-Alive should be safe.
-should_close() ->
- ForceClose = erlang:get(mochiweb_request_force_close) =/= undefined,
- DidNotRecv = erlang:get(mochiweb_request_recv) =:= undefined,
- ForceClose orelse Version < {1, 0}
- %% Connection: close
- orelse get_header_value("connection") =:= "close"
- %% HTTP 1.0 requires Connection: Keep-Alive
- orelse (Version =:= {1, 0}
- andalso get_header_value("connection") =/= "Keep-Alive")
- %% unread data left on the socket, can't safely continue
- orelse (DidNotRecv
- andalso get_header_value("content-length") =/= undefined
- andalso list_to_integer(get_header_value("content-length")) > 0)
- orelse (DidNotRecv
- andalso get_header_value("transfer-encoding") =:= "chunked").
-
-%% @spec cleanup() -> ok
-%% @doc Clean up any junk in the process dictionary, required before continuing
-%% a Keep-Alive request.
-cleanup() ->
- [erase(K) || K <- [?SAVE_QS,
- ?SAVE_PATH,
- ?SAVE_RECV,
- ?SAVE_BODY,
- ?SAVE_POST,
- ?SAVE_COOKIE,
- ?SAVE_FORCE_CLOSE]],
- ok.
-
-%% @spec parse_qs() -> [{Key::string(), Value::string()}]
-%% @doc Parse the query string of the URL.
-parse_qs() ->
- case erlang:get(?SAVE_QS) of
- undefined ->
- {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
- Parsed = mochiweb_util:parse_qs(QueryString),
- put(?SAVE_QS, Parsed),
- Parsed;
- Cached ->
- Cached
- end.
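-
-%% Example (editor's sketch): for a request to /search?q=erlang&page=2:
-%%   [{"q", "erlang"}, {"page", "2"}] = Req:parse_qs()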
-
-%% @spec get_cookie_value(Key::string) -> string() | undefined
-%% @doc Get the value of the given cookie.
-get_cookie_value(Key) ->
- proplists:get_value(Key, parse_cookie()).
-
-%% @spec parse_cookie() -> [{Key::string(), Value::string()}]
-%% @doc Parse the cookie header.
-parse_cookie() ->
- case erlang:get(?SAVE_COOKIE) of
- undefined ->
- Cookies = case get_header_value("cookie") of
- undefined ->
- [];
- Value ->
- mochiweb_cookies:parse_cookie(Value)
- end,
- put(?SAVE_COOKIE, Cookies),
- Cookies;
- Cached ->
- Cached
- end.
-
-%% @spec parse_post() -> [{Key::string(), Value::string()}]
-%% @doc Parse an application/x-www-form-urlencoded form POST. This
-%% has the side-effect of calling recv_body().
-parse_post() ->
- case erlang:get(?SAVE_POST) of
- undefined ->
- Parsed = case recv_body() of
- undefined ->
- [];
- Binary ->
- case get_primary_header_value("content-type") of
- "application/x-www-form-urlencoded" ++ _ ->
- mochiweb_util:parse_qs(Binary);
- _ ->
- []
- end
- end,
- put(?SAVE_POST, Parsed),
- Parsed;
- Cached ->
- Cached
- end.
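-
-%% Example (editor's sketch): for an application/x-www-form-urlencoded
-%% POST with body "user=bob&lang=erlang":
-%%   [{"user", "bob"}, {"lang", "erlang"}] = Req:parse_post()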
-
-%% @spec stream_chunked_body(integer(), fun(), term()) -> term()
-%% @doc The given function is called for each chunk.
-%% Used internally by stream_body.
-stream_chunked_body(MaxChunkSize, Fun, FunState) ->
- case read_chunk_length() of
- 0 ->
- Fun({0, read_chunk(0)}, FunState);
- Length when Length > MaxChunkSize ->
- NewState = read_sub_chunks(Length, MaxChunkSize, Fun, FunState),
- stream_chunked_body(MaxChunkSize, Fun, NewState);
- Length ->
- NewState = Fun({Length, read_chunk(Length)}, FunState),
- stream_chunked_body(MaxChunkSize, Fun, NewState)
- end.
-
-stream_unchunked_body(0, Fun, FunState) ->
- Fun({0, <<>>}, FunState);
-stream_unchunked_body(Length, Fun, FunState) when Length > 0 ->
- PktSize = case Length > ?RECBUF_SIZE of
- true ->
- ?RECBUF_SIZE;
- false ->
- Length
- end,
- Bin = recv(PktSize),
- NewState = Fun({PktSize, Bin}, FunState),
- stream_unchunked_body(Length - PktSize, Fun, NewState).
-
-%% @spec read_chunk_length() -> integer()
-%% @doc Read the length of the next HTTP chunk.
-read_chunk_length() ->
- mochiweb_socket:setopts(Socket, [{packet, line}]),
- case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
- {ok, Header} ->
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- Splitter = fun (C) ->
- C =/= $\r andalso C =/= $\n andalso C =/= $\s
- end,
- {Hex, _Rest} = lists:splitwith(Splitter, binary_to_list(Header)),
- mochihex:to_int(Hex);
- _ ->
- exit(normal)
- end.
-
-%% @spec read_chunk(integer()) -> Chunk::binary() | [Footer::binary()]
-%% @doc Read in an HTTP chunk of the given length. If Length is 0, then read
-%% the HTTP footers (as a list of binaries, since they're nominal).
-read_chunk(0) ->
- mochiweb_socket:setopts(Socket, [{packet, line}]),
- F = fun (F1, Acc) ->
- case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
- {ok, <<"\r\n">>} ->
- Acc;
- {ok, Footer} ->
- F1(F1, [Footer | Acc]);
- _ ->
- exit(normal)
- end
- end,
- Footers = F(F, []),
- mochiweb_socket:setopts(Socket, [{packet, raw}]),
- put(?SAVE_RECV, true),
- Footers;
-read_chunk(Length) ->
- case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
- {ok, <<Chunk:Length/binary, "\r\n">>} ->
- Chunk;
- _ ->
- exit(normal)
- end.
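-
-%% Editor's note: the chunked wire format consumed above is
-%% "<hex-length>\r\n<data>\r\n" per chunk, terminated by a zero-length
-%% chunk and optional footers, e.g. "5\r\nhello\r\n0\r\n\r\n".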
-
-read_sub_chunks(Length, MaxChunkSize, Fun, FunState) when Length > MaxChunkSize ->
- Bin = recv(MaxChunkSize),
- NewState = Fun({size(Bin), Bin}, FunState),
- read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState);
-
-read_sub_chunks(Length, _MaxChunkSize, Fun, FunState) ->
- Fun({Length, read_chunk(Length)}, FunState).
-
-%% @spec serve_file(Path, DocRoot) -> Response
-%% @doc Serve a file relative to DocRoot.
-serve_file(Path, DocRoot) ->
- serve_file(Path, DocRoot, []).
-
-%% @spec serve_file(Path, DocRoot, ExtraHeaders) -> Response
-%% @doc Serve a file relative to DocRoot.
-serve_file(Path, DocRoot, ExtraHeaders) ->
- case mochiweb_util:safe_relative_path(Path) of
- undefined ->
- not_found(ExtraHeaders);
- RelPath ->
- FullPath = filename:join([DocRoot, RelPath]),
- case filelib:is_dir(FullPath) of
- true ->
- maybe_redirect(RelPath, FullPath, ExtraHeaders);
- false ->
- maybe_serve_file(FullPath, ExtraHeaders)
- end
- end.
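-
-%% Example (editor's sketch): a minimal static-file loop; "www" is a
-%% hypothetical document root:
-%%   loop(Req) ->
-%%       "/" ++ Path = Req:get(path),
-%%       Req:serve_file(Path, "www").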
-
-%% Internal API
-
-%% This has the same effect as the DirectoryIndex directive in httpd
-directory_index(FullPath) ->
- filename:join([FullPath, "index.html"]).
-
-maybe_redirect([], FullPath, ExtraHeaders) ->
- maybe_serve_file(directory_index(FullPath), ExtraHeaders);
-
-maybe_redirect(RelPath, FullPath, ExtraHeaders) ->
- case string:right(RelPath, 1) of
- "/" ->
- maybe_serve_file(directory_index(FullPath), ExtraHeaders);
- _ ->
- Host = mochiweb_headers:get_value("host", Headers),
- Location = "http://" ++ Host ++ "/" ++ RelPath ++ "/",
- LocationBin = list_to_binary(Location),
- MoreHeaders = [{"Location", Location},
- {"Content-Type", "text/html"} | ExtraHeaders],
- Top = <<"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">"
- "<html><head>"
- "<title>301 Moved Permanently</title>"
- "</head><body>"
- "<h1>Moved Permanently</h1>"
- "<p>The document has moved <a href=\"">>,
- Bottom = <<">here</a>.</p></body></html>\n">>,
- Body = <<Top/binary, LocationBin/binary, Bottom/binary>>,
- respond({301, MoreHeaders, Body})
- end.
-
-maybe_serve_file(File, ExtraHeaders) ->
- case file:read_file_info(File) of
- {ok, FileInfo} ->
- LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
- case get_header_value("if-modified-since") of
- LastModified ->
- respond({304, ExtraHeaders, ""});
- _ ->
- case file:open(File, [raw, binary]) of
- {ok, IoDevice} ->
- ContentType = mochiweb_util:guess_mime(File),
- Res = ok({ContentType,
- [{"last-modified", LastModified}
- | ExtraHeaders],
- {file, IoDevice}}),
- file:close(IoDevice),
- Res;
- _ ->
- not_found(ExtraHeaders)
- end
- end;
- {error, _} ->
- not_found(ExtraHeaders)
- end.
-
-server_headers() ->
- [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
- {"Date", httpd_util:rfc1123_date()}].
-
-make_code(X) when is_integer(X) ->
- [integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
-make_code(Io) when is_list(Io); is_binary(Io) ->
- Io.
-
-make_version({1, 0}) ->
- <<"HTTP/1.0 ">>;
-make_version(_) ->
- <<"HTTP/1.1 ">>.
-
-range_parts({file, IoDevice}, Ranges) ->
- Size = mochiweb_io:iodevice_size(IoDevice),
- F = fun (Spec, Acc) ->
- case mochiweb_http:range_skip_length(Spec, Size) of
- invalid_range ->
- Acc;
- V ->
- [V | Acc]
- end
- end,
- LocNums = lists:foldr(F, [], Ranges),
- {ok, Data} = file:pread(IoDevice, LocNums),
- Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
- {Skip, Skip + Length - 1, PartialBody}
- end,
- LocNums, Data),
- {Bodies, Size};
-range_parts(Body0, Ranges) ->
- Body = iolist_to_binary(Body0),
- Size = size(Body),
- F = fun(Spec, Acc) ->
- case mochiweb_http:range_skip_length(Spec, Size) of
- invalid_range ->
- Acc;
- {Skip, Length} ->
- <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
- [{Skip, Skip + Length - 1, PartialBody} | Acc]
- end
- end,
- {lists:foldr(F, [], Ranges), Size}.
-
-%% @spec accepted_encodings([encoding()]) -> [encoding()] | bad_accept_encoding_value
-%% @type encoding() = string().
-%%
-%% @doc Returns a list of encodings accepted by a request. Encodings that are
-%% not supported by the server will not be included in the return list.
-%% This list is computed from the "Accept-Encoding" header and
-%% its elements are ordered in descending order of their Q values.
-%%
-%% Section 14.3 of RFC 2616 (HTTP/1.1) describes the "Accept-Encoding"
-%% header and the process of determining which server-supported encodings
-%% can be used to encode the body of the request's response.
-%%
-%% Examples
-%%
-%% 1) For a missing "Accept-Encoding" header:
-%% accepted_encodings(["gzip", "identity"]) -> ["identity"]
-%%
-%% 2) For an "Accept-Encoding" header with value "gzip, deflate":
-%% accepted_encodings(["gzip", "identity"]) -> ["gzip", "identity"]
-%%
-%% 3) For an "Accept-Encoding" header with value "gzip;q=0.5, deflate":
-%% accepted_encodings(["gzip", "deflate", "identity"]) ->
-%% ["deflate", "gzip", "identity"]
-%%
-accepted_encodings(SupportedEncodings) ->
- AcceptEncodingHeader = case get_header_value("Accept-Encoding") of
- undefined ->
- "";
- Value ->
- Value
- end,
- case mochiweb_util:parse_qvalues(AcceptEncodingHeader) of
- invalid_qvalue_string ->
- bad_accept_encoding_value;
- QList ->
- mochiweb_util:pick_accepted_encodings(
- QList, SupportedEncodings, "identity"
- )
- end.
-
-%% @spec accepts_content_type(string() | binary()) -> boolean() | bad_accept_header
-%%
-%% @doc Determines whether a request accepts a given media type by analyzing its
-%% "Accept" header.
-%%
-%% Examples
-%%
-%% 1) For a missing "Accept" header:
-%% accepts_content_type("application/json") -> true
-%%
-%% 2) For an "Accept" header with value "text/plain, application/*":
-%% accepts_content_type("application/json") -> true
-%%
-%% 3) For an "Accept" header with value "text/plain, */*; q=0.0":
-%% accepts_content_type("application/json") -> false
-%%
-%% 4) For an "Accept" header with value "text/plain; q=0.5, */*; q=0.1":
-%% accepts_content_type("application/json") -> true
-%%
-%% 5) For an "Accept" header with value "text/*; q=0.0, */*":
-%% accepts_content_type("text/plain") -> false
-%%
-accepts_content_type(ContentType) when is_binary(ContentType) ->
- accepts_content_type(binary_to_list(ContentType));
-accepts_content_type(ContentType1) ->
- ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]),
- AcceptHeader = case get_header_value("Accept") of
- undefined ->
- "*/*";
- Value ->
- Value
- end,
- case mochiweb_util:parse_qvalues(AcceptHeader) of
- invalid_qvalue_string ->
- bad_accept_header;
- QList ->
- [MainType, _SubType] = string:tokens(ContentType, "/"),
- SuperType = MainType ++ "/*",
- lists:any(
- fun({"*/*", Q}) when Q > 0.0 ->
- true;
- ({Type, Q}) when Q > 0.0 ->
- Type =:= ContentType orelse Type =:= SuperType;
- (_) ->
- false
- end,
- QList
- ) andalso
- (not lists:member({ContentType, 0.0}, QList)) andalso
- (not lists:member({SuperType, 0.0}, QList))
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_response.erl b/1.1.x/src/mochiweb/mochiweb_response.erl
deleted file mode 100644
index ab8ee61c..00000000
--- a/1.1.x/src/mochiweb/mochiweb_response.erl
+++ /dev/null
@@ -1,64 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Response abstraction.
-
--module(mochiweb_response, [Request, Code, Headers]).
--author('bob@mochimedia.com').
-
--define(QUIP, "Any of you quaids got a smint?").
-
--export([get_header_value/1, get/1, dump/0]).
--export([send/1, write_chunk/1]).
-
-%% @spec get_header_value(string() | atom() | binary()) -> string() | undefined
-%% @doc Get the value of the given response header.
-get_header_value(K) ->
- mochiweb_headers:get_value(K, Headers).
-
-%% @spec get(request | code | headers) -> term()
-%% @doc Return the internal representation of the given field.
-get(request) ->
- Request;
-get(code) ->
- Code;
-get(headers) ->
- Headers.
-
-%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
-%% @doc Dump the internal representation to a "human readable" set of terms
-%% for debugging/inspection purposes.
-dump() ->
- [{request, Request:dump()},
- {code, Code},
- {headers, mochiweb_headers:to_list(Headers)}].
-
-%% @spec send(iodata()) -> ok
-%% @doc Send data over the socket if the method is not HEAD.
-send(Data) ->
- case Request:get(method) of
- 'HEAD' ->
- ok;
- _ ->
- Request:send(Data)
- end.
-
-%% @spec write_chunk(iodata()) -> ok
-%% @doc Write a chunk of an HTTP chunked response. If Data is zero length,
-%% then the chunked response will be finished.
-write_chunk(Data) ->
- case Request:get(version) of
- Version when Version >= {1, 1} ->
- Length = iolist_size(Data),
- send([io_lib:format("~.16b\r\n", [Length]), Data, <<"\r\n">>]);
- _ ->
- send(Data)
- end.
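-
-%% Example (editor's sketch): streaming a chunked reply; Req:respond/1
-%% with 'chunked' returns this response object, and an empty chunk
-%% finishes it:
-%%   Resp = Req:respond({200, [{"Content-Type", "text/plain"}], chunked}),
-%%   Resp:write_chunk(<<"hello ">>),
-%%   Resp:write_chunk(<<"world">>),
-%%   Resp:write_chunk(<<>>)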
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_skel.erl b/1.1.x/src/mochiweb/mochiweb_skel.erl
deleted file mode 100644
index 76eefa60..00000000
--- a/1.1.x/src/mochiweb/mochiweb_skel.erl
+++ /dev/null
@@ -1,86 +0,0 @@
--module(mochiweb_skel).
--export([skelcopy/2]).
-
--include_lib("kernel/include/file.hrl").
-
-%% External API
-
-skelcopy(DestDir, Name) ->
- ok = ensuredir(DestDir),
- LDst = case length(filename:dirname(DestDir)) of
- 1 -> %% handle case when dirname returns "/"
- 0;
- N ->
- N + 1
- end,
- skelcopy(src(), DestDir, Name, LDst),
- DestLink = filename:join([DestDir, Name, "deps", "mochiweb-src"]),
- ok = filelib:ensure_dir(DestLink),
- ok = file:make_symlink(
- filename:join(filename:dirname(code:which(?MODULE)), ".."),
- DestLink).
-
-%% Internal API
-
-src() ->
- Dir = filename:dirname(code:which(?MODULE)),
- filename:join(Dir, "../priv/skel").
-
-skel() ->
- "skel".
-
-skelcopy(Src, DestDir, Name, LDst) ->
- Dest = re:replace(filename:basename(Src), skel(), Name,
- [global, {return, list}]),
- case file:read_file_info(Src) of
- {ok, #file_info{type=directory, mode=Mode}} ->
- Dir = DestDir ++ "/" ++ Dest,
- EDst = lists:nthtail(LDst, Dir),
- ok = ensuredir(Dir),
- ok = file:write_file_info(Dir, #file_info{mode=Mode}),
- case filename:basename(Src) of
- "ebin" ->
- ok;
- _ ->
- {ok, Files} = file:list_dir(Src),
- io:format("~s/~n", [EDst]),
- lists:foreach(fun ("." ++ _) -> ok;
- (F) ->
- skelcopy(filename:join(Src, F),
- Dir,
- Name,
- LDst)
- end,
- Files),
- ok
- end;
- {ok, #file_info{type=regular, mode=Mode}} ->
- OutFile = filename:join(DestDir, Dest),
- {ok, B} = file:read_file(Src),
- S = re:replace(binary_to_list(B), skel(), Name,
- [{return, list}, global]),
- ok = file:write_file(OutFile, list_to_binary(S)),
- ok = file:write_file_info(OutFile, #file_info{mode=Mode}),
- io:format(" ~s~n", [filename:basename(Src)]),
- ok;
- {ok, _} ->
- io:format("ignored source file: ~p~n", [Src]),
- ok
- end.
-
-ensuredir(Dir) ->
- case file:make_dir(Dir) of
- ok ->
- ok;
- {error, eexist} ->
- ok;
- E ->
- E
- end.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_socket.erl b/1.1.x/src/mochiweb/mochiweb_socket.erl
deleted file mode 100644
index 76b018c8..00000000
--- a/1.1.x/src/mochiweb/mochiweb_socket.erl
+++ /dev/null
@@ -1,84 +0,0 @@
-%% @copyright 2010 Mochi Media, Inc.
-
-%% @doc MochiWeb socket - wrapper for plain and ssl sockets.
-
--module(mochiweb_socket).
-
--export([listen/4, accept/1, recv/3, send/2, close/1, port/1, peername/1,
- setopts/2, type/1]).
-
--define(ACCEPT_TIMEOUT, 2000).
-
-listen(Ssl, Port, Opts, SslOpts) ->
- case Ssl of
- true ->
- case ssl:listen(Port, Opts ++ SslOpts) of
- {ok, ListenSocket} ->
- {ok, {ssl, ListenSocket}};
- {error, _} = Err ->
- Err
- end;
- false ->
- gen_tcp:listen(Port, Opts)
- end.
-
-accept({ssl, ListenSocket}) ->
- % There's a bug in ssl:transport_accept/2 at the moment, which is the
- % reason for the try...catch block. Should be fixed in OTP R14.
- try ssl:transport_accept(ListenSocket) of
- {ok, Socket} ->
- case ssl:ssl_accept(Socket) of
- ok ->
- {ok, {ssl, Socket}};
- {error, _} = Err ->
- Err
- end;
- {error, _} = Err ->
- Err
- catch
- error:{badmatch, {error, Reason}} ->
- {error, Reason}
- end;
-accept(ListenSocket) ->
- gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT).
-
-recv({ssl, Socket}, Length, Timeout) ->
- ssl:recv(Socket, Length, Timeout);
-recv(Socket, Length, Timeout) ->
- gen_tcp:recv(Socket, Length, Timeout).
-
-send({ssl, Socket}, Data) ->
- ssl:send(Socket, Data);
-send(Socket, Data) ->
- gen_tcp:send(Socket, Data).
-
-close({ssl, Socket}) ->
- ssl:close(Socket);
-close(Socket) ->
- gen_tcp:close(Socket).
-
-port({ssl, Socket}) ->
- case ssl:sockname(Socket) of
- {ok, {_, Port}} ->
- {ok, Port};
- {error, _} = Err ->
- Err
- end;
-port(Socket) ->
- inet:port(Socket).
-
-peername({ssl, Socket}) ->
- ssl:peername(Socket);
-peername(Socket) ->
- inet:peername(Socket).
-
-setopts({ssl, Socket}, Opts) ->
- ssl:setopts(Socket, Opts);
-setopts(Socket, Opts) ->
- inet:setopts(Socket, Opts).
-
-type({ssl, _}) ->
- ssl;
-type(_) ->
- plain.
-
diff --git a/1.1.x/src/mochiweb/mochiweb_socket_server.erl b/1.1.x/src/mochiweb/mochiweb_socket_server.erl
deleted file mode 100644
index 1aae09ac..00000000
--- a/1.1.x/src/mochiweb/mochiweb_socket_server.erl
+++ /dev/null
@@ -1,272 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc MochiWeb socket server.
-
--module(mochiweb_socket_server).
--author('bob@mochimedia.com').
--behaviour(gen_server).
-
--include("internal.hrl").
-
--export([start/1, stop/1]).
--export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
- handle_info/2]).
--export([get/2]).
-
--record(mochiweb_socket_server,
- {port,
- loop,
- name=undefined,
- %% NOTE: This is currently ignored.
- max=2048,
- ip=any,
- listen=null,
- nodelay=false,
- backlog=128,
- active_sockets=0,
- acceptor_pool_size=16,
- ssl=false,
- ssl_opts=[{ssl_imp, new}],
- acceptor_pool=sets:new()}).
-
-start(State=#mochiweb_socket_server{}) ->
- start_server(State);
-start(Options) ->
- start(parse_options(Options)).
-
-get(Name, Property) ->
- gen_server:call(Name, {get, Property}).
-
-stop(Name) when is_atom(Name) ->
- gen_server:cast(Name, stop);
-stop(Pid) when is_pid(Pid) ->
- gen_server:cast(Pid, stop);
-stop({local, Name}) ->
- stop(Name);
-stop({global, Name}) ->
- stop(Name);
-stop(Options) ->
- State = parse_options(Options),
- stop(State#mochiweb_socket_server.name).
-
-%% Internal API
-
-parse_options(Options) ->
- parse_options(Options, #mochiweb_socket_server{}).
-
-parse_options([], State) ->
- State;
-parse_options([{name, L} | Rest], State) when is_list(L) ->
- Name = {local, list_to_atom(L)},
- parse_options(Rest, State#mochiweb_socket_server{name=Name});
-parse_options([{name, A} | Rest], State) when A =:= undefined ->
- parse_options(Rest, State#mochiweb_socket_server{name=A});
-parse_options([{name, A} | Rest], State) when is_atom(A) ->
- Name = {local, A},
- parse_options(Rest, State#mochiweb_socket_server{name=Name});
-parse_options([{name, Name} | Rest], State) ->
- parse_options(Rest, State#mochiweb_socket_server{name=Name});
-parse_options([{port, L} | Rest], State) when is_list(L) ->
- Port = list_to_integer(L),
- parse_options(Rest, State#mochiweb_socket_server{port=Port});
-parse_options([{port, Port} | Rest], State) ->
- parse_options(Rest, State#mochiweb_socket_server{port=Port});
-parse_options([{ip, Ip} | Rest], State) ->
- ParsedIp = case Ip of
- any ->
- any;
- Ip when is_tuple(Ip) ->
- Ip;
- Ip when is_list(Ip) ->
- {ok, IpTuple} = inet_parse:address(Ip),
- IpTuple
- end,
- parse_options(Rest, State#mochiweb_socket_server{ip=ParsedIp});
-parse_options([{loop, Loop} | Rest], State) ->
- parse_options(Rest, State#mochiweb_socket_server{loop=Loop});
-parse_options([{backlog, Backlog} | Rest], State) ->
- parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
-parse_options([{nodelay, NoDelay} | Rest], State) ->
- parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
-parse_options([{acceptor_pool_size, Max} | Rest], State) ->
- MaxInt = ensure_int(Max),
- parse_options(Rest,
- State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
-parse_options([{max, Max} | Rest], State) ->
- error_logger:info_report([{warning, "TODO: max is currently unsupported"},
- {max, Max}]),
- MaxInt = ensure_int(Max),
- parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
-parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
- parse_options(Rest, State#mochiweb_socket_server{ssl=Ssl});
-parse_options([{ssl_opts, SslOpts} | Rest], State) when is_list(SslOpts) ->
- SslOpts1 = [{ssl_imp, new} | proplists:delete(ssl_imp, SslOpts)],
- parse_options(Rest, State#mochiweb_socket_server{ssl_opts=SslOpts1}).
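-
-%% Example (editor's sketch): a typical option list accepted by
-%% parse_options/1 via start/1; Loop is a hypothetical fun invoked for
-%% each accepted connection:
-%%   mochiweb_socket_server:start([{name, my_server},
-%%                                 {port, 8080},
-%%                                 {loop, Loop}])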
-
-start_server(State=#mochiweb_socket_server{ssl=Ssl, name=Name}) ->
- case Ssl of
- true ->
- application:start(crypto),
- application:start(public_key),
- application:start(ssl);
- false ->
- void
- end,
- case Name of
- undefined ->
- gen_server:start_link(?MODULE, State, []);
- _ ->
- gen_server:start_link(Name, ?MODULE, State, [])
- end.
-
-ensure_int(N) when is_integer(N) ->
- N;
-ensure_int(S) when is_list(S) ->
- list_to_integer(S).
-
-ipv6_supported() ->
- case (catch inet:getaddr("localhost", inet6)) of
- {ok, _Addr} ->
- true;
- {error, _} ->
- false
- end.
-
-init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
- process_flag(trap_exit, true),
- BaseOpts = [binary,
- {reuseaddr, true},
- {packet, 0},
- {backlog, Backlog},
- {recbuf, ?RECBUF_SIZE},
- {active, false},
- {nodelay, NoDelay}],
- Opts = case Ip of
- any ->
- case ipv6_supported() of % IPv4, and IPv6 if supported
- true -> [inet, inet6 | BaseOpts];
- _ -> BaseOpts
- end;
- {_, _, _, _} -> % IPv4
- [inet, {ip, Ip} | BaseOpts];
- {_, _, _, _, _, _, _, _} -> % IPv6
- [inet6, {ip, Ip} | BaseOpts]
- end,
- case listen(Port, Opts, State) of
- {stop, eacces} ->
- case Port < 1024 of
- true ->
- case fdsrv:start() of
- {ok, _} ->
- case fdsrv:bind_socket(tcp, Port) of
- {ok, Fd} ->
- listen(Port, [{fd, Fd} | Opts], State);
- _ ->
- {stop, fdsrv_bind_failed}
- end;
- _ ->
- {stop, fdsrv_start_failed}
- end;
- false ->
- {stop, eacces}
- end;
- Other ->
- Other
- end.
-
-new_acceptor_pool(Listen,
- State=#mochiweb_socket_server{acceptor_pool=Pool,
- acceptor_pool_size=Size,
- loop=Loop}) ->
- F = fun (_, S) ->
- Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
- sets:add_element(Pid, S)
- end,
- Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
- State#mochiweb_socket_server{acceptor_pool=Pool1}.
-
-listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
- case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
- {ok, Listen} ->
- {ok, ListenPort} = mochiweb_socket:port(Listen),
- {ok, new_acceptor_pool(
- Listen,
- State#mochiweb_socket_server{listen=Listen,
- port=ListenPort})};
- {error, Reason} ->
- {stop, Reason}
- end.
-
-do_get(port, #mochiweb_socket_server{port=Port}) ->
- Port;
-do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
- ActiveSockets.
-
-handle_call({get, Property}, _From, State) ->
- Res = do_get(Property, State),
- {reply, Res, State};
-handle_call(_Message, _From, State) ->
- Res = error,
- {reply, Res, State}.
-
-handle_cast({accepted, Pid, _Timing},
- State=#mochiweb_socket_server{active_sockets=ActiveSockets}) ->
- State1 = State#mochiweb_socket_server{active_sockets=1 + ActiveSockets},
- {noreply, recycle_acceptor(Pid, State1)};
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
- mochiweb_socket:close(Listen),
- case Port < 1024 of
- true ->
- catch fdsrv:stop(),
- ok;
- false ->
- ok
- end.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-recycle_acceptor(Pid, State=#mochiweb_socket_server{
- acceptor_pool=Pool,
- listen=Listen,
- loop=Loop,
- active_sockets=ActiveSockets}) ->
- case sets:is_element(Pid, Pool) of
- true ->
- Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
- Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
- State#mochiweb_socket_server{acceptor_pool=Pool1};
- false ->
- State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
- end.
-
-handle_info({'EXIT', Pid, normal}, State) ->
- {noreply, recycle_acceptor(Pid, State)};
-handle_info({'EXIT', Pid, Reason},
- State=#mochiweb_socket_server{acceptor_pool=Pool}) ->
- case sets:is_element(Pid, Pool) of
- true ->
- %% If there was an unexpected error accepting, log and sleep.
- error_logger:error_report({?MODULE, ?LINE,
- {acceptor_error, Reason}}),
- timer:sleep(100);
- false ->
- ok
- end,
- {noreply, recycle_acceptor(Pid, State)};
-handle_info(Info, State) ->
- error_logger:info_report([{'INFO', Info}, {'State', State}]),
- {noreply, State}.
-
-
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_sup.erl b/1.1.x/src/mochiweb/mochiweb_sup.erl
deleted file mode 100644
index af7df9b3..00000000
--- a/1.1.x/src/mochiweb/mochiweb_sup.erl
+++ /dev/null
@@ -1,41 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Supervisor for the mochiweb application.
-
--module(mochiweb_sup).
--author('bob@mochimedia.com').
-
--behaviour(supervisor).
-
-%% External exports
--export([start_link/0, upgrade/0]).
-
-%% supervisor callbacks
--export([init/1]).
-
-%% @spec start_link() -> ServerRet
-%% @doc API for starting the supervisor.
-start_link() ->
- supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-
-%% @spec upgrade() -> ok
-%% @doc Add processes if necessary.
-upgrade() ->
- {ok, {_, Specs}} = init([]),
- [supervisor:start_child(?MODULE, Spec) || Spec <- Specs],
- ok.
-
-%% @spec init([]) -> SupervisorTree
-%% @doc Supervisor callback; returns the supervisor tree.
-init([]) ->
- Processes = [],
- {ok, {{one_for_one, 10, 10}, Processes}}.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.
diff --git a/1.1.x/src/mochiweb/mochiweb_util.erl b/1.1.x/src/mochiweb/mochiweb_util.erl
deleted file mode 100644
index 62ff0d06..00000000
--- a/1.1.x/src/mochiweb/mochiweb_util.erl
+++ /dev/null
@@ -1,973 +0,0 @@
-%% @author Bob Ippolito <bob@mochimedia.com>
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Utilities for parsing and quoting.
-
--module(mochiweb_util).
--author('bob@mochimedia.com').
--export([join/2, quote_plus/1, urlencode/1, parse_qs/1, unquote/1]).
--export([path_split/1]).
--export([urlsplit/1, urlsplit_path/1, urlunsplit/1, urlunsplit_path/1]).
--export([guess_mime/1, parse_header/1]).
--export([shell_quote/1, cmd/1, cmd_string/1, cmd_port/2, cmd_status/1]).
--export([record_to_proplist/2, record_to_proplist/3]).
--export([safe_relative_path/1, partition/2]).
--export([parse_qvalues/1, pick_accepted_encodings/3]).
--export([make_io/1]).
-
--define(PERCENT, 37). % $\%
--define(FULLSTOP, 46). % $\.
--define(IS_HEX(C), ((C >= $0 andalso C =< $9) orelse
- (C >= $a andalso C =< $f) orelse
- (C >= $A andalso C =< $F))).
--define(QS_SAFE(C), ((C >= $a andalso C =< $z) orelse
- (C >= $A andalso C =< $Z) orelse
- (C >= $0 andalso C =< $9) orelse
- (C =:= ?FULLSTOP orelse C =:= $- orelse C =:= $~ orelse
- C =:= $_))).
-
-hexdigit(C) when C < 10 -> $0 + C;
-hexdigit(C) when C < 16 -> $A + (C - 10).
-
-unhexdigit(C) when C >= $0, C =< $9 -> C - $0;
-unhexdigit(C) when C >= $a, C =< $f -> C - $a + 10;
-unhexdigit(C) when C >= $A, C =< $F -> C - $A + 10.
-
-%% @spec partition(String, Sep) -> {String, [], []} | {Prefix, Sep, Postfix}
-%% @doc Inspired by Python 2.5's str.partition:
-%% partition("foo/bar", "/") = {"foo", "/", "bar"},
-%% partition("foo", "/") = {"foo", "", ""}.
-partition(String, Sep) ->
- case partition(String, Sep, []) of
- undefined ->
- {String, "", ""};
- Result ->
- Result
- end.
-
-partition("", _Sep, _Acc) ->
- undefined;
-partition(S, Sep, Acc) ->
- case partition2(S, Sep) of
- undefined ->
- [C | Rest] = S,
- partition(Rest, Sep, [C | Acc]);
- Rest ->
- {lists:reverse(Acc), Sep, Rest}
- end.
-
-partition2(Rest, "") ->
- Rest;
-partition2([C | R1], [C | R2]) ->
- partition2(R1, R2);
-partition2(_S, _Sep) ->
- undefined.
-
-
-
-%% @spec safe_relative_path(string()) -> string() | undefined
-%% @doc Return the reduced version of a relative path or undefined if it
-%% is not safe. Safe relative paths can be joined with an absolute path
-%% and will result in a subdirectory of the absolute path.
-safe_relative_path("/" ++ _) ->
- undefined;
-safe_relative_path(P) ->
- safe_relative_path(P, []).
-
-safe_relative_path("", Acc) ->
- case Acc of
- [] ->
- "";
- _ ->
- string:join(lists:reverse(Acc), "/")
- end;
-safe_relative_path(P, Acc) ->
- case partition(P, "/") of
- {"", "/", _} ->
- %% /foo or foo//bar
- undefined;
- {"..", _, _} when Acc =:= [] ->
- undefined;
- {"..", _, Rest} ->
- safe_relative_path(Rest, tl(Acc));
- {Part, "/", ""} ->
- safe_relative_path("", ["", Part | Acc]);
- {Part, _, Rest} ->
- safe_relative_path(Rest, [Part | Acc])
- end.
-
-%% @spec shell_quote(string()) -> string()
-%% @doc Quote a string according to UNIX shell quoting rules, returns a string
-%% surrounded by double quotes.
-shell_quote(L) ->
- shell_quote(L, [$\"]).
-
-%% @spec cmd_port([string()], Options) -> port()
-%% @doc open_port({spawn, mochiweb_util:cmd_string(Argv)}, Options).
-cmd_port(Argv, Options) ->
- open_port({spawn, cmd_string(Argv)}, Options).
-
-%% @spec cmd([string()]) -> string()
-%% @doc os:cmd(cmd_string(Argv)).
-cmd(Argv) ->
- os:cmd(cmd_string(Argv)).
-
-%% @spec cmd_string([string()]) -> string()
-%% @doc Create a shell quoted command string from a list of arguments.
-cmd_string(Argv) ->
- string:join([shell_quote(X) || X <- Argv], " ").
-
-%% @spec cmd_status([string()]) -> {ExitStatus::integer(), Stdout::binary()}
-%% @doc Accumulate the output and exit status from the given application,
-%% which is spawned with cmd_port/2.
-cmd_status(Argv) ->
- Port = cmd_port(Argv, [exit_status, stderr_to_stdout,
- use_stdio, binary]),
- try cmd_loop(Port, [])
- after catch port_close(Port)
- end.
-
-%% @spec cmd_loop(port(), list()) -> {ExitStatus::integer(), Stdout::binary()}
-%% @doc Accumulate the output and exit status from a port.
-cmd_loop(Port, Acc) ->
- receive
- {Port, {exit_status, Status}} ->
- {Status, iolist_to_binary(lists:reverse(Acc))};
- {Port, {data, Data}} ->
- cmd_loop(Port, [Data | Acc])
- end.
-
-%% @spec join([iolist()], iolist()) -> iolist()
-%% @doc Join a list of strings or binaries together with the given separator
-%% string, char, or binary. The output is flattened, but may be an
-%% iolist() instead of a string() if any of the inputs are binary().
-join([], _Separator) ->
- [];
-join([S], _Separator) ->
- lists:flatten(S);
-join(Strings, Separator) ->
- lists:flatten(revjoin(lists:reverse(Strings), Separator, [])).
-
-revjoin([], _Separator, Acc) ->
- Acc;
-revjoin([S | Rest], Separator, []) ->
- revjoin(Rest, Separator, [S]);
-revjoin([S | Rest], Separator, Acc) ->
- revjoin(Rest, Separator, [S, Separator | Acc]).
-
-%% @spec quote_plus(atom() | integer() | float() | string() | binary()) -> string()
-%% @doc URL safe encoding of the given term.
-quote_plus(Atom) when is_atom(Atom) ->
- quote_plus(atom_to_list(Atom));
-quote_plus(Int) when is_integer(Int) ->
- quote_plus(integer_to_list(Int));
-quote_plus(Binary) when is_binary(Binary) ->
- quote_plus(binary_to_list(Binary));
-quote_plus(Float) when is_float(Float) ->
- quote_plus(mochinum:digits(Float));
-quote_plus(String) ->
- quote_plus(String, []).
-
-quote_plus([], Acc) ->
- lists:reverse(Acc);
-quote_plus([C | Rest], Acc) when ?QS_SAFE(C) ->
- quote_plus(Rest, [C | Acc]);
-quote_plus([$\s | Rest], Acc) ->
- quote_plus(Rest, [$+ | Acc]);
-quote_plus([C | Rest], Acc) ->
- <<Hi:4, Lo:4>> = <<C>>,
- quote_plus(Rest, [hexdigit(Lo), hexdigit(Hi), ?PERCENT | Acc]).
-
-%% @spec urlencode([{Key, Value}]) -> string()
-%% @doc URL encode the property list.
-urlencode(Props) ->
- Pairs = lists:foldr(
- fun ({K, V}, Acc) ->
- [quote_plus(K) ++ "=" ++ quote_plus(V) | Acc]
- end, [], Props),
- string:join(Pairs, "&").
-
-%% @spec parse_qs(string() | binary()) -> [{Key, Value}]
-%% @doc Parse a query string or application/x-www-form-urlencoded.
-parse_qs(Binary) when is_binary(Binary) ->
- parse_qs(binary_to_list(Binary));
-parse_qs(String) ->
- parse_qs(String, []).
-
-parse_qs([], Acc) ->
- lists:reverse(Acc);
-parse_qs(String, Acc) ->
- {Key, Rest} = parse_qs_key(String),
- {Value, Rest1} = parse_qs_value(Rest),
- parse_qs(Rest1, [{Key, Value} | Acc]).
-
-parse_qs_key(String) ->
- parse_qs_key(String, []).
-
-parse_qs_key([], Acc) ->
- {qs_revdecode(Acc), ""};
-parse_qs_key([$= | Rest], Acc) ->
- {qs_revdecode(Acc), Rest};
-parse_qs_key(Rest=[$; | _], Acc) ->
- {qs_revdecode(Acc), Rest};
-parse_qs_key(Rest=[$& | _], Acc) ->
- {qs_revdecode(Acc), Rest};
-parse_qs_key([C | Rest], Acc) ->
- parse_qs_key(Rest, [C | Acc]).
-
-parse_qs_value(String) ->
- parse_qs_value(String, []).
-
-parse_qs_value([], Acc) ->
- {qs_revdecode(Acc), ""};
-parse_qs_value([$; | Rest], Acc) ->
- {qs_revdecode(Acc), Rest};
-parse_qs_value([$& | Rest], Acc) ->
- {qs_revdecode(Acc), Rest};
-parse_qs_value([C | Rest], Acc) ->
- parse_qs_value(Rest, [C | Acc]).
-
-%% @spec unquote(string() | binary()) -> string()
-%% @doc Unquote a URL encoded string.
-unquote(Binary) when is_binary(Binary) ->
- unquote(binary_to_list(Binary));
-unquote(String) ->
- qs_revdecode(lists:reverse(String)).
-
-qs_revdecode(S) ->
- qs_revdecode(S, []).
-
-qs_revdecode([], Acc) ->
- Acc;
-qs_revdecode([$+ | Rest], Acc) ->
- qs_revdecode(Rest, [$\s | Acc]);
-qs_revdecode([Lo, Hi, ?PERCENT | Rest], Acc) when ?IS_HEX(Lo), ?IS_HEX(Hi) ->
- qs_revdecode(Rest, [(unhexdigit(Lo) bor (unhexdigit(Hi) bsl 4)) | Acc]);
-qs_revdecode([C | Rest], Acc) ->
- qs_revdecode(Rest, [C | Acc]).
-
-%% @spec urlsplit(Url) -> {Scheme, Netloc, Path, Query, Fragment}
-%% @doc Return a 5-tuple; does not expand % escapes. Only supports
-%% HTTP-style URLs.
-urlsplit(Url) ->
- {Scheme, Url1} = urlsplit_scheme(Url),
- {Netloc, Url2} = urlsplit_netloc(Url1),
- {Path, Query, Fragment} = urlsplit_path(Url2),
- {Scheme, Netloc, Path, Query, Fragment}.
-
-urlsplit_scheme(Url) ->
- case urlsplit_scheme(Url, []) of
- no_scheme ->
- {"", Url};
- Res ->
- Res
- end.
-
-urlsplit_scheme([C | Rest], Acc) when ((C >= $a andalso C =< $z) orelse
- (C >= $A andalso C =< $Z) orelse
- (C >= $0 andalso C =< $9) orelse
- C =:= $+ orelse C =:= $- orelse
- C =:= $.) ->
- urlsplit_scheme(Rest, [C | Acc]);
-urlsplit_scheme([$: | Rest], Acc=[_ | _]) ->
- {string:to_lower(lists:reverse(Acc)), Rest};
-urlsplit_scheme(_Rest, _Acc) ->
- no_scheme.
-
-urlsplit_netloc("//" ++ Rest) ->
- urlsplit_netloc(Rest, []);
-urlsplit_netloc(Path) ->
- {"", Path}.
-
-urlsplit_netloc("", Acc) ->
- {lists:reverse(Acc), ""};
-urlsplit_netloc(Rest=[C | _], Acc) when C =:= $/; C =:= $?; C =:= $# ->
- {lists:reverse(Acc), Rest};
-urlsplit_netloc([C | Rest], Acc) ->
- urlsplit_netloc(Rest, [C | Acc]).
-
-
-%% @spec path_split(string()) -> {Part, Rest}
-%% @doc Split a path starting from the left, as in URL traversal.
-%% path_split("foo/bar") = {"foo", "bar"},
-%% path_split("/foo/bar") = {"", "foo/bar"}.
-path_split(S) ->
- path_split(S, []).
-
-path_split("", Acc) ->
- {lists:reverse(Acc), ""};
-path_split("/" ++ Rest, Acc) ->
- {lists:reverse(Acc), Rest};
-path_split([C | Rest], Acc) ->
- path_split(Rest, [C | Acc]).
-
-
-%% @spec urlunsplit({Scheme, Netloc, Path, Query, Fragment}) -> string()
-%% @doc Assemble a URL from the 5-tuple. Path must be absolute.
-urlunsplit({Scheme, Netloc, Path, Query, Fragment}) ->
- lists:flatten([case Scheme of "" -> ""; _ -> [Scheme, "://"] end,
- Netloc,
- urlunsplit_path({Path, Query, Fragment})]).
-
-%% @spec urlunsplit_path({Path, Query, Fragment}) -> string()
-%% @doc Assemble a URL path from the 3-tuple.
-urlunsplit_path({Path, Query, Fragment}) ->
- lists:flatten([Path,
- case Query of "" -> ""; _ -> [$? | Query] end,
- case Fragment of "" -> ""; _ -> [$# | Fragment] end]).
-
-%% @spec urlsplit_path(Url) -> {Path, Query, Fragment}
-%% @doc Return a 3-tuple; does not expand % escapes. Only supports
-%% HTTP-style paths.
-urlsplit_path(Path) ->
- urlsplit_path(Path, []).
-
-urlsplit_path("", Acc) ->
- {lists:reverse(Acc), "", ""};
-urlsplit_path("?" ++ Rest, Acc) ->
- {Query, Fragment} = urlsplit_query(Rest),
- {lists:reverse(Acc), Query, Fragment};
-urlsplit_path("#" ++ Rest, Acc) ->
- {lists:reverse(Acc), "", Rest};
-urlsplit_path([C | Rest], Acc) ->
- urlsplit_path(Rest, [C | Acc]).
-
-urlsplit_query(Query) ->
- urlsplit_query(Query, []).
-
-urlsplit_query("", Acc) ->
- {lists:reverse(Acc), ""};
-urlsplit_query("#" ++ Rest, Acc) ->
- {lists:reverse(Acc), Rest};
-urlsplit_query([C | Rest], Acc) ->
- urlsplit_query(Rest, [C | Acc]).
-
-%% @spec guess_mime(string()) -> string()
-%% @doc Guess the mime type of a file by the extension of its filename.
-guess_mime(File) ->
- case mochiweb_mime:from_extension(filename:extension(File)) of
- undefined ->
- "text/plain";
- Mime ->
- Mime
- end.
-
-%% @spec parse_header(string()) -> {Type, [{K, V}]}
-%% @doc Parse a Content-Type-like header, returning the main Content-Type
-%% and a property list of options.
-parse_header(String) ->
- %% TODO: This is exactly as broken as Python's cgi module.
- %% Should parse properly like mochiweb_cookies.
- [Type | Parts] = [string:strip(S) || S <- string:tokens(String, ";")],
- F = fun (S, Acc) ->
- case lists:splitwith(fun (C) -> C =/= $= end, S) of
- {"", _} ->
- %% Skip anything with no name
- Acc;
- {_, ""} ->
- %% Skip anything with no value
- Acc;
- {Name, [$\= | Value]} ->
- [{string:to_lower(string:strip(Name)),
- unquote_header(string:strip(Value))} | Acc]
- end
- end,
- {string:to_lower(Type),
- lists:foldr(F, [], Parts)}.
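-
-%% Example (editor's sketch):
-%%   parse_header("text/html; charset=UTF-8") ->
-%%       {"text/html", [{"charset", "UTF-8"}]}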
-
-unquote_header("\"" ++ Rest) ->
- unquote_header(Rest, []);
-unquote_header(S) ->
- S.
-
-unquote_header("", Acc) ->
- lists:reverse(Acc);
-unquote_header("\"", Acc) ->
- lists:reverse(Acc);
-unquote_header([$\\, C | Rest], Acc) ->
- unquote_header(Rest, [C | Acc]);
-unquote_header([C | Rest], Acc) ->
- unquote_header(Rest, [C | Acc]).
-
-%% @spec record_to_proplist(Record, Fields) -> proplist()
-%% @doc Calls record_to_proplist/3 with a default TypeKey of '__record'.
-record_to_proplist(Record, Fields) ->
- record_to_proplist(Record, Fields, '__record').
-
-%% @spec record_to_proplist(Record, Fields, TypeKey) -> proplist()
-%% @doc Return a proplist of the given Record with each field in the
-%% Fields list set as a key with the corresponding value in the Record.
-%% TypeKey is the key that is used to store the record type.
-%% Fields should be obtained by calling record_info(fields, record_type),
-%% where record_type is the record type of Record.
-record_to_proplist(Record, Fields, TypeKey)
- when tuple_size(Record) - 1 =:= length(Fields) ->
- lists:zip([TypeKey | Fields], tuple_to_list(Record)).
-
-
-shell_quote([], Acc) ->
- lists:reverse([$\" | Acc]);
-shell_quote([C | Rest], Acc) when C =:= $\" orelse C =:= $\` orelse
- C =:= $\\ orelse C =:= $\$ ->
- shell_quote(Rest, [C, $\\ | Acc]);
-shell_quote([C | Rest], Acc) ->
- shell_quote(Rest, [C | Acc]).
-
-%% @spec parse_qvalues(string()) -> [qvalue()] | invalid_qvalue_string
-%% @type qvalue() = {media_type() | encoding() , float()}.
-%% @type media_type() = string().
-%% @type encoding() = string().
-%%
-%% @doc Parses a list (given as a string) of elements with Q values associated
-%% to them. Elements are separated by commas and each element is separated
-%% from its Q value by a semicolon. Q values are optional; when missing,
-%% the Q value of an element defaults to 1.0. A Q value is always in the
-%% range [0.0, 1.0]. A Q value list is used, for example, as the value of the
-%% HTTP "Accept" and "Accept-Encoding" headers.
-%%
-%% Q values are described in section 3.9 of RFC 2616 (HTTP/1.1).
-%%
-%% Example:
-%%
-%% parse_qvalues("gzip; q=0.5, deflate, identity;q=0.0") ->
-%% [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}]
-%%
-parse_qvalues(QValuesStr) ->
- try
- lists:map(
- fun(Pair) ->
- [Type | Params] = string:tokens(Pair, ";"),
- NormParams = normalize_media_params(Params),
- {Q, NonQParams} = extract_q(NormParams),
- {string:join([string:strip(Type) | NonQParams], ";"), Q}
- end,
- string:tokens(string:to_lower(QValuesStr), ",")
- )
- catch
- _Type:_Error ->
- invalid_qvalue_string
- end.
-
-normalize_media_params(Params) ->
- {ok, Re} = re:compile("\\s"),
- normalize_media_params(Re, Params, []).
-
-normalize_media_params(_Re, [], Acc) ->
- lists:reverse(Acc);
-normalize_media_params(Re, [Param | Rest], Acc) ->
- NormParam = re:replace(Param, Re, "", [global, {return, list}]),
- normalize_media_params(Re, Rest, [NormParam | Acc]).
-
-extract_q(NormParams) ->
- {ok, KVRe} = re:compile("^([^=]+)=([^=]+)$"),
- {ok, QRe} = re:compile("^((?:0|1)(?:\\.\\d{1,3})?)$"),
- extract_q(KVRe, QRe, NormParams, []).
-
-extract_q(_KVRe, _QRe, [], Acc) ->
- {1.0, lists:reverse(Acc)};
-extract_q(KVRe, QRe, [Param | Rest], Acc) ->
- case re:run(Param, KVRe, [{capture, [1, 2], list}]) of
- {match, [Name, Value]} ->
- case Name of
- "q" ->
- {match, [Q]} = re:run(Value, QRe, [{capture, [1], list}]),
- QVal = case Q of
- "0" ->
- 0.0;
- "1" ->
- 1.0;
- Else ->
- list_to_float(Else)
- end,
- case QVal < 0.0 orelse QVal > 1.0 of
- false ->
- {QVal, lists:reverse(Acc) ++ Rest}
- end;
- _ ->
- extract_q(KVRe, QRe, Rest, [Param | Acc])
- end
- end.
-
-%% @spec pick_accepted_encodings([qvalue()], [encoding()], encoding()) ->
-%% [encoding()]
-%%
-%% @doc Determines which encodings specified in the given Q values list are
-%% valid according to a list of supported encodings and a default encoding.
-%%
-%% The returned list of encodings is sorted in descending order of the
-%% Q values in the given list. The last element of this list is the given
-%% default encoding unless this encoding is explicitly or implicitly
-%% marked with a Q value of 0.0 in the given Q values list.
-%% Note: encodings with the same Q value are kept in the same order as
-%% found in the input Q values list.
-%%
-%% This encoding picking process is described in section 14.3 of
-%% RFC 2616 (HTTP/1.1).
-%%
-%% Example:
-%%
-%% pick_accepted_encodings(
-%% [{"gzip", 0.5}, {"deflate", 1.0}],
-%% ["gzip", "identity"],
-%% "identity"
-%% ) ->
-%% ["gzip", "identity"]
-%%
-pick_accepted_encodings(AcceptedEncs, SupportedEncs, DefaultEnc) ->
- SortedQList = lists:reverse(
- lists:sort(fun({_, Q1}, {_, Q2}) -> Q1 < Q2 end, AcceptedEncs)
- ),
- {Accepted, Refused} = lists:foldr(
- fun({E, Q}, {A, R}) ->
- case Q > 0.0 of
- true ->
- {[E | A], R};
- false ->
- {A, [E | R]}
- end
- end,
- {[], []},
- SortedQList
- ),
- Refused1 = lists:foldr(
- fun(Enc, Acc) ->
- case Enc of
- "*" ->
- lists:subtract(SupportedEncs, Accepted) ++ Acc;
- _ ->
- [Enc | Acc]
- end
- end,
- [],
- Refused
- ),
- Accepted1 = lists:foldr(
- fun(Enc, Acc) ->
- case Enc of
- "*" ->
- lists:subtract(SupportedEncs, Accepted ++ Refused1) ++ Acc;
- _ ->
- [Enc | Acc]
- end
- end,
- [],
- Accepted
- ),
- Accepted2 = case lists:member(DefaultEnc, Accepted1) of
- true ->
- Accepted1;
- false ->
- Accepted1 ++ [DefaultEnc]
- end,
- [E || E <- Accepted2, lists:member(E, SupportedEncs),
- not lists:member(E, Refused1)].
-
-make_io(Atom) when is_atom(Atom) ->
- atom_to_list(Atom);
-make_io(Integer) when is_integer(Integer) ->
- integer_to_list(Integer);
-make_io(Io) when is_list(Io); is_binary(Io) ->
- Io.
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-make_io_test() ->
- ?assertEqual(
- <<"atom">>,
- iolist_to_binary(make_io(atom))),
- ?assertEqual(
- <<"20">>,
- iolist_to_binary(make_io(20))),
- ?assertEqual(
- <<"list">>,
- iolist_to_binary(make_io("list"))),
- ?assertEqual(
- <<"binary">>,
- iolist_to_binary(make_io(<<"binary">>))),
- ok.
-
--record(test_record, {field1=f1, field2=f2}).
-record_to_proplist_test() ->
- ?assertEqual(
- [{'__record', test_record},
- {field1, f1},
- {field2, f2}],
- record_to_proplist(#test_record{}, record_info(fields, test_record))),
- ?assertEqual(
- [{'typekey', test_record},
- {field1, f1},
- {field2, f2}],
- record_to_proplist(#test_record{},
- record_info(fields, test_record),
- typekey)),
- ok.
-
-shell_quote_test() ->
- ?assertEqual(
- "\"foo \\$bar\\\"\\`' baz\"",
- shell_quote("foo $bar\"`' baz")),
- ok.
-
-cmd_port_test_spool(Port, Acc) ->
- receive
- {Port, eof} ->
- Acc;
- {Port, {data, {eol, Data}}} ->
- cmd_port_test_spool(Port, ["\n", Data | Acc]);
- {Port, Unknown} ->
- throw({unknown, Unknown})
- after 100 ->
- throw(timeout)
- end.
-
-cmd_port_test() ->
- Port = cmd_port(["echo", "$bling$ `word`!"],
- [eof, stream, {line, 4096}]),
- Res = try lists:append(lists:reverse(cmd_port_test_spool(Port, [])))
- after catch port_close(Port)
- end,
- self() ! {Port, wtf},
- try cmd_port_test_spool(Port, [])
- catch throw:{unknown, wtf} -> ok
- end,
- try cmd_port_test_spool(Port, [])
- catch throw:timeout -> ok
- end,
- ?assertEqual(
- "$bling$ `word`!\n",
- Res).
-
-cmd_test() ->
- ?assertEqual(
- "$bling$ `word`!\n",
- cmd(["echo", "$bling$ `word`!"])),
- ok.
-
-cmd_string_test() ->
- ?assertEqual(
- "\"echo\" \"\\$bling\\$ \\`word\\`!\"",
- cmd_string(["echo", "$bling$ `word`!"])),
- ok.
-
-cmd_status_test() ->
- ?assertEqual(
- {0, <<"$bling$ `word`!\n">>},
- cmd_status(["echo", "$bling$ `word`!"])),
- ok.
-
-
-parse_header_test() ->
- ?assertEqual(
- {"multipart/form-data", [{"boundary", "AaB03x"}]},
- parse_header("multipart/form-data; boundary=AaB03x")),
- %% This tests (currently) intentionally broken behavior
- ?assertEqual(
- {"multipart/form-data",
- [{"b", ""},
- {"cgi", "is"},
- {"broken", "true\"e"}]},
- parse_header("multipart/form-data;b=;cgi=\"i\\s;broken=true\"e;=z;z")),
- ok.
-
-guess_mime_test() ->
- "text/plain" = guess_mime(""),
- "text/plain" = guess_mime(".text"),
- "application/zip" = guess_mime(".zip"),
- "application/zip" = guess_mime("x.zip"),
- "text/html" = guess_mime("x.html"),
- "application/xhtml+xml" = guess_mime("x.xhtml"),
- ok.
-
-path_split_test() ->
- {"", "foo/bar"} = path_split("/foo/bar"),
- {"foo", "bar"} = path_split("foo/bar"),
- {"bar", ""} = path_split("bar"),
- ok.
-
-urlsplit_test() ->
- {"", "", "/foo", "", "bar?baz"} = urlsplit("/foo#bar?baz"),
- {"http", "host:port", "/foo", "", "bar?baz"} =
- urlsplit("http://host:port/foo#bar?baz"),
- {"http", "host", "", "", ""} = urlsplit("http://host"),
- {"", "", "/wiki/Category:Fruit", "", ""} =
- urlsplit("/wiki/Category:Fruit"),
- ok.
-
-urlsplit_path_test() ->
- {"/foo/bar", "", ""} = urlsplit_path("/foo/bar"),
- {"/foo", "baz", ""} = urlsplit_path("/foo?baz"),
- {"/foo", "", "bar?baz"} = urlsplit_path("/foo#bar?baz"),
- {"/foo", "", "bar?baz#wibble"} = urlsplit_path("/foo#bar?baz#wibble"),
- {"/foo", "bar", "baz"} = urlsplit_path("/foo?bar#baz"),
- {"/foo", "bar?baz", "baz"} = urlsplit_path("/foo?bar?baz#baz"),
- ok.
-
-urlunsplit_test() ->
- "/foo#bar?baz" = urlunsplit({"", "", "/foo", "", "bar?baz"}),
- "http://host:port/foo#bar?baz" =
- urlunsplit({"http", "host:port", "/foo", "", "bar?baz"}),
- ok.
-
-urlunsplit_path_test() ->
- "/foo/bar" = urlunsplit_path({"/foo/bar", "", ""}),
- "/foo?baz" = urlunsplit_path({"/foo", "baz", ""}),
- "/foo#bar?baz" = urlunsplit_path({"/foo", "", "bar?baz"}),
- "/foo#bar?baz#wibble" = urlunsplit_path({"/foo", "", "bar?baz#wibble"}),
- "/foo?bar#baz" = urlunsplit_path({"/foo", "bar", "baz"}),
- "/foo?bar?baz#baz" = urlunsplit_path({"/foo", "bar?baz", "baz"}),
- ok.
-
-join_test() ->
- ?assertEqual("foo,bar,baz",
- join(["foo", "bar", "baz"], $,)),
- ?assertEqual("foo,bar,baz",
- join(["foo", "bar", "baz"], ",")),
- ?assertEqual("foo bar",
- join([["foo", " bar"]], ",")),
- ?assertEqual("foo bar,baz",
- join([["foo", " bar"], "baz"], ",")),
- ?assertEqual("foo",
- join(["foo"], ",")),
- ?assertEqual("foobarbaz",
- join(["foo", "bar", "baz"], "")),
- ?assertEqual("foo" ++ [<<>>] ++ "bar" ++ [<<>>] ++ "baz",
- join(["foo", "bar", "baz"], <<>>)),
- ?assertEqual("foobar" ++ [<<"baz">>],
- join(["foo", "bar", <<"baz">>], "")),
- ?assertEqual("",
- join([], "any")),
- ok.
-
-quote_plus_test() ->
- "foo" = quote_plus(foo),
- "1" = quote_plus(1),
- "1.1" = quote_plus(1.1),
- "foo" = quote_plus("foo"),
- "foo+bar" = quote_plus("foo bar"),
- "foo%0A" = quote_plus("foo\n"),
- "foo%0A" = quote_plus("foo\n"),
- "foo%3B%26%3D" = quote_plus("foo;&="),
- "foo%3B%26%3D" = quote_plus(<<"foo;&=">>),
- ok.
-
-unquote_test() ->
- ?assertEqual("foo bar",
- unquote("foo+bar")),
- ?assertEqual("foo bar",
- unquote("foo%20bar")),
- ?assertEqual("foo\r\n",
- unquote("foo%0D%0A")),
- ?assertEqual("foo\r\n",
- unquote(<<"foo%0D%0A">>)),
- ok.
-
-urlencode_test() ->
- "foo=bar&baz=wibble+%0D%0A&z=1" = urlencode([{foo, "bar"},
- {"baz", "wibble \r\n"},
- {z, 1}]),
- ok.
-
-parse_qs_test() ->
- ?assertEqual(
- [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
- parse_qs("foo=bar&baz=wibble+%0D%0a&z=1")),
- ?assertEqual(
- [{"", "bar"}, {"baz", "wibble \r\n"}, {"z", ""}],
- parse_qs("=bar&baz=wibble+%0D%0a&z=")),
- ?assertEqual(
- [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
- parse_qs(<<"foo=bar&baz=wibble+%0D%0a&z=1">>)),
- ?assertEqual(
- [],
- parse_qs("")),
- ?assertEqual(
- [{"foo", ""}, {"bar", ""}, {"baz", ""}],
- parse_qs("foo;bar&baz")),
- ok.
-
-partition_test() ->
- {"foo", "", ""} = partition("foo", "/"),
- {"foo", "/", "bar"} = partition("foo/bar", "/"),
- {"foo", "/", ""} = partition("foo/", "/"),
- {"", "/", "bar"} = partition("/bar", "/"),
- {"f", "oo/ba", "r"} = partition("foo/bar", "oo/ba"),
- ok.
-
-safe_relative_path_test() ->
- "foo" = safe_relative_path("foo"),
- "foo/" = safe_relative_path("foo/"),
- "foo" = safe_relative_path("foo/bar/.."),
- "bar" = safe_relative_path("foo/../bar"),
- "bar/" = safe_relative_path("foo/../bar/"),
- "" = safe_relative_path("foo/.."),
- "" = safe_relative_path("foo/../"),
- undefined = safe_relative_path("/foo"),
- undefined = safe_relative_path("../foo"),
- undefined = safe_relative_path("foo/../.."),
- undefined = safe_relative_path("foo//"),
- ok.
-
-parse_qvalues_test() ->
- [] = parse_qvalues(""),
- [{"identity", 0.0}] = parse_qvalues("identity;q=0"),
- [{"identity", 0.0}] = parse_qvalues("identity ;q=0"),
- [{"identity", 0.0}] = parse_qvalues(" identity; q =0 "),
- [{"identity", 0.0}] = parse_qvalues("identity ; q = 0"),
- [{"identity", 0.0}] = parse_qvalues("identity ; q= 0.0"),
- [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "gzip,deflate,identity;q=0.0"
- ),
- [{"deflate", 1.0}, {"gzip", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "deflate,gzip,identity;q=0.0"
- ),
- [{"gzip", 1.0}, {"deflate", 1.0}, {"gzip", 1.0}, {"identity", 0.0}] =
- parse_qvalues("gzip,deflate,gzip,identity;q=0"),
- [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "gzip, deflate , identity; q=0.0"
- ),
- [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "gzip; q=1, deflate;q=1.0, identity;q=0.0"
- ),
- [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "gzip; q=0.5, deflate;q=1.0, identity;q=0"
- ),
- [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
- "gzip; q=0.5, deflate , identity;q=0.0"
- ),
- [{"gzip", 0.5}, {"deflate", 0.8}, {"identity", 0.0}] = parse_qvalues(
- "gzip; q=0.5, deflate;q=0.8, identity;q=0.0"
- ),
- [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 1.0}] = parse_qvalues(
- "gzip; q=0.5,deflate,identity"
- ),
- [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 1.0}, {"identity", 1.0}] =
- parse_qvalues("gzip; q=0.5,deflate,identity, identity "),
- [{"text/html;level=1", 1.0}, {"text/plain", 0.5}] =
- parse_qvalues("text/html;level=1, text/plain;q=0.5"),
- [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
- parse_qvalues("text/html;level=1;q=0.3, text/plain"),
- [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
- parse_qvalues("text/html; level = 1; q = 0.3, text/plain"),
- [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
- parse_qvalues("text/html;q=0.3;level=1, text/plain"),
- invalid_qvalue_string = parse_qvalues("gzip; q=1.1, deflate"),
- invalid_qvalue_string = parse_qvalues("gzip; q=0.5, deflate;q=2"),
- invalid_qvalue_string = parse_qvalues("gzip, deflate;q=AB"),
- invalid_qvalue_string = parse_qvalues("gzip; q=2.1, deflate"),
- invalid_qvalue_string = parse_qvalues("gzip; q=0.1234, deflate"),
- invalid_qvalue_string = parse_qvalues("text/html;level=1;q=0.3, text/html;level"),
- ok.
-
-pick_accepted_encodings_test() ->
- ["identity"] = pick_accepted_encodings(
- [],
- ["gzip", "identity"],
- "identity"
- ),
- ["gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 1.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["identity"] = pick_accepted_encodings(
- [{"gzip", 0.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"deflate", 1.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 0.5}, {"deflate", 1.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["identity"] = pick_accepted_encodings(
- [{"gzip", 0.0}, {"deflate", 0.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["gzip"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}],
- ["gzip", "identity"],
- "identity"
- ),
- ["gzip", "deflate", "identity"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"deflate", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "deflate"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["deflate", "gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 0.2}, {"deflate", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["deflate", "deflate", "gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 0.2}, {"deflate", 1.0}, {"deflate", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["deflate", "gzip", "gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 0.2}, {"deflate", 1.0}, {"gzip", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "deflate", "gzip", "identity"] = pick_accepted_encodings(
- [{"gzip", 0.2}, {"deflate", 0.9}, {"gzip", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- [] = pick_accepted_encodings(
- [{"*", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "deflate", "identity"] = pick_accepted_encodings(
- [{"*", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "deflate", "identity"] = pick_accepted_encodings(
- [{"*", 0.6}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"*", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "deflate"] = pick_accepted_encodings(
- [{"gzip", 1.0}, {"deflate", 0.6}, {"*", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["deflate", "gzip"] = pick_accepted_encodings(
- [{"gzip", 0.5}, {"deflate", 1.0}, {"*", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "identity"] = pick_accepted_encodings(
- [{"deflate", 0.0}, {"*", 1.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ["gzip", "identity"] = pick_accepted_encodings(
- [{"*", 1.0}, {"deflate", 0.0}],
- ["gzip", "deflate", "identity"],
- "identity"
- ),
- ok.
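-%% Editorial note (not in the original file): the cases above pin down
-%% the wildcard rules: "*" stands for every supported coding not named
-%% explicitly, a qvalue of 0 marks a coding as unacceptable, results are
-%% ordered by descending qvalue, and "identity" remains acceptable
-%% unless it (or "*") is explicitly given q=0.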
-
--endif.
diff --git a/1.1.x/src/mochiweb/reloader.erl b/1.1.x/src/mochiweb/reloader.erl
deleted file mode 100644
index c0f5de88..00000000
--- a/1.1.x/src/mochiweb/reloader.erl
+++ /dev/null
@@ -1,161 +0,0 @@
-%% @copyright 2007 Mochi Media, Inc.
-%% @author Matthew Dempsky <matthew@mochimedia.com>
-%%
-%% @doc Erlang module for automatically reloading modified modules
-%% during development.
-
--module(reloader).
--author("Matthew Dempsky <matthew@mochimedia.com>").
-
--include_lib("kernel/include/file.hrl").
-
--behaviour(gen_server).
--export([start/0, start_link/0]).
--export([stop/0]).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
--export([all_changed/0]).
--export([is_changed/1]).
--export([reload_modules/1]).
--record(state, {last, tref}).
-
-%% External API
-
-%% @spec start() -> ServerRet
-%% @doc Start the reloader.
-start() ->
- gen_server:start({local, ?MODULE}, ?MODULE, [], []).
-
-%% @spec start_link() -> ServerRet
-%% @doc Start the reloader, linked to the calling process.
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-%% @spec stop() -> ok
-%% @doc Stop the reloader.
-stop() ->
- gen_server:call(?MODULE, stop).
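-%% A minimal usage sketch (editorial, not part of the original module;
-%% the pid shown is illustrative):
-%%
-%%   1> reloader:start().
-%%   {ok,<0.42.0>}
-%%   %% edit and recompile any loaded module; within about a second the
-%%   %% reloader picks up the new .beam and swaps it in.
-%%   2> reloader:stop().
-%%   stopped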
-
-%% gen_server callbacks
-
-%% @spec init([]) -> {ok, State}
-%% @doc gen_server init; starts the one-second scan timer and records
-%% the current time as the initial state.
-init([]) ->
- {ok, TRef} = timer:send_interval(timer:seconds(1), doit),
- {ok, #state{last = stamp(), tref = TRef}}.
-
-%% @spec handle_call(Args, From, State) -> tuple()
-%% @doc gen_server callback.
-handle_call(stop, _From, State) ->
- {stop, shutdown, stopped, State};
-handle_call(_Req, _From, State) ->
- {reply, {error, badrequest}, State}.
-
-%% @spec handle_cast(Cast, State) -> tuple()
-%% @doc gen_server callback.
-handle_cast(_Req, State) ->
- {noreply, State}.
-
-%% @spec handle_info(Info, State) -> tuple()
-%% @doc gen_server callback.
-handle_info(doit, State) ->
- Now = stamp(),
- doit(State#state.last, Now),
- {noreply, State#state{last = Now}};
-handle_info(_Info, State) ->
- {noreply, State}.
-
-%% @spec terminate(Reason, State) -> ok
-%% @doc gen_server termination callback.
-terminate(_Reason, State) ->
- {ok, cancel} = timer:cancel(State#state.tref),
- ok.
-
-
-%% @spec code_change(_OldVsn, State, _Extra) -> {ok, State}
-%% @doc gen_server code_change callback (trivial).
-code_change(_Vsn, State, _Extra) ->
- {ok, State}.
-
-%% @spec reload_modules([atom()]) -> [{module, atom()} | {error, term()}]
-%% @doc code:purge/1 and code:load_file/1 each module in the given list,
-%% in order; returns the list of code:load_file/1 results.
-reload_modules(Modules) ->
- [begin code:purge(M), code:load_file(M) end || M <- Modules].
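-%% Editorial sketch (hypothetical module name `foo'): reload everything
-%% that all_changed/0 reports, or name modules by hand:
-%%
-%%   1> reloader:reload_modules(reloader:all_changed()).
-%%   [{module,foo}]
-%%
-%% Each element is the code:load_file/1 result, i.e. {module, M} on
-%% success or {error, Reason} on failure.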
-
-%% @spec all_changed() -> [atom()]
-%% @doc Return a list of beam modules that have changed.
-all_changed() ->
- [M || {M, Fn} <- code:all_loaded(), is_list(Fn), is_changed(M)].
-
-%% @spec is_changed(atom()) -> boolean()
-%% @doc Returns true if the loaded module is a beam with a vsn attribute
-%% whose version does not match the on-disk beam file; false otherwise.
-is_changed(M) ->
- try
- module_vsn(M:module_info()) =/= module_vsn(code:get_object_code(M))
- catch _:_ ->
- false
- end.
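-%% Editorial note (not in the original file): the catch-all makes
-%% is_changed/1 conservative. code:get_object_code/1 returns the atom
-%% error for modules without an on-disk beam (e.g. preloaded or
-%% cover-compiled ones); the tuple match in module_vsn/1 then fails, the
-%% exception is swallowed, and the module is reported as unchanged.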
-
-%% Internal API
-
-module_vsn({M, Beam, _Fn}) ->
- {ok, {M, Vsn}} = beam_lib:version(Beam),
- Vsn;
-module_vsn(L) when is_list(L) ->
- {_, Attrs} = lists:keyfind(attributes, 1, L),
- {_, Vsn} = lists:keyfind(vsn, 1, Attrs),
- Vsn.
-
-doit(From, To) ->
- [case file:read_file_info(Filename) of
- {ok, #file_info{mtime = Mtime}} when Mtime >= From, Mtime < To ->
- reload(Module);
- {ok, _} ->
- unmodified;
- {error, enoent} ->
- %% The Erlang compiler deletes existing .beam files if
- %% recompiling fails. Maybe it's worth spitting out a
- %% warning here, but I'd want to limit it to just once.
- gone;
- {error, Reason} ->
- io:format("Error reading ~s's file info: ~p~n",
- [Filename, Reason]),
- error
- end || {Module, Filename} <- code:all_loaded(), is_list(Filename)].
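-%% Editorial note (not in the original file): the is_list(Filename)
-%% guard skips modules whose "filename" is an atom such as preloaded or
-%% cover_compiled. The half-open window Mtime >= From, Mtime < To lets
-%% each scan pick up exactly the files modified since the previous tick;
-%% the per-module result atoms are informational only and are discarded
-%% by handle_info/2.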
-
-reload(Module) ->
- io:format("Reloading ~p ...", [Module]),
- code:purge(Module),
- case code:load_file(Module) of
- {module, Module} ->
- io:format(" ok.~n"),
- case erlang:function_exported(Module, test, 0) of
- true ->
- io:format(" - Calling ~p:test() ...", [Module]),
- case catch Module:test() of
- ok ->
- io:format(" ok.~n"),
- reload;
- Reason ->
- io:format(" fail: ~p.~n", [Reason]),
- reload_but_test_failed
- end;
- false ->
- reload
- end;
- {error, Reason} ->
- io:format(" fail: ~p.~n", [Reason]),
- error
- end.
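-%% Editorial note (not in the original file): code:purge/1 kills any
-%% process still running old code for the module before the new beam is
-%% loaded. The function_exported(Module, test, 0) check dovetails with
-%% eunit: including eunit/include/eunit.hrl exports a test/0 runner, so
-%% modules with eunit tests are re-tested automatically on every reload.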
-
-
-stamp() ->
- erlang:localtime().
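-%% Editorial note (not in the original file): stamp/0 is wall-clock
-%% local time with one-second granularity, matching the local-time
-%% mtimes that file:read_file_info/1 returns by default; a backwards
-%% clock step (e.g. DST) can therefore make a scan window miss or
-%% repeat modifications.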
-
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
--endif.